/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
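
/* Illustrative usage (a sketch, assuming the usual tree-building helpers
   of this era of the API, such as build and size_int):
   fold (build (PLUS_EXPR, integer_type_node, integer_one_node,
   integer_zero_node)) folds to the constant 1, and
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) yields the
   sizetype constant 12.  */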
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
						 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparisons, such as combining a pair of comparisons
   with AND or OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
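
/* The encoding makes combining comparisons plain bit arithmetic:
   (COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE (1 | 2 == 3), and
   (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ (3 & 6 == 2), mirroring
   the fact that a <= b && a >= b implies a == b.  */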
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
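
/* Worked example, scaled down to 8 bits: 0x7f + 0x01 = 0x80.  Both
   operands are nonnegative, so ~(a ^ b) has the sign bit set, and
   a ^ sum (0x7f ^ 0x80 = 0xff) has it set too, so the macro yields
   nonzero: overflow.  For 0x01 + 0xff = 0x00 the operand signs differ,
   ~(a ^ b) has a clear sign bit, and no overflow is reported.  */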
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
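
/* For example, with a 64-bit HOST_WIDE_INT, BASE is 2^32 and the word
   0x123456789abcdef0 splits into LOWPART 0x9abcdef0 and HIGHPART
   0x12345678; LOWPART (x) + HIGHPART (x) * BASE reconstructs x.  */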
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
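
/* encode and decode are exact inverses: decode (words, &l, &h) after
   encode (words, l0, h0) recovers l0 and h0 unchanged.  */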
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
	 Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
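
/* For example, forcing the value 255 to fit a signed 8-bit type
   sign-extends bit 7, leaving -1 and reporting overflow (255 is not
   representable); forcing it to fit an unsigned 8-bit type leaves 255
   and reports no overflow.  */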
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
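
/* The carry out of the low word is recovered without a wider type:
   unsigned addition wraps modulo 2^HOST_BITS_PER_WIDE_INT, so
   l1 + l2 < l1 exactly when a carry occurred, and (l < l1) feeds that
   carry into the high word.  */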
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
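
/* Note the two-step shift above, l1 >> (HOST_BITS_PER_WIDE_INT - count - 1)
   >> 1: a single shift by (HOST_BITS_PER_WIDE_INT - count) would be
   undefined C when COUNT is 0.  rshift_double below uses the same idiom
   in the other direction.  */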
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;	/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
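
/* Rounding example: 7 / 2 yields quotient 3 under TRUNC_DIV_EXPR and
   FLOOR_DIV_EXPR, and 4 under CEIL_DIV_EXPR and ROUND_DIV_EXPR (the
   2 * rem >= den test makes halfway cases round away from zero);
   -7 / 2 yields -3 under TRUNC and CEIL, -4 under FLOOR and ROUND.  */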
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
	{
	  if (TREE_INT_CST_LOW (t) != 0)
	    return true;
	  prec -= HOST_BITS_PER_WIDE_INT;
	  val = TREE_INT_CST_HIGH (t);
	}
      else
	val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
	val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (t),
				   TREE_INT_CST_HIGH (t),
				   &low, &high);
	tem = build_int_2 (low, high);
	TREE_TYPE (tem) = type;
	TREE_OVERFLOW (tem)
	  = (TREE_OVERFLOW (t)
	     | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
	TREE_CONSTANT_OVERFLOW (tem)
	  = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
      }
      if (! TREE_OVERFLOW (tem)
	  || TREE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    return fold_convert (type,
				 fold (build (MINUS_EXPR, TREE_TYPE (t),
					      negate_expr (TREE_OPERAND (t, 1)),
					      TREE_OPERAND (t, 0))));
	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    return fold_convert (type,
				 fold (build (MINUS_EXPR, TREE_TYPE (t),
					      negate_expr (TREE_OPERAND (t, 0)),
					      TREE_OPERAND (t, 1))));
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold (build (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1))));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
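
/* For example, splitting IN = x + 3 with CODE == PLUS_EXPR stores the
   literal 3 in *LITP, leaves *CONP null, and returns x as the variable
   part.  */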
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build (MINUS_EXPR, type, fold_convert (type, t2),
			  fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build (MINUS_EXPR, type, fold_convert (type, t1),
			  fold_convert (type, TREE_OPERAND (t2, 0)));
	}
      return build (code, type, fold_convert (type, t1),
		    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
		      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
	? (!uns || is_sizetype) && overflow
	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
	   && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
	= (force_fit_type (t, 0)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
	  ^ htab_hash_pointer (TREE_TYPE (t))
	  ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that represented by *Y, also an INTEGER_CST.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
	  && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
	  && TREE_TYPE (xt) == TREE_TYPE (yt)
	  && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
	   ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
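
/* For example, given the sizetype constants 2 and 5, the difference
   cannot be represented in the unsigned sizetype, so the code computes
   0 - (5 - 2) and returns -3 in ssizetype.  */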
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;

	  /* If we are trying to make a sizetype for a small integer, use
	     size_int to pick up cached types to reduce duplicate nodes.  */
	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && !TREE_CONSTANT_OVERFLOW (arg1)
	      && compare_tree_int (arg1, 10000) < 0)
	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* The following code implements the floating point to integer
	     conversion rules required by the Java Language Specification,
	     that IEEE NaNs are mapped to zero and values that overflow
	     the target precision saturate, i.e. values greater than
	     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
	     are mapped to INT_MIN.  These semantics are allowed by the
	     C and C++ standards that simply state that the behavior of
	     FP-to-integer conversion is unspecified upon overflow.  */

	  HOST_WIDE_INT high, low;

	  REAL_VALUE_TYPE r;
	  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

	  switch (code)
	    {
	    case FIX_TRUNC_EXPR:
	      real_trunc (&r, VOIDmode, &x);
	      break;

	    case FIX_CEIL_EXPR:
	      real_ceil (&r, VOIDmode, &x);
	      break;

	    case FIX_FLOOR_EXPR:
	      real_floor (&r, VOIDmode, &x);
	      break;

	    default:
	      abort ();
	    }

	  /* If R is NaN, return zero and show we have an overflow.  */
	  if (REAL_VALUE_ISNAN (r))
	    {
	      overflow = 1;
	      high = 0;
	      low = 0;
	    }

	  /* See if R is less than the lower bound or greater than the
	     upper bound.  */

	  if (! overflow)
	    {
	      tree lt = TYPE_MIN_VALUE (type);
	      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
	      if (REAL_VALUES_LESS (r, l))
		{
		  overflow = 1;
		  high = TREE_INT_CST_HIGH (lt);
		  low = TREE_INT_CST_LOW (lt);
		}
	    }

	  if (! overflow)
	    {
	      tree ut = TYPE_MAX_VALUE (type);
	      if (ut)
		{
		  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
		  if (REAL_VALUES_LESS (u, r))
		    {
		      overflow = 1;
		      high = TREE_INT_CST_HIGH (ut);
		      low = TREE_INT_CST_LOW (ut);
		    }
		}
	    }

	  if (! overflow)
	    REAL_VALUE_TO_INT (&low, &high, r);

	  t = build_int_2 (low, high);
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  return NULL_TREE;
}
1853 /* Convert expression ARG to type TYPE. Used by the middle-end for
1854 simple conversions in preference to calling the front-end's convert. */
1856 static tree
1857 fold_convert (tree type, tree arg)
1859 tree orig = TREE_TYPE (arg);
1860 tree tem;
1862 if (type == orig)
1863 return arg;
1865 if (TREE_CODE (arg) == ERROR_MARK
1866 || TREE_CODE (type) == ERROR_MARK
1867 || TREE_CODE (orig) == ERROR_MARK)
1868 return error_mark_node;
1870 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1871 return fold (build1 (NOP_EXPR, type, arg));
1873 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1875 if (TREE_CODE (arg) == INTEGER_CST)
1877 tem = fold_convert_const (NOP_EXPR, type, arg);
1878 if (tem != NULL_TREE)
1879 return tem;
1881 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1882 return fold (build1 (NOP_EXPR, type, arg));
1883 if (TREE_CODE (orig) == COMPLEX_TYPE)
1885 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1886 return fold_convert (type, tem);
1888 if (TREE_CODE (orig) == VECTOR_TYPE
1889 && GET_MODE_SIZE (TYPE_MODE (type))
1890 == GET_MODE_SIZE (TYPE_MODE (orig)))
1891 return fold (build1 (NOP_EXPR, type, arg));
1893 else if (TREE_CODE (type) == REAL_TYPE)
1895 if (TREE_CODE (arg) == INTEGER_CST)
1897 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1898 if (tem != NULL_TREE)
1899 return tem;
1901 else if (TREE_CODE (arg) == REAL_CST)
1903 tem = fold_convert_const (NOP_EXPR, type, arg);
1904 if (tem != NULL_TREE)
1905 return tem;
1908 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1909 return fold (build1 (FLOAT_EXPR, type, arg));
1910 if (TREE_CODE (orig) == REAL_TYPE)
1911 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1912 type, arg));
1913 if (TREE_CODE (orig) == COMPLEX_TYPE)
1915 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1916 return fold_convert (type, tem);
1919 else if (TREE_CODE (type) == COMPLEX_TYPE)
1921 if (INTEGRAL_TYPE_P (orig)
1922 || POINTER_TYPE_P (orig)
1923 || TREE_CODE (orig) == REAL_TYPE)
1924 return build (COMPLEX_EXPR, type,
1925 fold_convert (TREE_TYPE (type), arg),
1926 fold_convert (TREE_TYPE (type), integer_zero_node));
1927 if (TREE_CODE (orig) == COMPLEX_TYPE)
1929 tree rpart, ipart;
1931 if (TREE_CODE (arg) == COMPLEX_EXPR)
1933 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1934 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1935 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1938 arg = save_expr (arg);
1939 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1940 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1941 rpart = fold_convert (TREE_TYPE (type), rpart);
1942 ipart = fold_convert (TREE_TYPE (type), ipart);
1943 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1946 else if (TREE_CODE (type) == VECTOR_TYPE)
1948 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1949 && GET_MODE_SIZE (TYPE_MODE (type))
1950 == GET_MODE_SIZE (TYPE_MODE (orig)))
1951 return fold (build1 (NOP_EXPR, type, arg));
1952 if (TREE_CODE (orig) == VECTOR_TYPE
1953 && GET_MODE_SIZE (TYPE_MODE (type))
1954 == GET_MODE_SIZE (TYPE_MODE (orig)))
1955 return fold (build1 (NOP_EXPR, type, arg));
1957 else if (VOID_TYPE_P (type))
1958 return fold (build1 (CONVERT_EXPR, type, arg));
1959 abort ();
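#if 0
/* Illustration only (not part of the compiler): the COMPLEX_TYPE to
   REAL_TYPE arm of fold_convert above builds a REALPART_EXPR, which
   mirrors C's rule that converting a _Complex value to a real type
   keeps just the real part.  A hypothetical source-level analogue:  */
static double
complex_to_real_example (_Complex double z)
{
  return (double) z;		/* Same value as __real__ z.  */
}
#endif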
1962 /* Return an expr equal to X but certainly not valid as an lvalue. */
1964 tree
1965 non_lvalue (tree x)
1967 tree result;
1969 /* These things are certainly not lvalues. */
1970 if (TREE_CODE (x) == NON_LVALUE_EXPR
1971 || TREE_CODE (x) == INTEGER_CST
1972 || TREE_CODE (x) == REAL_CST
1973 || TREE_CODE (x) == STRING_CST
1974 || TREE_CODE (x) == ADDR_EXPR)
1975 return x;
1977 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1978 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1979 return result;
1982 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1983 Zero means allow extended lvalues. */
1985 int pedantic_lvalues;
1987 /* When pedantic, return an expr equal to X but certainly not valid as a
1988 pedantic lvalue. Otherwise, return X. */
1990 tree
1991 pedantic_non_lvalue (tree x)
1993 if (pedantic_lvalues)
1994 return non_lvalue (x);
1995 else
1996 return x;
1999 /* Given a tree comparison code, return the code that is the logical inverse
2000 of the given code. It is not safe to do this for floating-point
2001 comparisons, except for NE_EXPR and EQ_EXPR. */
2003 static enum tree_code
2004 invert_tree_comparison (enum tree_code code)
2006 switch (code)
2008 case EQ_EXPR:
2009 return NE_EXPR;
2010 case NE_EXPR:
2011 return EQ_EXPR;
2012 case GT_EXPR:
2013 return LE_EXPR;
2014 case GE_EXPR:
2015 return LT_EXPR;
2016 case LT_EXPR:
2017 return GE_EXPR;
2018 case LE_EXPR:
2019 return GT_EXPR;
2020 default:
2021 abort ();
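#if 0
/* Illustration only: why the inversion above is unsafe for floating
   point.  With IEEE NaNs, a < b and a >= b can both be false, so the
   logical inverse of LT_EXPR is not GE_EXPR.  */
static int
nan_inversion_example (double a, double b)
{
  /* When a or b is a NaN, !(a < b) is 1 but (a >= b) is 0.  */
  return !(a < b) == (a >= b);	/* Yields 0 for NaN operands.  */
}
#endif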
2025 /* Similar, but return the comparison that results if the operands are
2026 swapped. This is safe for floating-point. */
2028 static enum tree_code
2029 swap_tree_comparison (enum tree_code code)
2031 switch (code)
2033 case EQ_EXPR:
2034 case NE_EXPR:
2035 return code;
2036 case GT_EXPR:
2037 return LT_EXPR;
2038 case GE_EXPR:
2039 return LE_EXPR;
2040 case LT_EXPR:
2041 return GT_EXPR;
2042 case LE_EXPR:
2043 return GE_EXPR;
2044 default:
2045 abort ();
2050 /* Convert a comparison tree code from an enum tree_code representation
2051 into a compcode bit-based encoding. This function is the inverse of
2052 compcode_to_comparison. */
2054 static int
2055 comparison_to_compcode (enum tree_code code)
2057 switch (code)
2059 case LT_EXPR:
2060 return COMPCODE_LT;
2061 case EQ_EXPR:
2062 return COMPCODE_EQ;
2063 case LE_EXPR:
2064 return COMPCODE_LE;
2065 case GT_EXPR:
2066 return COMPCODE_GT;
2067 case NE_EXPR:
2068 return COMPCODE_NE;
2069 case GE_EXPR:
2070 return COMPCODE_GE;
2071 default:
2072 abort ();
2076 /* Convert a compcode bit-based encoding of a comparison operator back
2077 to GCC's enum tree_code representation. This function is the
2078 inverse of comparison_to_compcode. */
2080 static enum tree_code
2081 compcode_to_comparison (int code)
2083 switch (code)
2085 case COMPCODE_LT:
2086 return LT_EXPR;
2087 case COMPCODE_EQ:
2088 return EQ_EXPR;
2089 case COMPCODE_LE:
2090 return LE_EXPR;
2091 case COMPCODE_GT:
2092 return GT_EXPR;
2093 case COMPCODE_NE:
2094 return NE_EXPR;
2095 case COMPCODE_GE:
2096 return GE_EXPR;
2097 default:
2098 abort ();
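#if 0
/* Sketch of what the bit-based encoding buys us (illustration only,
   assuming the COMPCODE_* values defined earlier in this file, where
   LT, EQ and GT each occupy one bit).  The union of two comparisons
   is then just the bitwise OR of their codes:  */
int le_code = COMPCODE_LT | COMPCODE_EQ;   /* == COMPCODE_LE */
int ne_code = COMPCODE_LT | COMPCODE_GT;   /* == COMPCODE_NE */
#endif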
2102 /* Return nonzero if CODE is a tree code that represents a truth value. */
2104 static int
2105 truth_value_p (enum tree_code code)
2107 return (TREE_CODE_CLASS (code) == '<'
2108 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2109 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2110 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2113 /* Return nonzero if two operands (typically of the same tree node)
2114 are necessarily equal. If either argument has side-effects this
2115 function returns zero.
2117 If ONLY_CONST is nonzero, only return nonzero for constants.
2118 This function tests whether the operands are indistinguishable;
2119 it does not test whether they are equal using C's == operation.
2120 The distinction is important for IEEE floating point, because
2121 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2122 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2124 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2125 even though it may hold multiple values during a function.
2126 This is because a GCC tree node guarantees that nothing else is
2127 executed between the evaluation of its "operands" (which may often
2128 be evaluated in arbitrary order). Hence if the operands themselves
2129 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2130 same value in each operand/subexpression. Hence a zero value for
2131 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2132 If comparing arbitrary expression trees, such as from different
2133 statements, ONLY_CONST must usually be nonzero. */
2136 operand_equal_p (tree arg0, tree arg1, int only_const)
2138 tree fndecl;
2140 /* If the two types don't have the same signedness, then we can't
2141 consider them equal. We must check this before the STRIP_NOPS calls
2142 because they may change the signedness of the arguments. */
2143 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2144 return 0;
2146 STRIP_NOPS (arg0);
2147 STRIP_NOPS (arg1);
2149 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2150 /* This is needed for conversions and for COMPONENT_REF.
2151 Might as well play it safe and always test this. */
2152 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2153 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2154 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2155 return 0;
2157 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2158 We don't care about side effects in that case because the SAVE_EXPR
2159 takes care of that for us. In all other cases, two expressions are
2160 equal if they have no side effects. If we have two identical
2161 expressions with side effects that should be treated the same due
2162 to the only side effects being identical SAVE_EXPR's, that will
2163 be detected in the recursive calls below. */
2164 if (arg0 == arg1 && ! only_const
2165 && (TREE_CODE (arg0) == SAVE_EXPR
2166 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2167 return 1;
2169 /* Next handle constant cases, those for which we can return 1 even
2170 if ONLY_CONST is set. */
2171 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2172 switch (TREE_CODE (arg0))
2174 case INTEGER_CST:
2175 return (! TREE_CONSTANT_OVERFLOW (arg0)
2176 && ! TREE_CONSTANT_OVERFLOW (arg1)
2177 && tree_int_cst_equal (arg0, arg1));
2179 case REAL_CST:
2180 return (! TREE_CONSTANT_OVERFLOW (arg0)
2181 && ! TREE_CONSTANT_OVERFLOW (arg1)
2182 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2183 TREE_REAL_CST (arg1)));
2185 case VECTOR_CST:
2187 tree v1, v2;
2189 if (TREE_CONSTANT_OVERFLOW (arg0)
2190 || TREE_CONSTANT_OVERFLOW (arg1))
2191 return 0;
2193 v1 = TREE_VECTOR_CST_ELTS (arg0);
2194 v2 = TREE_VECTOR_CST_ELTS (arg1);
2195 while (v1 && v2)
2197 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2198 only_const))
2199 return 0;
2200 v1 = TREE_CHAIN (v1);
2201 v2 = TREE_CHAIN (v2);
2204 return 1;
2207 case COMPLEX_CST:
2208 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2209 only_const)
2210 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2211 only_const));
2213 case STRING_CST:
2214 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2215 && ! memcmp (TREE_STRING_POINTER (arg0),
2216 TREE_STRING_POINTER (arg1),
2217 TREE_STRING_LENGTH (arg0)));
2219 case ADDR_EXPR:
2220 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2221 0);
2222 default:
2223 break;
2226 if (only_const)
2227 return 0;
2229 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2231 case '1':
2232 /* Two conversions are equal only if signedness and modes match. */
2233 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2234 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2235 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2236 return 0;
2238 return operand_equal_p (TREE_OPERAND (arg0, 0),
2239 TREE_OPERAND (arg1, 0), 0);
2241 case '<':
2242 case '2':
2243 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2244 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2245 0))
2246 return 1;
2248 /* For commutative ops, allow the other order. */
2249 return (commutative_tree_code (TREE_CODE (arg0))
2250 && operand_equal_p (TREE_OPERAND (arg0, 0),
2251 TREE_OPERAND (arg1, 1), 0)
2252 && operand_equal_p (TREE_OPERAND (arg0, 1),
2253 TREE_OPERAND (arg1, 0), 0));
2255 case 'r':
2256 /* If either of the pointer (or reference) expressions we are
2257 dereferencing contain a side effect, these cannot be equal. */
2258 if (TREE_SIDE_EFFECTS (arg0)
2259 || TREE_SIDE_EFFECTS (arg1))
2260 return 0;
2262 switch (TREE_CODE (arg0))
2264 case INDIRECT_REF:
2265 return operand_equal_p (TREE_OPERAND (arg0, 0),
2266 TREE_OPERAND (arg1, 0), 0);
2268 case COMPONENT_REF:
2269 case ARRAY_REF:
2270 case ARRAY_RANGE_REF:
2271 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2272 TREE_OPERAND (arg1, 0), 0)
2273 && operand_equal_p (TREE_OPERAND (arg0, 1),
2274 TREE_OPERAND (arg1, 1), 0));
2276 case BIT_FIELD_REF:
2277 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2278 TREE_OPERAND (arg1, 0), 0)
2279 && operand_equal_p (TREE_OPERAND (arg0, 1),
2280 TREE_OPERAND (arg1, 1), 0)
2281 && operand_equal_p (TREE_OPERAND (arg0, 2),
2282 TREE_OPERAND (arg1, 2), 0));
2283 default:
2284 return 0;
2287 case 'e':
2288 switch (TREE_CODE (arg0))
2290 case ADDR_EXPR:
2291 case TRUTH_NOT_EXPR:
2292 return operand_equal_p (TREE_OPERAND (arg0, 0),
2293 TREE_OPERAND (arg1, 0), 0);
2295 case RTL_EXPR:
2296 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2298 case CALL_EXPR:
2299 /* If the CALL_EXPRs call different functions, then they
2300 clearly can not be equal. */
2301 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2302 TREE_OPERAND (arg1, 0), 0))
2303 return 0;
2305 /* Only consider const functions equivalent. */
2306 fndecl = get_callee_fndecl (arg0);
2307 if (fndecl == NULL_TREE
2308 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2309 return 0;
2311 /* Now see if all the arguments are the same. operand_equal_p
2312 does not handle TREE_LIST, so we walk the operands here
2313 feeding them to operand_equal_p. */
2314 arg0 = TREE_OPERAND (arg0, 1);
2315 arg1 = TREE_OPERAND (arg1, 1);
2316 while (arg0 && arg1)
2318 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2319 return 0;
2321 arg0 = TREE_CHAIN (arg0);
2322 arg1 = TREE_CHAIN (arg1);
2325 /* If we get here and both argument lists are exhausted
2326 then the CALL_EXPRs are equal. */
2327 return ! (arg0 || arg1);
2329 default:
2330 return 0;
2333 case 'd':
2334 /* Consider __builtin_sqrt equal to sqrt. */
2335 return TREE_CODE (arg0) == FUNCTION_DECL
2336 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2337 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2338 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2340 default:
2341 return 0;
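#if 0
/* Illustration only of the indistinguishability rule documented
   above: C's == is the wrong test for constants.  */
static int
ieee_equality_example (void)
{
  double pz = 0.0, nz = -0.0;
  /* pz == nz holds, yet 0.0 and -0.0 are not interchangeable:
     1.0 / pz is +inf while 1.0 / nz is -inf, which is why the
     REAL_CST case above uses REAL_VALUES_IDENTICAL rather than ==.  */
  return pz == nz;		/* 1, despite distinguishable values.  */
}
#endif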
2345 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2346 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2348 When in doubt, return 0. */
2350 static int
2351 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2353 int unsignedp1, unsignedpo;
2354 tree primarg0, primarg1, primother;
2355 unsigned int correct_width;
2357 if (operand_equal_p (arg0, arg1, 0))
2358 return 1;
2360 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2361 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2362 return 0;
2364 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2365 and see if the inner values are the same. This removes any
2366 signedness comparison, which doesn't matter here. */
2367 primarg0 = arg0, primarg1 = arg1;
2368 STRIP_NOPS (primarg0);
2369 STRIP_NOPS (primarg1);
2370 if (operand_equal_p (primarg0, primarg1, 0))
2371 return 1;
2373 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2374 actual comparison operand, ARG0.
2376 First throw away any conversions to wider types
2377 already present in the operands. */
2379 primarg1 = get_narrower (arg1, &unsignedp1);
2380 primother = get_narrower (other, &unsignedpo);
2382 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2383 if (unsignedp1 == unsignedpo
2384 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2385 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2387 tree type = TREE_TYPE (arg0);
2389 /* Make sure shorter operand is extended the right way
2390 to match the longer operand. */
2391 primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2392 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2394 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2395 return 1;
2398 return 0;
2401 /* See if ARG is an expression that is either a comparison or is performing
2402 arithmetic on comparisons. The comparisons must only be comparing
2403 two different values, which will be stored in *CVAL1 and *CVAL2; if
2404 they are nonzero it means that some operands have already been found.
2405 No variables may be used anywhere else in the expression except in the
2406 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2407 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2409 If this is true, return 1. Otherwise, return zero. */
2411 static int
2412 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2414 enum tree_code code = TREE_CODE (arg);
2415 char class = TREE_CODE_CLASS (code);
2417 /* We can handle some of the 'e' cases here. */
2418 if (class == 'e' && code == TRUTH_NOT_EXPR)
2419 class = '1';
2420 else if (class == 'e'
2421 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2422 || code == COMPOUND_EXPR))
2423 class = '2';
2425 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2426 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2428 /* If we've already found a CVAL1 or CVAL2, this expression is
2429 too complex to handle. */
2430 if (*cval1 || *cval2)
2431 return 0;
2433 class = '1';
2434 *save_p = 1;
2437 switch (class)
2439 case '1':
2440 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2442 case '2':
2443 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2444 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2445 cval1, cval2, save_p));
2447 case 'c':
2448 return 1;
2450 case 'e':
2451 if (code == COND_EXPR)
2452 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2453 cval1, cval2, save_p)
2454 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2455 cval1, cval2, save_p)
2456 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2457 cval1, cval2, save_p));
2458 return 0;
2460 case '<':
2461 /* First see if we can handle the first operand, then the second. For
2462 the second operand, we know *CVAL1 can't be zero. It must be that
2463 one side of the comparison is each of the values; test for the
2464 case where this isn't true by failing if the two operands
2465 are the same. */
2467 if (operand_equal_p (TREE_OPERAND (arg, 0),
2468 TREE_OPERAND (arg, 1), 0))
2469 return 0;
2471 if (*cval1 == 0)
2472 *cval1 = TREE_OPERAND (arg, 0);
2473 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2475 else if (*cval2 == 0)
2476 *cval2 = TREE_OPERAND (arg, 0);
2477 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2479 else
2480 return 0;
2482 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2484 else if (*cval2 == 0)
2485 *cval2 = TREE_OPERAND (arg, 1);
2486 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2488 else
2489 return 0;
2491 return 1;
2493 default:
2494 return 0;
2498 /* ARG is a tree that is known to contain just arithmetic operations and
2499 comparisons. Evaluate the operations in the tree substituting NEW0 for
2500 any occurrence of OLD0 as an operand of a comparison and likewise for
2501 NEW1 and OLD1. */
2503 static tree
2504 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2506 tree type = TREE_TYPE (arg);
2507 enum tree_code code = TREE_CODE (arg);
2508 char class = TREE_CODE_CLASS (code);
2510 /* We can handle some of the 'e' cases here. */
2511 if (class == 'e' && code == TRUTH_NOT_EXPR)
2512 class = '1';
2513 else if (class == 'e'
2514 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2515 class = '2';
2517 switch (class)
2519 case '1':
2520 return fold (build1 (code, type,
2521 eval_subst (TREE_OPERAND (arg, 0),
2522 old0, new0, old1, new1)));
2524 case '2':
2525 return fold (build (code, type,
2526 eval_subst (TREE_OPERAND (arg, 0),
2527 old0, new0, old1, new1),
2528 eval_subst (TREE_OPERAND (arg, 1),
2529 old0, new0, old1, new1)));
2531 case 'e':
2532 switch (code)
2534 case SAVE_EXPR:
2535 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2537 case COMPOUND_EXPR:
2538 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2540 case COND_EXPR:
2541 return fold (build (code, type,
2542 eval_subst (TREE_OPERAND (arg, 0),
2543 old0, new0, old1, new1),
2544 eval_subst (TREE_OPERAND (arg, 1),
2545 old0, new0, old1, new1),
2546 eval_subst (TREE_OPERAND (arg, 2),
2547 old0, new0, old1, new1)));
2548 default:
2549 break;
2551 /* Fall through - ??? */
2553 case '<':
2555 tree arg0 = TREE_OPERAND (arg, 0);
2556 tree arg1 = TREE_OPERAND (arg, 1);
2558 /* We need to check both for exact equality and tree equality. The
2559 former will be true if the operand has a side-effect. In that
2560 case, we know the operand occurred exactly once. */
2562 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2563 arg0 = new0;
2564 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2565 arg0 = new1;
2567 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2568 arg1 = new0;
2569 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2570 arg1 = new1;
2572 return fold (build (code, type, arg0, arg1));
2575 default:
2576 return arg;
2580 /* Return a tree for the case when the result of an expression is RESULT
2581 converted to TYPE and OMITTED was previously an operand of the expression
2582 but is now not needed (e.g., we folded OMITTED * 0).
2584 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2585 the conversion of RESULT to TYPE. */
2587 tree
2588 omit_one_operand (tree type, tree result, tree omitted)
2590 tree t = fold_convert (type, result);
2592 if (TREE_SIDE_EFFECTS (omitted))
2593 return build (COMPOUND_EXPR, type, omitted, t);
2595 return non_lvalue (t);
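#if 0
/* Illustration only: when folding f () * 0, the call must still
   happen for its side effects, so omit_one_operand produces the
   tree equivalent of a C comma expression:  */
extern int f (void);
static int
omit_operand_example (void)
{
  return (f (), 0);		/* COMPOUND_EXPR <f (), 0> */
}
#endif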
2598 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2600 static tree
2601 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2603 tree t = fold_convert (type, result);
2605 if (TREE_SIDE_EFFECTS (omitted))
2606 return build (COMPOUND_EXPR, type, omitted, t);
2608 return pedantic_non_lvalue (t);
2611 /* Return a simplified tree node for the truth-negation of ARG. This
2612 never alters ARG itself. We assume that ARG is an operation that
2613 returns a truth value (0 or 1). */
2615 tree
2616 invert_truthvalue (tree arg)
2618 tree type = TREE_TYPE (arg);
2619 enum tree_code code = TREE_CODE (arg);
2621 if (code == ERROR_MARK)
2622 return arg;
2624 /* If this is a comparison, we can simply invert it, except for
2625 floating-point non-equality comparisons, in which case we just
2626 enclose a TRUTH_NOT_EXPR around what we have. */
2628 if (TREE_CODE_CLASS (code) == '<')
2630 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2631 && !flag_unsafe_math_optimizations
2632 && code != NE_EXPR
2633 && code != EQ_EXPR)
2634 return build1 (TRUTH_NOT_EXPR, type, arg);
2635 else if (code == UNORDERED_EXPR
2636 || code == ORDERED_EXPR
2637 || code == UNEQ_EXPR
2638 || code == UNLT_EXPR
2639 || code == UNLE_EXPR
2640 || code == UNGT_EXPR
2641 || code == UNGE_EXPR)
2642 return build1 (TRUTH_NOT_EXPR, type, arg);
2643 else
2644 return build (invert_tree_comparison (code), type,
2645 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2648 switch (code)
2650 case INTEGER_CST:
2651 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2653 case TRUTH_AND_EXPR:
2654 return build (TRUTH_OR_EXPR, type,
2655 invert_truthvalue (TREE_OPERAND (arg, 0)),
2656 invert_truthvalue (TREE_OPERAND (arg, 1)));
2658 case TRUTH_OR_EXPR:
2659 return build (TRUTH_AND_EXPR, type,
2660 invert_truthvalue (TREE_OPERAND (arg, 0)),
2661 invert_truthvalue (TREE_OPERAND (arg, 1)));
2663 case TRUTH_XOR_EXPR:
2664 /* Here we can invert either operand. We invert the first operand
2665 unless the second operand is a TRUTH_NOT_EXPR in which case our
2666 result is the XOR of the first operand with the inside of the
2667 negation of the second operand. */
2669 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2670 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2671 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2672 else
2673 return build (TRUTH_XOR_EXPR, type,
2674 invert_truthvalue (TREE_OPERAND (arg, 0)),
2675 TREE_OPERAND (arg, 1));
2677 case TRUTH_ANDIF_EXPR:
2678 return build (TRUTH_ORIF_EXPR, type,
2679 invert_truthvalue (TREE_OPERAND (arg, 0)),
2680 invert_truthvalue (TREE_OPERAND (arg, 1)));
2682 case TRUTH_ORIF_EXPR:
2683 return build (TRUTH_ANDIF_EXPR, type,
2684 invert_truthvalue (TREE_OPERAND (arg, 0)),
2685 invert_truthvalue (TREE_OPERAND (arg, 1)));
2687 case TRUTH_NOT_EXPR:
2688 return TREE_OPERAND (arg, 0);
2690 case COND_EXPR:
2691 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2692 invert_truthvalue (TREE_OPERAND (arg, 1)),
2693 invert_truthvalue (TREE_OPERAND (arg, 2)));
2695 case COMPOUND_EXPR:
2696 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2697 invert_truthvalue (TREE_OPERAND (arg, 1)));
2699 case WITH_RECORD_EXPR:
2700 return build (WITH_RECORD_EXPR, type,
2701 invert_truthvalue (TREE_OPERAND (arg, 0)),
2702 TREE_OPERAND (arg, 1));
2704 case NON_LVALUE_EXPR:
2705 return invert_truthvalue (TREE_OPERAND (arg, 0));
2707 case NOP_EXPR:
2708 case CONVERT_EXPR:
2709 case FLOAT_EXPR:
2710 return build1 (TREE_CODE (arg), type,
2711 invert_truthvalue (TREE_OPERAND (arg, 0)));
2713 case BIT_AND_EXPR:
2714 if (!integer_onep (TREE_OPERAND (arg, 1)))
2715 break;
2716 return build (EQ_EXPR, type, arg,
2717 fold_convert (type, integer_zero_node));
2719 case SAVE_EXPR:
2720 return build1 (TRUTH_NOT_EXPR, type, arg);
2722 case CLEANUP_POINT_EXPR:
2723 return build1 (CLEANUP_POINT_EXPR, type,
2724 invert_truthvalue (TREE_OPERAND (arg, 0)));
2726 default:
2727 break;
2729 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2730 abort ();
2731 return build1 (TRUTH_NOT_EXPR, type, arg);
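#if 0
/* Illustration only: invert_truthvalue pushes the negation inward
   by De Morgan's laws rather than wrapping a TRUTH_NOT_EXPR, e.g.:  */
static int
de_morgan_example (int a, int b)
{
  /* !(a && b)  becomes  !a || !b  */
  return !a || !b;
}
#endif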
2734 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2735 operands are another bit-wise operation with a common input. If so,
2736 distribute the bit operations to save an operation and possibly two if
2737 constants are involved. For example, convert
2738 (A | B) & (A | C) into A | (B & C)
2739 Further simplification will occur if B and C are constants.
2741 If this optimization cannot be done, 0 will be returned. */
2743 static tree
2744 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2746 tree common;
2747 tree left, right;
2749 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2750 || TREE_CODE (arg0) == code
2751 || (TREE_CODE (arg0) != BIT_AND_EXPR
2752 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2753 return 0;
2755 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2757 common = TREE_OPERAND (arg0, 0);
2758 left = TREE_OPERAND (arg0, 1);
2759 right = TREE_OPERAND (arg1, 1);
2761 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2763 common = TREE_OPERAND (arg0, 0);
2764 left = TREE_OPERAND (arg0, 1);
2765 right = TREE_OPERAND (arg1, 0);
2767 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2769 common = TREE_OPERAND (arg0, 1);
2770 left = TREE_OPERAND (arg0, 0);
2771 right = TREE_OPERAND (arg1, 1);
2773 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2775 common = TREE_OPERAND (arg0, 1);
2776 left = TREE_OPERAND (arg0, 0);
2777 right = TREE_OPERAND (arg1, 0);
2779 else
2780 return 0;
2782 return fold (build (TREE_CODE (arg0), type, common,
2783 fold (build (code, type, left, right))));
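#if 0
/* Illustration only of the distribution above: both expressions
   compute the same value, but the rewritten form needs one fewer
   bitwise operation, and b & c folds away when both are constants.  */
static unsigned int
distribute_example (unsigned int a, unsigned int b, unsigned int c)
{
  /* (a | b) & (a | c)  becomes  a | (b & c)  */
  return a | (b & c);
}
#endif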
2786 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2787 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2789 static tree
2790 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2791 int unsignedp)
2793 tree result = build (BIT_FIELD_REF, type, inner,
2794 size_int (bitsize), bitsize_int (bitpos));
2796 TREE_UNSIGNED (result) = unsignedp;
2798 return result;
2801 /* Optimize a bit-field compare.
2803 There are two cases: First is a compare against a constant and the
2804 second is a comparison of two items where the fields are at the same
2805 bit position relative to the start of a chunk (byte, halfword, word)
2806 large enough to contain it. In these cases we can avoid the shift
2807 implicit in bitfield extractions.
2809 For constants, we emit a compare of the shifted constant with the
2810 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2811 compared. For two fields at the same position, we do the ANDs with the
2812 similar mask and compare the result of the ANDs.
2814 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2815 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2816 are the left and right operands of the comparison, respectively.
2818 If the optimization described above can be done, we return the resulting
2819 tree. Otherwise we return zero. */
2821 static tree
2822 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2823 tree lhs, tree rhs)
2825 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2826 tree type = TREE_TYPE (lhs);
2827 tree signed_type, unsigned_type;
2828 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2829 enum machine_mode lmode, rmode, nmode;
2830 int lunsignedp, runsignedp;
2831 int lvolatilep = 0, rvolatilep = 0;
2832 tree linner, rinner = NULL_TREE;
2833 tree mask;
2834 tree offset;
2836 /* Get all the information about the extractions being done. If the bit size
2837 is the same as the size of the underlying object, we aren't doing an
2838 extraction at all and so can do nothing. We also don't want to
2839 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2840 then will no longer be able to replace it. */
2841 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2842 &lunsignedp, &lvolatilep);
2843 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2844 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2845 return 0;
2847 if (!const_p)
2849 /* If this is not a constant, we can only do something if bit positions,
2850 sizes, and signedness are the same. */
2851 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2852 &runsignedp, &rvolatilep);
2854 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2855 || lunsignedp != runsignedp || offset != 0
2856 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2857 return 0;
2860 /* See if we can find a mode to refer to this field. We should be able to,
2861 but fail if we can't. */
2862 nmode = get_best_mode (lbitsize, lbitpos,
2863 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2864 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2865 TYPE_ALIGN (TREE_TYPE (rinner))),
2866 word_mode, lvolatilep || rvolatilep);
2867 if (nmode == VOIDmode)
2868 return 0;
2870 /* Set signed and unsigned types of the precision of this mode for the
2871 shifts below. */
2872 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2873 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2875 /* Compute the bit position and size for the new reference and our offset
2876 within it. If the new reference is the same size as the original, we
2877 won't optimize anything, so return zero. */
2878 nbitsize = GET_MODE_BITSIZE (nmode);
2879 nbitpos = lbitpos & ~ (nbitsize - 1);
2880 lbitpos -= nbitpos;
2881 if (nbitsize == lbitsize)
2882 return 0;
2884 if (BYTES_BIG_ENDIAN)
2885 lbitpos = nbitsize - lbitsize - lbitpos;
2887 /* Make the mask to be used against the extracted field. */
2888 mask = build_int_2 (~0, ~0);
2889 TREE_TYPE (mask) = unsigned_type;
2890 force_fit_type (mask, 0);
2891 mask = fold_convert (unsigned_type, mask);
2892 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2893 mask = const_binop (RSHIFT_EXPR, mask,
2894 size_int (nbitsize - lbitsize - lbitpos), 0);
2896 if (! const_p)
2897 /* If not comparing with constant, just rework the comparison
2898 and return. */
2899 return build (code, compare_type,
2900 build (BIT_AND_EXPR, unsigned_type,
2901 make_bit_field_ref (linner, unsigned_type,
2902 nbitsize, nbitpos, 1),
2903 mask),
2904 build (BIT_AND_EXPR, unsigned_type,
2905 make_bit_field_ref (rinner, unsigned_type,
2906 nbitsize, nbitpos, 1),
2907 mask));
2909 /* Otherwise, we are handling the constant case. See if the constant is too
2910 big for the field. Warn and return a tree for 0 (false) if so. We do
2911 this not only for its own sake, but to avoid having to test for this
2912 error case below. If we didn't, we might generate wrong code.
2914 For unsigned fields, the constant shifted right by the field length should
2915 be all zero. For signed fields, the high-order bits should agree with
2916 the sign bit. */
2918 if (lunsignedp)
2920 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2921 fold_convert (unsigned_type, rhs),
2922 size_int (lbitsize), 0)))
2924 warning ("comparison is always %d due to width of bit-field",
2925 code == NE_EXPR);
2926 return fold_convert (compare_type,
2927 (code == NE_EXPR
2928 ? integer_one_node : integer_zero_node));
2931 else
2933 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2934 size_int (lbitsize - 1), 0);
2935 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2937 warning ("comparison is always %d due to width of bit-field",
2938 code == NE_EXPR);
2939 return fold_convert (compare_type,
2940 (code == NE_EXPR
2941 ? integer_one_node : integer_zero_node));
2945 /* Single-bit compares should always be against zero. */
2946 if (lbitsize == 1 && ! integer_zerop (rhs))
2948 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2949 rhs = fold_convert (type, integer_zero_node);
2952 /* Make a new bitfield reference, shift the constant over the
2953 appropriate number of bits and mask it with the computed mask
2954 (in case this was a signed field). If we changed it, make a new one. */
2955 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2956 if (lvolatilep)
2958 TREE_SIDE_EFFECTS (lhs) = 1;
2959 TREE_THIS_VOLATILE (lhs) = 1;
2962 rhs = fold (const_binop (BIT_AND_EXPR,
2963 const_binop (LSHIFT_EXPR,
2964 fold_convert (unsigned_type, rhs),
2965 size_int (lbitpos), 0),
2966 mask, 0));
2968 return build (code, compare_type,
2969 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2970 rhs);
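#if 0
/* Sketch of the mask construction used above, specialized to a
   32-bit chunk (illustration only): build LBITSIZE ones and slide
   them down to bit position LBITPOS.  */
static unsigned int
bit_field_mask_example (int lbitsize, int lbitpos)
{
  unsigned int mask = ~0U;
  mask <<= 32 - lbitsize;		/* keep LBITSIZE high bits  */
  mask >>= 32 - lbitsize - lbitpos;	/* shift them to LBITPOS    */
  return mask;		/* e.g. lbitsize 3, lbitpos 4 gives 0x70.  */
}
#endif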
2973 /* Subroutine for fold_truthop: decode a field reference.
2975 If EXP is a comparison reference, we return the innermost reference.
2977 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2978 set to the starting bit number.
2980 If the innermost field can be completely contained in a mode-sized
2981 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2983 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2984 otherwise it is not changed.
2986 *PUNSIGNEDP is set to the signedness of the field.
2988 *PMASK is set to the mask used. This is either contained in a
2989 BIT_AND_EXPR or derived from the width of the field.
2991 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2993 Return 0 if this is not a component reference or is one that we can't
2994 do anything with. */
2996 static tree
2997 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2998 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2999 int *punsignedp, int *pvolatilep,
3000 tree *pmask, tree *pand_mask)
3002 tree outer_type = 0;
3003 tree and_mask = 0;
3004 tree mask, inner, offset;
3005 tree unsigned_type;
3006 unsigned int precision;
3008 /* All the optimizations using this function assume integer fields.
3009 There are problems with FP fields since the type_for_size call
3010 below can fail for, e.g., XFmode. */
3011 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3012 return 0;
3014 /* We are interested in the bare arrangement of bits, so strip everything
3015 that doesn't affect the machine mode. However, record the type of the
3016 outermost expression if it may matter below. */
3017 if (TREE_CODE (exp) == NOP_EXPR
3018 || TREE_CODE (exp) == CONVERT_EXPR
3019 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3020 outer_type = TREE_TYPE (exp);
3021 STRIP_NOPS (exp);
3023 if (TREE_CODE (exp) == BIT_AND_EXPR)
3025 and_mask = TREE_OPERAND (exp, 1);
3026 exp = TREE_OPERAND (exp, 0);
3027 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3028 if (TREE_CODE (and_mask) != INTEGER_CST)
3029 return 0;
3032 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3033 punsignedp, pvolatilep);
3034 if ((inner == exp && and_mask == 0)
3035 || *pbitsize < 0 || offset != 0
3036 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3037 return 0;
3039 /* If the number of bits in the reference is the same as the bitsize of
3040 the outer type, then the outer type gives the signedness. Otherwise
3041 (in case of a small bitfield) the signedness is unchanged. */
3042 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3043 *punsignedp = TREE_UNSIGNED (outer_type);
3045 /* Compute the mask to access the bitfield. */
3046 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3047 precision = TYPE_PRECISION (unsigned_type);
3049 mask = build_int_2 (~0, ~0);
3050 TREE_TYPE (mask) = unsigned_type;
3051 force_fit_type (mask, 0);
3052 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3053 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3055 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3056 if (and_mask != 0)
3057 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3058 fold_convert (unsigned_type, and_mask), mask));
3060 *pmask = mask;
3061 *pand_mask = and_mask;
3062 return inner;
3065 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3066 bit positions. */
3068 static int
3069 all_ones_mask_p (tree mask, int size)
3071 tree type = TREE_TYPE (mask);
3072 unsigned int precision = TYPE_PRECISION (type);
3073 tree tmask;
3075 tmask = build_int_2 (~0, ~0);
3076 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3077 force_fit_type (tmask, 0);
3078 return
3079 tree_int_cst_equal (mask,
3080 const_binop (RSHIFT_EXPR,
3081 const_binop (LSHIFT_EXPR, tmask,
3082 size_int (precision - size),
3083 0),
3084 size_int (precision - size), 0));
3087 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3088 represents the sign bit of EXP's type. If EXP represents a sign
3089 or zero extension, also test VAL against the unextended type.
3090 The return value is the (sub)expression whose sign bit is VAL,
3091 or NULL_TREE otherwise. */
3093 static tree
3094 sign_bit_p (tree exp, tree val)
3096 unsigned HOST_WIDE_INT mask_lo, lo;
3097 HOST_WIDE_INT mask_hi, hi;
3098 int width;
3099 tree t;
3101 /* Tree EXP must have an integral type. */
3102 t = TREE_TYPE (exp);
3103 if (! INTEGRAL_TYPE_P (t))
3104 return NULL_TREE;
3106 /* Tree VAL must be an integer constant. */
3107 if (TREE_CODE (val) != INTEGER_CST
3108 || TREE_CONSTANT_OVERFLOW (val))
3109 return NULL_TREE;
3111 width = TYPE_PRECISION (t);
3112 if (width > HOST_BITS_PER_WIDE_INT)
3114 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3115 lo = 0;
3117 mask_hi = ((unsigned HOST_WIDE_INT) -1
3118 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3119 mask_lo = -1;
3121 else
3123 hi = 0;
3124 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3126 mask_hi = 0;
3127 mask_lo = ((unsigned HOST_WIDE_INT) -1
3128 >> (HOST_BITS_PER_WIDE_INT - width));
3131 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3132 treat VAL as if it were unsigned. */
3133 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3134 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3135 return exp;
3137 /* Handle extension from a narrower type. */
3138 if (TREE_CODE (exp) == NOP_EXPR
3139 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3140 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3142 return NULL_TREE;
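#if 0
/* Sketch of the single-word case above (illustration only, assuming
   a 64-bit unsigned long long): for a WIDTH-bit type, VAL is the
   sign bit iff it equals 1 << (WIDTH - 1) once bits beyond WIDTH
   are masked off.  */
static int
sign_bit_example (unsigned long long val, int width)
{
  unsigned long long bit  = 1ULL << (width - 1);
  unsigned long long mask = ~0ULL >> (64 - width);
  return (val & mask) == bit;	/* width 8: only 0x80 matches.  */
}
#endif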
3145 /* Subroutine for fold_truthop: determine if an operand is simple enough
3146 to be evaluated unconditionally. */
3148 static int
3149 simple_operand_p (tree exp)
3151 /* Strip any conversions that don't change the machine mode. */
3152 while ((TREE_CODE (exp) == NOP_EXPR
3153 || TREE_CODE (exp) == CONVERT_EXPR)
3154 && (TYPE_MODE (TREE_TYPE (exp))
3155 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3156 exp = TREE_OPERAND (exp, 0);
3158 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3159 || (DECL_P (exp)
3160 && ! TREE_ADDRESSABLE (exp)
3161 && ! TREE_THIS_VOLATILE (exp)
3162 && ! DECL_NONLOCAL (exp)
3163 /* Don't regard global variables as simple. They may be
3164 allocated in ways unknown to the compiler (shared memory,
3165 #pragma weak, etc). */
3166 && ! TREE_PUBLIC (exp)
3167 && ! DECL_EXTERNAL (exp)
3168 /* Loading a static variable is unduly expensive, but global
3169 registers aren't expensive. */
3170 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3173 /* The following functions are subroutines to fold_range_test and allow it to
3174 try to change a logical combination of comparisons into a range test.
3176 For example, both
3177 X == 2 || X == 3 || X == 4 || X == 5
3178 and
3179 X >= 2 && X <= 5
3180 are converted to
3181 (unsigned) (X - 2) <= 3
3183 We describe each set of comparisons as being either inside or outside
3184 a range, using a variable named like IN_P, and then describe the
3185 range with a lower and upper bound. If one of the bounds is omitted,
3186 it represents either the highest or lowest value of the type.
3188 In the comments below, we represent a range by two numbers in brackets
3189 preceded by a "+" to designate being inside that range, or a "-" to
3190 designate being outside that range, so the condition can be inverted by
3191 flipping the prefix. An omitted bound is represented by a "-". For
3192 example, "- [-, 10]" means being outside the range starting at the lowest
3193 possible value and ending at 10, in other words, being greater than 10.
3194 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3195 always false.
3197 We set up things so that the missing bounds are handled in a consistent
3198 manner so neither a missing bound nor "true" and "false" need to be
3199 handled using a special case. */
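#if 0
/* Worked example of the transformation described above (illustration
   only): subtracting 2 maps the range [2, 5] onto [0, 3], and the
   unsigned compare makes anything below 2 wrap around to a huge
   value, so a single comparison tests both bounds.  */
static int
range_test_example (int x)
{
  /* x == 2 || x == 3 || x == 4 || x == 5  */
  return (unsigned int) (x - 2) <= 3;
}
#endif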
3201 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3202 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3203 and UPPER1_P are nonzero if the respective argument is an upper bound
3204 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3205 must be specified for a comparison. ARG1 will be converted to ARG0's
3206 type if both are specified. */
3208 static tree
3209 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3210 tree arg1, int upper1_p)
3212 tree tem;
3213 int result;
3214 int sgn0, sgn1;
3216 /* If neither arg represents infinity, do the normal operation.
3217 Else, if not a comparison, return infinity. Else handle the special
3218 comparison rules. Note that most of the cases below won't occur, but
3219 are handled for consistency. */
3221 if (arg0 != 0 && arg1 != 0)
3223 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3224 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3225 STRIP_NOPS (tem);
3226 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3229 if (TREE_CODE_CLASS (code) != '<')
3230 return 0;
3232 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3233 for neither. In real maths, we cannot assume open ended ranges are
3234 the same. But, this is computer arithmetic, where numbers are finite.
3235 We can therefore model any missing bound as a value Z greater than
3236 any representable number, which permits us to treat unbounded
3237 ranges as equal. */
3238 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3239 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3240 switch (code)
3242 case EQ_EXPR:
3243 result = sgn0 == sgn1;
3244 break;
3245 case NE_EXPR:
3246 result = sgn0 != sgn1;
3247 break;
3248 case LT_EXPR:
3249 result = sgn0 < sgn1;
3250 break;
3251 case LE_EXPR:
3252 result = sgn0 <= sgn1;
3253 break;
3254 case GT_EXPR:
3255 result = sgn0 > sgn1;
3256 break;
3257 case GE_EXPR:
3258 result = sgn0 >= sgn1;
3259 break;
3260 default:
3261 abort ();
3264 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3267 /* Given EXP, a logical expression, set the range it is testing into
3268 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3269 actually being tested. *PLOW and *PHIGH will be made of the same type
3270 as the returned expression. If EXP is not a comparison, we will most
3271 likely not be returning a useful value and range. */
3273 static tree
3274 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3276 enum tree_code code;
3277 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3278 tree orig_type = NULL_TREE;
3279 int in_p, n_in_p;
3280 tree low, high, n_low, n_high;
3282 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3283 and see if we can refine the range. Some of the cases below may not
3284 happen, but it doesn't seem worth worrying about this. We "continue"
3285 the outer loop when we've changed something; otherwise we "break"
3286 the switch, which will "break" the while. */
3288 in_p = 0;
3289 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3291 while (1)
3293 code = TREE_CODE (exp);
3295 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3297 if (first_rtl_op (code) > 0)
3298 arg0 = TREE_OPERAND (exp, 0);
3299 if (TREE_CODE_CLASS (code) == '<'
3300 || TREE_CODE_CLASS (code) == '1'
3301 || TREE_CODE_CLASS (code) == '2')
3302 type = TREE_TYPE (arg0);
3303 if (TREE_CODE_CLASS (code) == '2'
3304 || TREE_CODE_CLASS (code) == '<'
3305 || (TREE_CODE_CLASS (code) == 'e'
3306 && TREE_CODE_LENGTH (code) > 1))
3307 arg1 = TREE_OPERAND (exp, 1);
3310 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3311 lose a cast by accident. */
3312 if (type != NULL_TREE && orig_type == NULL_TREE)
3313 orig_type = type;
3315 switch (code)
3317 case TRUTH_NOT_EXPR:
3318 in_p = ! in_p, exp = arg0;
3319 continue;
3321 case EQ_EXPR: case NE_EXPR:
3322 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3323 /* We can only do something if the range is testing for zero
3324 and if the second operand is an integer constant. Note that
3325 saying something is "in" the range we make is done by
3326 complementing IN_P since it will set in the initial case of
3327 being not equal to zero; "out" is leaving it alone. */
3328 if (low == 0 || high == 0
3329 || ! integer_zerop (low) || ! integer_zerop (high)
3330 || TREE_CODE (arg1) != INTEGER_CST)
3331 break;
3333 switch (code)
3335 case NE_EXPR: /* - [c, c] */
3336 low = high = arg1;
3337 break;
3338 case EQ_EXPR: /* + [c, c] */
3339 in_p = ! in_p, low = high = arg1;
3340 break;
3341 case GT_EXPR: /* - [-, c] */
3342 low = 0, high = arg1;
3343 break;
3344 case GE_EXPR: /* + [c, -] */
3345 in_p = ! in_p, low = arg1, high = 0;
3346 break;
3347 case LT_EXPR: /* - [c, -] */
3348 low = arg1, high = 0;
3349 break;
3350 case LE_EXPR: /* + [-, c] */
3351 in_p = ! in_p, low = 0, high = arg1;
3352 break;
3353 default:
3354 abort ();
3357 exp = arg0;
3359 /* If this is an unsigned comparison, we also know that EXP is
3360 greater than or equal to zero. We base the range tests we make
3361 on that fact, so we record it here so we can parse existing
3362 range tests. */
3363 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3365 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3366 1, fold_convert (type, integer_zero_node),
3367 NULL_TREE))
3368 break;
3370 in_p = n_in_p, low = n_low, high = n_high;
3372 /* If the high bound is missing, but we have a nonzero low
3373 bound, reverse the range so it goes from zero to the low bound
3374 minus 1. */
3375 if (high == 0 && low && ! integer_zerop (low))
3377 in_p = ! in_p;
3378 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3379 integer_one_node, 0);
3380 low = fold_convert (type, integer_zero_node);
3383 continue;
3385 case NEGATE_EXPR:
3386 /* (-x) IN [a,b] -> x in [-b, -a] */
3387 n_low = range_binop (MINUS_EXPR, type,
3388 fold_convert (type, integer_zero_node),
3389 0, high, 1);
3390 n_high = range_binop (MINUS_EXPR, type,
3391 fold_convert (type, integer_zero_node),
3392 0, low, 0);
3393 low = n_low, high = n_high;
3394 exp = arg0;
3395 continue;
3397 case BIT_NOT_EXPR:
3398 /* ~ X -> -X - 1 */
3399 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3400 fold_convert (type, integer_one_node));
3401 continue;
3403 case PLUS_EXPR: case MINUS_EXPR:
3404 if (TREE_CODE (arg1) != INTEGER_CST)
3405 break;
3407 /* If EXP is signed, any overflow in the computation is undefined,
3408 so we don't worry about it so long as our computations on
3409 the bounds don't overflow. For unsigned, overflow is defined
3410 and this is exactly the right thing. */
3411 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3412 type, low, 0, arg1, 0);
3413 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3414 type, high, 1, arg1, 0);
3415 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3416 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3417 break;
3419 /* Check for an unsigned range which has wrapped around the maximum
3420 value thus making n_high < n_low, and normalize it. */
3421 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3423 low = range_binop (PLUS_EXPR, type, n_high, 0,
3424 integer_one_node, 0);
3425 high = range_binop (MINUS_EXPR, type, n_low, 0,
3426 integer_one_node, 0);
3428 /* If the range is of the form +/- [ x+1, x ], we won't
3429 be able to normalize it. But then, it represents the
3430 whole range or the empty set, so make it
3431 +/- [ -, - ]. */
3432 if (tree_int_cst_equal (n_low, low)
3433 && tree_int_cst_equal (n_high, high))
3434 low = high = 0;
3435 else
3436 in_p = ! in_p;
3438 else
3439 low = n_low, high = n_high;
3441 exp = arg0;
3442 continue;
3444 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3445 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3446 break;
3448 if (! INTEGRAL_TYPE_P (type)
3449 || (low != 0 && ! int_fits_type_p (low, type))
3450 || (high != 0 && ! int_fits_type_p (high, type)))
3451 break;
3453 n_low = low, n_high = high;
3455 if (n_low != 0)
3456 n_low = fold_convert (type, n_low);
3458 if (n_high != 0)
3459 n_high = fold_convert (type, n_high);
3461 /* If we're converting from an unsigned to a signed type,
3462 we will be doing the comparison as unsigned. The tests above
3463 have already verified that LOW and HIGH are both positive.
3465 So we have to make sure that the original unsigned value will
3466 be interpreted as positive. */
3467 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3469 tree equiv_type = (*lang_hooks.types.type_for_mode)
3470 (TYPE_MODE (type), 1);
3471 tree high_positive;
3473 /* A range without an upper bound is, naturally, unbounded.
3474 Since convert would have cropped a very large value, use
3475 the max value for the destination type. */
3476 high_positive
3477 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3478 : TYPE_MAX_VALUE (type);
3480 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3481 high_positive = fold (build (RSHIFT_EXPR, type,
3482 fold_convert (type,
3483 high_positive),
3484 fold_convert (type,
3485 integer_one_node)));
3487 /* If the low bound is specified, "and" the range with the
3488 range for which the original unsigned value will be
3489 positive. */
3490 if (low != 0)
3492 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3493 1, n_low, n_high, 1,
3494 fold_convert (type, integer_zero_node),
3495 high_positive))
3496 break;
3498 in_p = (n_in_p == in_p);
3500 else
3502 /* Otherwise, "or" the range with the range of the input
3503 that will be interpreted as negative. */
3504 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3505 0, n_low, n_high, 1,
3506 fold_convert (type, integer_zero_node),
3507 high_positive))
3508 break;
3510 in_p = (in_p != n_in_p);
3514 exp = arg0;
3515 low = n_low, high = n_high;
3516 continue;
3518 default:
3519 break;
3522 break;
3525 /* If EXP is a constant, we can evaluate whether this is true or false. */
3526 if (TREE_CODE (exp) == INTEGER_CST)
3528 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3529 exp, 0, low, 0))
3530 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3531 exp, 1, high, 1)));
3532 low = high = 0;
3533 exp = 0;
3536 *pin_p = in_p, *plow = low, *phigh = high;
3537 return exp;
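#if 0
/* Illustration only: the BIT_NOT_EXPR case above relies on the
   two's-complement identity ~x == -x - 1, which turns a range test
   on ~X into an ordinary test on X.  */
static int
bit_not_identity_example (int x)
{
  return ~x == -x - 1;		/* Always 1 on two's complement.  */
}
#endif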
3540 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3541 type, TYPE, return an expression to test if EXP is in (or out of, depending
3542 on IN_P) the range. */
3544 static tree
3545 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3547 tree etype = TREE_TYPE (exp);
3548 tree value;
3550 if (! in_p
3551 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3552 return invert_truthvalue (value);
3554 if (low == 0 && high == 0)
3555 return fold_convert (type, integer_one_node);
3557 if (low == 0)
3558 return fold (build (LE_EXPR, type, exp, high));
3560 if (high == 0)
3561 return fold (build (GE_EXPR, type, exp, low));
3563 if (operand_equal_p (low, high, 0))
3564 return fold (build (EQ_EXPR, type, exp, low));
3566 if (integer_zerop (low))
3568 if (! TREE_UNSIGNED (etype))
3570 etype = (*lang_hooks.types.unsigned_type) (etype);
3571 high = fold_convert (etype, high);
3572 exp = fold_convert (etype, exp);
3574 return build_range_check (type, exp, 1, 0, high);
3577 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3578 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3580 unsigned HOST_WIDE_INT lo;
3581 HOST_WIDE_INT hi;
3582 int prec;
3584 prec = TYPE_PRECISION (etype);
3585 if (prec <= HOST_BITS_PER_WIDE_INT)
3587 hi = 0;
3588 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3590 else
3592 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3593 lo = (unsigned HOST_WIDE_INT) -1;
3596 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3598 if (TREE_UNSIGNED (etype))
3600 etype = (*lang_hooks.types.signed_type) (etype);
3601 exp = fold_convert (etype, exp);
3603 return fold (build (GT_EXPR, type, exp,
3604 fold_convert (etype, integer_zero_node)));
3608 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3609 && ! TREE_OVERFLOW (value))
3610 return build_range_check (type,
3611 fold (build (MINUS_EXPR, etype, exp, low)),
3612 1, fold_convert (etype, integer_zero_node),
3613 value);
3615 return 0;
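#if 0
/* Illustration only of the (c >= 1) && (c <= 127) rewrite above:
   viewing the byte as signed sends 128..255 to negative values, so
   one signed comparison replaces two.  */
static int
signed_char_range_example (unsigned char c)
{
  /* c >= 1 && c <= 127  */
  return (signed char) c > 0;
}
#endif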
3618 /* Given two ranges, see if we can merge them into one. Return 1 if we
3619 can, 0 if we can't. Set the output range into the specified parameters. */
3621 static int
3622 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3623 tree high0, int in1_p, tree low1, tree high1)
3625 int no_overlap;
3626 int subset;
3627 int temp;
3628 tree tem;
3629 int in_p;
3630 tree low, high;
3631 int lowequal = ((low0 == 0 && low1 == 0)
3632 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3633 low0, 0, low1, 0)));
3634 int highequal = ((high0 == 0 && high1 == 0)
3635 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3636 high0, 1, high1, 1)));
3638 /* Make range 0 be the range that starts first, or ends last if they
3639 start at the same value. Swap them if it isn't. */
3640 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3641 low0, 0, low1, 0))
3642 || (lowequal
3643 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3644 high1, 1, high0, 1))))
3646 temp = in0_p, in0_p = in1_p, in1_p = temp;
3647 tem = low0, low0 = low1, low1 = tem;
3648 tem = high0, high0 = high1, high1 = tem;
3651 /* Now flag two cases, whether the ranges are disjoint or whether the
3652 second range is totally subsumed in the first. Note that the tests
3653 below are simplified by the ones above. */
3654 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3655 high0, 1, low1, 0));
3656 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3657 high1, 1, high0, 1));
3659 /* We now have four cases, depending on whether we are including or
3660 excluding the two ranges. */
3661 if (in0_p && in1_p)
3663 /* If they don't overlap, the result is false. If the second range
3664 is a subset it is the result. Otherwise, the range is from the start
3665 of the second to the end of the first. */
3666 if (no_overlap)
3667 in_p = 0, low = high = 0;
3668 else if (subset)
3669 in_p = 1, low = low1, high = high1;
3670 else
3671 in_p = 1, low = low1, high = high0;
3674 else if (in0_p && ! in1_p)
3676 /* If they don't overlap, the result is the first range. If they are
3677 equal, the result is false. If the second range is a subset of the
3678 first, and the ranges begin at the same place, we go from just after
3679 the end of the first range to the end of the second. If the second
3680 range is not a subset of the first, or if it is a subset and both
3681 ranges end at the same place, the range starts at the start of the
3682 first range and ends just before the second range.
3683 Otherwise, we can't describe this as a single range. */
3684 if (no_overlap)
3685 in_p = 1, low = low0, high = high0;
3686 else if (lowequal && highequal)
3687 in_p = 0, low = high = 0;
3688 else if (subset && lowequal)
3690 in_p = 1, high = high0;
3691 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3692 integer_one_node, 0);
3694 else if (! subset || highequal)
3696 in_p = 1, low = low0;
3697 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3698 integer_one_node, 0);
3700 else
3701 return 0;
3704 else if (! in0_p && in1_p)
3706 /* If they don't overlap, the result is the second range. If the second
3707 is a subset of the first, the result is false. Otherwise,
3708 the range starts just after the first range and ends at the
3709 end of the second. */
3710 if (no_overlap)
3711 in_p = 1, low = low1, high = high1;
3712 else if (subset || highequal)
3713 in_p = 0, low = high = 0;
3714 else
3716 in_p = 1, high = high1;
3717 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3718 integer_one_node, 0);
3722 else
3724 /* The case where we are excluding both ranges. Here the complex case
3725 is if they don't overlap. In that case, the only time we have a
3726 range is if they are adjacent. If the second is a subset of the
3727 first, the result is the first. Otherwise, the range to exclude
3728 starts at the beginning of the first range and ends at the end of the
3729 second. */
3730 if (no_overlap)
3732 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3733 range_binop (PLUS_EXPR, NULL_TREE,
3734 high0, 1,
3735 integer_one_node, 1),
3736 1, low1, 0)))
3737 in_p = 0, low = low0, high = high1;
3738 else
3739 return 0;
3741 else if (subset)
3742 in_p = 0, low = low0, high = high0;
3743 else
3744 in_p = 0, low = low0, high = high1;
3747 *pin_p = in_p, *plow = low, *phigh = high;
3748 return 1;
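#if 0
/* Illustration only: excluding two adjacent ranges merges into
   excluding their union, one of the "excluding both" cases handled
   above.  */
static int
merge_ranges_example (int x)
{
  /* (x < 0 || x > 3) && (x < 4 || x > 9)  becomes  */
  return x < 0 || x > 9;	/* i.e. - [0, 9] */
}
#endif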
3751 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3752 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3753 #endif
3755 /* EXP is some logical combination of boolean tests. See if we can
3756 merge it into some range test. Return the new tree if so. */
3758 static tree
3759 fold_range_test (tree exp)
3761 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3762 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3763 int in0_p, in1_p, in_p;
3764 tree low0, low1, low, high0, high1, high;
3765 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3766 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3767 tree tem;
3769 /* If this is an OR operation, invert both sides; we will invert
3770 again at the end. */
3771 if (or_op)
3772 in0_p = ! in0_p, in1_p = ! in1_p;
3774 /* If both expressions are the same, if we can merge the ranges, and we
3775 can build the range test, return it or its inversion. If one of the
3776 ranges is always true or always false, consider it to be the same
3777 expression as the other. */
3778 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3779 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3780 in1_p, low1, high1)
3781 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3782 lhs != 0 ? lhs
3783 : rhs != 0 ? rhs : integer_zero_node,
3784 in_p, low, high))))
3785 return or_op ? invert_truthvalue (tem) : tem;
3787 /* On machines where branches are expensive, if this is a
3788 short-circuited branch and the underlying object on both sides
3789 is the same, make a non-short-circuit operation. */
3790 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3791 && lhs != 0 && rhs != 0
3792 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3793 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3794 && operand_equal_p (lhs, rhs, 0))
3796 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3797 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3798 which cases we can't do this. */
3799 if (simple_operand_p (lhs))
3800 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3801 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3802 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3803 TREE_OPERAND (exp, 1));
3805 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3806 && ! CONTAINS_PLACEHOLDER_P (lhs))
3808 tree common = save_expr (lhs);
3810 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3811 or_op ? ! in0_p : in0_p,
3812 low0, high0))
3813 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3814 or_op ? ! in1_p : in1_p,
3815 low1, high1))))
3816 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3817 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3818 TREE_TYPE (exp), lhs, rhs);
3822 return 0;
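/* An illustrative sketch (hypothetical source expression): for the
   classic range test "ch >= '0' && ch <= '9'", make_range yields
   in0_p = 1 with low = '0' (high unbounded) and in1_p = 1 with
   high = '9' (low unbounded); merge_ranges combines them into
   in_p = 1, ['0', '9'], and build_range_check can then emit a single
   unsigned comparison of the form (unsigned) (ch - '0') <= 9.  */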
3825 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3826 bit value. Arrange things so the extra bits will be set to zero if and
3827 only if C is sign-extended to its full width. If MASK is nonzero,
3828 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3830 static tree
3831 unextend (tree c, int p, int unsignedp, tree mask)
3833 tree type = TREE_TYPE (c);
3834 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3835 tree temp;
3837 if (p == modesize || unsignedp)
3838 return c;
3840 /* We work by getting just the sign bit into the low-order bit, then
3841 into the high-order bit, then sign-extend. We then XOR that value
3842 with C. */
3843 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3844 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3846 /* We must use a signed type in order to get an arithmetic right shift.
3847 However, we must also avoid introducing accidental overflows, so that
3848 a subsequent call to integer_zerop will work. Hence we must
3849 do the type conversion here. At this point, the constant is either
3850 zero or one, and the conversion to a signed type can never overflow.
3851 We could get an overflow if this conversion is done anywhere else. */
3852 if (TREE_UNSIGNED (type))
3853 temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3855 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3856 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3857 if (mask != 0)
3858 temp = const_binop (BIT_AND_EXPR, temp,
3859 fold_convert (TREE_TYPE (c), mask), 0);
3860 /* If necessary, convert the type back to match the type of C. */
3861 if (TREE_UNSIGNED (type))
3862 temp = fold_convert (type, temp);
3864 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
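/* Worked example with hypothetical constants: for p = 8 in a 32-bit
   mode with c = 0xffffff80 (the sign extension of the 8-bit value
   0x80), temp is (c >> 7) & 1 = 1, then 1 << 31 = 0x80000000, and the
   arithmetic right shift by 23 gives 0xffffff00; c ^ temp = 0x00000080,
   so the extra bits are zero precisely because C was sign-extended.
   Had c been zero-extended (0x00000080), the result would have been
   0xffffff80, with the extra bits set.  */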
3867 /* Find ways of folding logical expressions of LHS and RHS:
3868 Try to merge two comparisons to the same innermost item.
3869 Look for range tests like "ch >= '0' && ch <= '9'".
3870 Look for combinations of simple terms on machines with expensive branches
3871 and evaluate the RHS unconditionally.
3873 For example, if we have p->a == 2 && p->b == 4 and we can make an
3874 object large enough to span both A and B, we can do this with a comparison
3875 against the object ANDed with a mask.
3877 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3878 operations to do this with one comparison.
3880 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3881 function and the one above.
3883 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3884 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3886 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3887 two operands.
3889 We return the simplified tree or 0 if no optimization is possible. */
3891 static tree
3892 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3894 /* If this is the "or" of two comparisons, we can do something if
3895 the comparisons are NE_EXPR. If this is the "and", we can do something
3896 if the comparisons are EQ_EXPR. I.e.,
3897 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3899 WANTED_CODE is this operation code. For single bit fields, we can
3900 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3901 comparison for one-bit fields. */
3903 enum tree_code wanted_code;
3904 enum tree_code lcode, rcode;
3905 tree ll_arg, lr_arg, rl_arg, rr_arg;
3906 tree ll_inner, lr_inner, rl_inner, rr_inner;
3907 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3908 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3909 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3910 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3911 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3912 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3913 enum machine_mode lnmode, rnmode;
3914 tree ll_mask, lr_mask, rl_mask, rr_mask;
3915 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3916 tree l_const, r_const;
3917 tree lntype, rntype, result;
3918 int first_bit, end_bit;
3919 int volatilep;
3921 /* Start by getting the comparison codes. Fail if anything is volatile.
3922 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3923 it were surrounded with a NE_EXPR. */
3925 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3926 return 0;
3928 lcode = TREE_CODE (lhs);
3929 rcode = TREE_CODE (rhs);
3931 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3932 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3934 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3935 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3937 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3938 return 0;
3940 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3941 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3943 ll_arg = TREE_OPERAND (lhs, 0);
3944 lr_arg = TREE_OPERAND (lhs, 1);
3945 rl_arg = TREE_OPERAND (rhs, 0);
3946 rr_arg = TREE_OPERAND (rhs, 1);
3948 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3949 if (simple_operand_p (ll_arg)
3950 && simple_operand_p (lr_arg)
3951 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3953 int compcode;
3955 if (operand_equal_p (ll_arg, rl_arg, 0)
3956 && operand_equal_p (lr_arg, rr_arg, 0))
3958 int lcompcode, rcompcode;
3960 lcompcode = comparison_to_compcode (lcode);
3961 rcompcode = comparison_to_compcode (rcode);
3962 compcode = (code == TRUTH_AND_EXPR)
3963 ? lcompcode & rcompcode
3964 : lcompcode | rcompcode;
3966 else if (operand_equal_p (ll_arg, rr_arg, 0)
3967 && operand_equal_p (lr_arg, rl_arg, 0))
3969 int lcompcode, rcompcode;
3971 rcode = swap_tree_comparison (rcode);
3972 lcompcode = comparison_to_compcode (lcode);
3973 rcompcode = comparison_to_compcode (rcode);
3974 compcode = (code == TRUTH_AND_EXPR)
3975 ? lcompcode & rcompcode
3976 : lcompcode | rcompcode;
3978 else
3979 compcode = -1;
3981 if (compcode == COMPCODE_TRUE)
3982 return fold_convert (truth_type, integer_one_node);
3983 else if (compcode == COMPCODE_FALSE)
3984 return fold_convert (truth_type, integer_zero_node);
3985 else if (compcode != -1)
3986 return build (compcode_to_comparison (compcode),
3987 truth_type, ll_arg, lr_arg);
3990 /* If the RHS can be evaluated unconditionally and its operands are
3991 simple, it wins to evaluate the RHS unconditionally on machines
3992 with expensive branches. In this case, this isn't a comparison
3993 that can be merged. Avoid doing this if the RHS is a floating-point
3994 comparison since those can trap. */
3996 if (BRANCH_COST >= 2
3997 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3998 && simple_operand_p (rl_arg)
3999 && simple_operand_p (rr_arg))
4001 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4002 if (code == TRUTH_OR_EXPR
4003 && lcode == NE_EXPR && integer_zerop (lr_arg)
4004 && rcode == NE_EXPR && integer_zerop (rr_arg)
4005 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4006 return build (NE_EXPR, truth_type,
4007 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4008 ll_arg, rl_arg),
4009 integer_zero_node);
4011 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4012 if (code == TRUTH_AND_EXPR
4013 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4014 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4015 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4016 return build (EQ_EXPR, truth_type,
4017 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4018 ll_arg, rl_arg),
4019 integer_zero_node);
4021 return build (code, truth_type, lhs, rhs);
4024 /* See if the comparisons can be merged. Then get all the parameters for
4025 each side. */
4027 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4028 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4029 return 0;
4031 volatilep = 0;
4032 ll_inner = decode_field_reference (ll_arg,
4033 &ll_bitsize, &ll_bitpos, &ll_mode,
4034 &ll_unsignedp, &volatilep, &ll_mask,
4035 &ll_and_mask);
4036 lr_inner = decode_field_reference (lr_arg,
4037 &lr_bitsize, &lr_bitpos, &lr_mode,
4038 &lr_unsignedp, &volatilep, &lr_mask,
4039 &lr_and_mask);
4040 rl_inner = decode_field_reference (rl_arg,
4041 &rl_bitsize, &rl_bitpos, &rl_mode,
4042 &rl_unsignedp, &volatilep, &rl_mask,
4043 &rl_and_mask);
4044 rr_inner = decode_field_reference (rr_arg,
4045 &rr_bitsize, &rr_bitpos, &rr_mode,
4046 &rr_unsignedp, &volatilep, &rr_mask,
4047 &rr_and_mask);
4049 /* The inner operation on the lhs of each comparison must be the
4050 same if we are to be able to do anything.
4051 Then see if we have constants. If not, the same must be true for
4052 the rhs's. */
4053 if (volatilep || ll_inner == 0 || rl_inner == 0
4054 || ! operand_equal_p (ll_inner, rl_inner, 0))
4055 return 0;
4057 if (TREE_CODE (lr_arg) == INTEGER_CST
4058 && TREE_CODE (rr_arg) == INTEGER_CST)
4059 l_const = lr_arg, r_const = rr_arg;
4060 else if (lr_inner == 0 || rr_inner == 0
4061 || ! operand_equal_p (lr_inner, rr_inner, 0))
4062 return 0;
4063 else
4064 l_const = r_const = 0;
4066 /* If either comparison code is not correct for our logical operation,
4067 fail. However, we can convert a one-bit comparison against zero into
4068 the opposite comparison against that bit being set in the field. */
4070 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4071 if (lcode != wanted_code)
4073 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4075 /* Make the left operand unsigned, since we are only interested
4076 in the value of one bit. Otherwise we are doing the wrong
4077 thing below. */
4078 ll_unsignedp = 1;
4079 l_const = ll_mask;
4081 else
4082 return 0;
4085 /* This is analogous to the code for l_const above. */
4086 if (rcode != wanted_code)
4088 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4090 rl_unsignedp = 1;
4091 r_const = rl_mask;
4093 else
4094 return 0;
4097 /* After this point all optimizations will generate bit-field
4098 references, which we might not want. */
4099 if (! (*lang_hooks.can_use_bit_fields_p) ())
4100 return 0;
4102 /* See if we can find a mode that contains both fields being compared on
4103 the left. If we can't, fail. Otherwise, update all constants and masks
4104 to be relative to a field of that size. */
4105 first_bit = MIN (ll_bitpos, rl_bitpos);
4106 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4107 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4108 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4109 volatilep);
4110 if (lnmode == VOIDmode)
4111 return 0;
4113 lnbitsize = GET_MODE_BITSIZE (lnmode);
4114 lnbitpos = first_bit & ~ (lnbitsize - 1);
4115 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4116 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4118 if (BYTES_BIG_ENDIAN)
4120 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4121 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4124 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4125 size_int (xll_bitpos), 0);
4126 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4127 size_int (xrl_bitpos), 0);
4129 if (l_const)
4131 l_const = fold_convert (lntype, l_const);
4132 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4133 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4134 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4135 fold (build1 (BIT_NOT_EXPR,
4136 lntype, ll_mask)),
4137 0)))
4139 warning ("comparison is always %d", wanted_code == NE_EXPR);
4141 return fold_convert (truth_type,
4142 wanted_code == NE_EXPR
4143 ? integer_one_node : integer_zero_node);
4146 if (r_const)
4148 r_const = fold_convert (lntype, r_const);
4149 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4150 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4151 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4152 fold (build1 (BIT_NOT_EXPR,
4153 lntype, rl_mask)),
4154 0)))
4156 warning ("comparison is always %d", wanted_code == NE_EXPR);
4158 return fold_convert (truth_type,
4159 wanted_code == NE_EXPR
4160 ? integer_one_node : integer_zero_node);
4164 /* If the right sides are not constant, do the same for them. Also,
4165 disallow this optimization if a size or signedness mismatch occurs
4166 between the left and right sides. */
4167 if (l_const == 0)
4169 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4170 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4171 /* Make sure the two fields on the right
4172 correspond to the left without being swapped. */
4173 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4174 return 0;
4176 first_bit = MIN (lr_bitpos, rr_bitpos);
4177 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4178 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4179 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4180 volatilep);
4181 if (rnmode == VOIDmode)
4182 return 0;
4184 rnbitsize = GET_MODE_BITSIZE (rnmode);
4185 rnbitpos = first_bit & ~ (rnbitsize - 1);
4186 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4187 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4189 if (BYTES_BIG_ENDIAN)
4191 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4192 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4195 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4196 size_int (xlr_bitpos), 0);
4197 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4198 size_int (xrr_bitpos), 0);
4200 /* Make a mask that corresponds to both fields being compared.
4201 Do this for both items being compared. If the operands are the
4202 same size and the bits being compared are in the same position
4203 then we can do this by masking both and comparing the masked
4204 results. */
4205 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4206 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4207 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4209 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4210 ll_unsignedp || rl_unsignedp);
4211 if (! all_ones_mask_p (ll_mask, lnbitsize))
4212 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4214 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4215 lr_unsignedp || rr_unsignedp);
4216 if (! all_ones_mask_p (lr_mask, rnbitsize))
4217 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4219 return build (wanted_code, truth_type, lhs, rhs);
4222 /* There is still another way we can do something: If both pairs of
4223 fields being compared are adjacent, we may be able to make a wider
4224 field containing them both.
4226 Note that we still must mask the lhs/rhs expressions. Furthermore,
4227 the mask must be shifted to account for the shift done by
4228 make_bit_field_ref. */
4229 if ((ll_bitsize + ll_bitpos == rl_bitpos
4230 && lr_bitsize + lr_bitpos == rr_bitpos)
4231 || (ll_bitpos == rl_bitpos + rl_bitsize
4232 && lr_bitpos == rr_bitpos + rr_bitsize))
4234 tree type;
4236 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4237 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4238 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4239 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4241 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4242 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4243 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4244 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4246 /* Convert to the smaller type before masking out unwanted bits. */
4247 type = lntype;
4248 if (lntype != rntype)
4250 if (lnbitsize > rnbitsize)
4252 lhs = fold_convert (rntype, lhs);
4253 ll_mask = fold_convert (rntype, ll_mask);
4254 type = rntype;
4256 else if (lnbitsize < rnbitsize)
4258 rhs = fold_convert (lntype, rhs);
4259 lr_mask = fold_convert (lntype, lr_mask);
4260 type = lntype;
4264 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4265 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4267 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4268 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4270 return build (wanted_code, truth_type, lhs, rhs);
4273 return 0;
4276 /* Handle the case of comparisons with constants. If there is something in
4277 common between the masks, those bits of the constants must be the same.
4278 If not, the condition is always false. Test for this to avoid generating
4279 incorrect code below. */
4280 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4281 if (! integer_zerop (result)
4282 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4283 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4285 if (wanted_code == NE_EXPR)
4287 warning ("`or' of unmatched not-equal tests is always 1");
4288 return fold_convert (truth_type, integer_one_node);
4290 else
4292 warning ("`and' of mutually exclusive equal-tests is always 0");
4293 return fold_convert (truth_type, integer_zero_node);
4297 /* Construct the expression we will return. First get the component
4298 reference we will make. Unless the mask is all ones the width of
4299 that field, perform the mask operation. Then compare with the
4300 merged constant. */
4301 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4302 ll_unsignedp || rl_unsignedp);
4304 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4305 if (! all_ones_mask_p (ll_mask, lnbitsize))
4306 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4308 return build (wanted_code, truth_type, result,
4309 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
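/* A hypothetical end-to-end example: if bit-fields a and b each occupy
   8 bits of the same 16-bit word, "p->a == 2 && p->b == 4" can merge
   into one 16-bit load, mask, and compare, conceptually
   w == ((4 << 8) | 2) on a little-endian target with all-ones masks.
   The exact mode, masks, shifts, and merged constant depend on
   BYTES_BIG_ENDIAN and on what get_best_mode returns.  */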
4312 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4313 constant. */
4315 static tree
4316 optimize_minmax_comparison (tree t)
4318 tree type = TREE_TYPE (t);
4319 tree arg0 = TREE_OPERAND (t, 0);
4320 enum tree_code op_code;
4321 tree comp_const = TREE_OPERAND (t, 1);
4322 tree minmax_const;
4323 int consts_equal, consts_lt;
4324 tree inner;
4326 STRIP_SIGN_NOPS (arg0);
4328 op_code = TREE_CODE (arg0);
4329 minmax_const = TREE_OPERAND (arg0, 1);
4330 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4331 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4332 inner = TREE_OPERAND (arg0, 0);
4334 /* If something does not permit us to optimize, return the original tree. */
4335 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4336 || TREE_CODE (comp_const) != INTEGER_CST
4337 || TREE_CONSTANT_OVERFLOW (comp_const)
4338 || TREE_CODE (minmax_const) != INTEGER_CST
4339 || TREE_CONSTANT_OVERFLOW (minmax_const))
4340 return t;
4342 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4343 and GT_EXPR, doing the rest with recursive calls using logical
4344 simplifications. */
4345 switch (TREE_CODE (t))
4347 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4348 return
4349 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4351 case GE_EXPR:
4352 return
4353 fold (build (TRUTH_ORIF_EXPR, type,
4354 optimize_minmax_comparison
4355 (build (EQ_EXPR, type, arg0, comp_const)),
4356 optimize_minmax_comparison
4357 (build (GT_EXPR, type, arg0, comp_const))));
4359 case EQ_EXPR:
4360 if (op_code == MAX_EXPR && consts_equal)
4361 /* MAX (X, 0) == 0 -> X <= 0 */
4362 return fold (build (LE_EXPR, type, inner, comp_const));
4364 else if (op_code == MAX_EXPR && consts_lt)
4365 /* MAX (X, 0) == 5 -> X == 5 */
4366 return fold (build (EQ_EXPR, type, inner, comp_const));
4368 else if (op_code == MAX_EXPR)
4369 /* MAX (X, 0) == -1 -> false */
4370 return omit_one_operand (type, integer_zero_node, inner);
4372 else if (consts_equal)
4373 /* MIN (X, 0) == 0 -> X >= 0 */
4374 return fold (build (GE_EXPR, type, inner, comp_const));
4376 else if (consts_lt)
4377 /* MIN (X, 0) == 5 -> false */
4378 return omit_one_operand (type, integer_zero_node, inner);
4380 else
4381 /* MIN (X, 0) == -1 -> X == -1 */
4382 return fold (build (EQ_EXPR, type, inner, comp_const));
4384 case GT_EXPR:
4385 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4386 /* MAX (X, 0) > 0 -> X > 0
4387 MAX (X, 0) > 5 -> X > 5 */
4388 return fold (build (GT_EXPR, type, inner, comp_const));
4390 else if (op_code == MAX_EXPR)
4391 /* MAX (X, 0) > -1 -> true */
4392 return omit_one_operand (type, integer_one_node, inner);
4394 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4395 /* MIN (X, 0) > 0 -> false
4396 MIN (X, 0) > 5 -> false */
4397 return omit_one_operand (type, integer_zero_node, inner);
4399 else
4400 /* MIN (X, 0) > -1 -> X > -1 */
4401 return fold (build (GT_EXPR, type, inner, comp_const));
4403 default:
4404 return t;
4408 /* T is an integer expression that is being multiplied by, divided by, or
4409 reduced modulo a constant C (CODE says which operation and what kind of
4410 divide or modulus). See if we can eliminate that operation by folding it with
4411 other operations already in T. WIDE_TYPE, if non-null, is a type that
4412 should be used for the computation if wider than our type.
4414 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4415 (X * 2) + (Y * 4). We must, however, be assured that either the original
4416 expression would not overflow or that overflow is undefined for the type
4417 in the language in question.
4419 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4420 the machine has a multiply-accumulate insn or that this is part of an
4421 addressing calculation.
4423 If we return a non-null expression, it is an equivalent form of the
4424 original computation, but need not be in the original type. */
4426 static tree
4427 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4429 /* To avoid exponential search depth, refuse to allow recursion past
4430 three levels. Beyond that (1) it's highly unlikely that we'll find
4431 something interesting and (2) we've probably processed it before
4432 when we built the inner expression. */
4434 static int depth;
4435 tree ret;
4437 if (depth > 3)
4438 return NULL;
4440 depth++;
4441 ret = extract_muldiv_1 (t, c, code, wide_type);
4442 depth--;
4444 return ret;
4447 static tree
4448 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4450 tree type = TREE_TYPE (t);
4451 enum tree_code tcode = TREE_CODE (t);
4452 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4453 > GET_MODE_SIZE (TYPE_MODE (type)))
4454 ? wide_type : type);
4455 tree t1, t2;
4456 int same_p = tcode == code;
4457 tree op0 = NULL_TREE, op1 = NULL_TREE;
4459 /* Don't deal with constants of zero here; they confuse the code below. */
4460 if (integer_zerop (c))
4461 return NULL_TREE;
4463 if (TREE_CODE_CLASS (tcode) == '1')
4464 op0 = TREE_OPERAND (t, 0);
4466 if (TREE_CODE_CLASS (tcode) == '2')
4467 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4469 /* Note that we need not handle conditional operations here since fold
4470 already handles those cases. So just do arithmetic here. */
4471 switch (tcode)
4473 case INTEGER_CST:
4474 /* For a constant, we can always simplify if we are a multiply
4475 or (for divide and modulus) if it is a multiple of our constant. */
4476 if (code == MULT_EXPR
4477 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4478 return const_binop (code, fold_convert (ctype, t),
4479 fold_convert (ctype, c), 0);
4480 break;
4482 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4483 /* If op0 is an expression ... */
4484 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4485 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4486 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4487 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4488 /* ... and is unsigned, and its type is smaller than ctype,
4489 then we cannot pass through as widening. */
4490 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4491 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4492 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4493 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4494 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4495 /* ... or its type is larger than ctype,
4496 then we cannot pass through this truncation. */
4497 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4498 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4499 /* ... or signedness changes for division or modulus,
4500 then we cannot pass through this conversion. */
4501 || (code != MULT_EXPR
4502 && (TREE_UNSIGNED (ctype)
4503 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4504 break;
4506 /* Pass the constant down and see if we can make a simplification. If
4507 we can, replace this expression with the inner simplification for
4508 possible later conversion to our or some other type. */
4509 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4510 && TREE_CODE (t2) == INTEGER_CST
4511 && ! TREE_CONSTANT_OVERFLOW (t2)
4512 && (0 != (t1 = extract_muldiv (op0, t2, code,
4513 code == MULT_EXPR
4514 ? ctype : NULL_TREE))))
4515 return t1;
4516 break;
4518 case NEGATE_EXPR: case ABS_EXPR:
4519 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4520 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4521 break;
4523 case MIN_EXPR: case MAX_EXPR:
4524 /* If widening the type changes the signedness, then we can't perform
4525 this optimization as that changes the result. */
4526 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4527 break;
4529 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4530 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4531 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4533 if (tree_int_cst_sgn (c) < 0)
4534 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4536 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4537 fold_convert (ctype, t2)));
4539 break;
4541 case WITH_RECORD_EXPR:
4542 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4543 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4544 TREE_OPERAND (t, 1));
4545 break;
4547 case LSHIFT_EXPR: case RSHIFT_EXPR:
4548 /* If the second operand is constant, this is a multiplication
4549 or floor division by a power of two, so we can treat it that
4550 way unless the multiplier or divisor overflows. */
4551 if (TREE_CODE (op1) == INTEGER_CST
4552 /* const_binop may not detect overflow correctly,
4553 so check for it explicitly here. */
4554 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4555 && TREE_INT_CST_HIGH (op1) == 0
4556 && 0 != (t1 = fold_convert (ctype,
4557 const_binop (LSHIFT_EXPR,
4558 size_one_node,
4559 op1, 0)))
4560 && ! TREE_OVERFLOW (t1))
4561 return extract_muldiv (build (tcode == LSHIFT_EXPR
4562 ? MULT_EXPR : FLOOR_DIV_EXPR,
4563 ctype, fold_convert (ctype, op0), t1),
4564 c, code, wide_type);
4565 break;
4567 case PLUS_EXPR: case MINUS_EXPR:
4568 /* See if we can eliminate the operation on both sides. If we can, we
4569 can return a new PLUS or MINUS. If we can't, the only remaining
4570 cases where we can do anything are if the second operand is a
4571 constant. */
4572 t1 = extract_muldiv (op0, c, code, wide_type);
4573 t2 = extract_muldiv (op1, c, code, wide_type);
4574 if (t1 != 0 && t2 != 0
4575 && (code == MULT_EXPR
4576 /* If not multiplication, we can only do this if both operands
4577 are divisible by c. */
4578 || (multiple_of_p (ctype, op0, c)
4579 && multiple_of_p (ctype, op1, c))))
4580 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4581 fold_convert (ctype, t2)));
4583 /* If this was a subtraction, negate OP1 and set it to be an addition.
4584 This simplifies the logic below. */
4585 if (tcode == MINUS_EXPR)
4586 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4588 if (TREE_CODE (op1) != INTEGER_CST)
4589 break;
4591 /* If either OP1 or C is negative, this optimization is not safe for
4592 some of the division and remainder types, while for others we need
4593 to change the code. */
4594 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4596 if (code == CEIL_DIV_EXPR)
4597 code = FLOOR_DIV_EXPR;
4598 else if (code == FLOOR_DIV_EXPR)
4599 code = CEIL_DIV_EXPR;
4600 else if (code != MULT_EXPR
4601 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4602 break;
4605 /* If it's a multiply or a division/modulus operation of a multiple
4606 of our constant, do the operation and verify it doesn't overflow. */
4607 if (code == MULT_EXPR
4608 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4610 op1 = const_binop (code, fold_convert (ctype, op1),
4611 fold_convert (ctype, c), 0);
4612 /* We allow the constant to overflow with wrapping semantics. */
4613 if (op1 == 0
4614 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4615 break;
4617 else
4618 break;
4620 /* If we have an unsigned type that is not a sizetype, we cannot widen
4621 the operation since it will change the result if the original
4622 computation overflowed. */
4623 if (TREE_UNSIGNED (ctype)
4624 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4625 && ctype != type)
4626 break;
4628 /* If we were able to eliminate our operation from the first side,
4629 apply our operation to the second side and reform the PLUS. */
4630 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4631 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4633 /* The last case is if we are a multiply. In that case, we can
4634 apply the distributive law to commute the multiply and addition
4635 if the multiplication of the constants doesn't overflow. */
4636 if (code == MULT_EXPR)
4637 return fold (build (tcode, ctype,
4638 fold (build (code, ctype,
4639 fold_convert (ctype, op0),
4640 fold_convert (ctype, c))),
4641 op1));
4643 break;
4645 case MULT_EXPR:
4646 /* We have a special case here if we are doing something like
4647 (C * 8) % 4 since we know that's zero. */
4648 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4649 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4650 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4651 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4652 return omit_one_operand (type, integer_zero_node, op0);
4654 /* ... fall through ... */
4656 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4657 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4658 /* If we can extract our operation from the LHS, do so and return a
4659 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4660 do something only if the second operand is a constant. */
4661 if (same_p
4662 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4663 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4664 fold_convert (ctype, op1)));
4665 else if (tcode == MULT_EXPR && code == MULT_EXPR
4666 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4667 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4668 fold_convert (ctype, t1)));
4669 else if (TREE_CODE (op1) != INTEGER_CST)
4670 return 0;
4672 /* If these are the same operation types, we can associate them
4673 assuming no overflow. */
4674 if (tcode == code
4675 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4676 fold_convert (ctype, c), 0))
4677 && ! TREE_OVERFLOW (t1))
4678 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4680 /* If these operations "cancel" each other, we have the main
4681 optimizations of this pass, which occur when either constant is a
4682 multiple of the other, in which case we replace this with an
4683 operation of either CODE or TCODE.
4685 If we have an unsigned type that is not a sizetype, we cannot do
4686 this since it will change the result if the original computation
4687 overflowed. */
4688 if ((! TREE_UNSIGNED (ctype)
4689 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4690 && ! flag_wrapv
4691 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4692 || (tcode == MULT_EXPR
4693 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4694 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4696 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4697 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4698 fold_convert (ctype,
4699 const_binop (TRUNC_DIV_EXPR,
4700 op1, c, 0))));
4701 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4702 return fold (build (code, ctype, fold_convert (ctype, op0),
4703 fold_convert (ctype,
4704 const_binop (TRUNC_DIV_EXPR,
4705 c, op1, 0))));
4707 break;
4709 default:
4710 break;
4713 return 0;
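/* Two illustrative cancellations (valid only under the no-overflow
   assumptions discussed above): for t = X * 12, c = 4 and
   code = EXACT_DIV_EXPR, op1 (12) is a multiple of c, so the result is
   X * 3; for t = X * 4 with c = 12, c is instead a multiple of op1,
   so the result is X / 3.  */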
4716 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4717 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4718 that we may sometimes modify the tree. */
4720 static tree
4721 strip_compound_expr (tree t, tree s)
4723 enum tree_code code = TREE_CODE (t);
4725 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4726 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4727 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4728 return TREE_OPERAND (t, 1);
4730 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4731 don't bother handling any other types. */
4732 else if (code == COND_EXPR)
4734 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4735 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4736 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4738 else if (TREE_CODE_CLASS (code) == '1')
4739 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4740 else if (TREE_CODE_CLASS (code) == '<'
4741 || TREE_CODE_CLASS (code) == '2')
4743 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4744 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4747 return t;
4750 /* Return a node which has the indicated constant VALUE (either 0 or
4751 1), and is of the indicated TYPE. */
4753 static tree
4754 constant_boolean_node (int value, tree type)
4756 if (type == integer_type_node)
4757 return value ? integer_one_node : integer_zero_node;
4758 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4759 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4760 integer_zero_node);
4761 else
4763 tree t = build_int_2 (value, 0);
4765 TREE_TYPE (t) = type;
4766 return t;
4770 /* Utility function for the following routine, to see how complex a nesting of
4771 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4772 we don't care (to avoid spending too much time on complex expressions). */
4774 static int
4775 count_cond (tree expr, int lim)
4777 int ctrue, cfalse;
4779 if (TREE_CODE (expr) != COND_EXPR)
4780 return 0;
4781 else if (lim <= 0)
4782 return 0;
4784 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4785 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4786 return MIN (lim, 1 + ctrue + cfalse);
4789 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4790 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4791 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4792 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4793 COND is the first argument to CODE; otherwise (as in the example
4794 given here), it is the second argument. TYPE is the type of the
4795 original expression. */
4797 static tree
4798 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4799 tree cond, tree arg, int cond_first_p)
4801 tree test, true_value, false_value;
4802 tree lhs = NULL_TREE;
4803 tree rhs = NULL_TREE;
4804 /* In the end, we'll produce a COND_EXPR. Both arms of the
4805 conditional expression will be binary operations. The left-hand
4806 side of the expression to be executed if the condition is true
4807 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4808 of the expression to be executed if the condition is true will be
4809 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4810 but apply to the expression to be executed if the conditional is
4811 false. */
4812 tree *true_lhs;
4813 tree *true_rhs;
4814 tree *false_lhs;
4815 tree *false_rhs;
4816 /* These are the codes to use for the left-hand side and right-hand
4817 side of the COND_EXPR. Normally, they are the same as CODE. */
4818 enum tree_code lhs_code = code;
4819 enum tree_code rhs_code = code;
4820 /* And these are the types of the expressions. */
4821 tree lhs_type = type;
4822 tree rhs_type = type;
4823 int save = 0;
4825 if (cond_first_p)
4827 true_rhs = false_rhs = &arg;
4828 true_lhs = &true_value;
4829 false_lhs = &false_value;
4831 else
4833 true_lhs = false_lhs = &arg;
4834 true_rhs = &true_value;
4835 false_rhs = &false_value;
4838 if (TREE_CODE (cond) == COND_EXPR)
4840 test = TREE_OPERAND (cond, 0);
4841 true_value = TREE_OPERAND (cond, 1);
4842 false_value = TREE_OPERAND (cond, 2);
4843 /* If this operand throws an exception, then it does not make
4844 sense to try to perform a logical or arithmetic operation
4845 involving it. Instead of building `a + throw 3' for example,
4846 we simply build `a, throw 3'. */
4847 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4849 if (! cond_first_p)
4851 lhs_code = COMPOUND_EXPR;
4852 lhs_type = void_type_node;
4854 else
4855 lhs = true_value;
4857 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4859 if (! cond_first_p)
4861 rhs_code = COMPOUND_EXPR;
4862 rhs_type = void_type_node;
4864 else
4865 rhs = false_value;
4868 else
4870 tree testtype = TREE_TYPE (cond);
4871 test = cond;
4872 true_value = fold_convert (testtype, integer_one_node);
4873 false_value = fold_convert (testtype, integer_zero_node);
4876 /* If ARG is complex we want to make sure we only evaluate it once. Though
4877 this is only required if it is volatile, it might be more efficient even
4878 if it is not. However, if we succeed in folding one part to a constant,
4879 we do not need to make this SAVE_EXPR. Since we do this optimization
4880 primarily to see if we do end up with a constant and this SAVE_EXPR
4881 interferes with later optimizations, suppressing it when we can is
4882 important.
4884 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4885 do so. Don't try to see if the result is a constant if an arm is a
4886 COND_EXPR since we get exponential behavior in that case. */
4888 if (saved_expr_p (arg))
4889 save = 1;
4890 else if (lhs == 0 && rhs == 0
4891 && !TREE_CONSTANT (arg)
4892 && (*lang_hooks.decls.global_bindings_p) () == 0
4893 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4894 || TREE_SIDE_EFFECTS (arg)))
4896 if (TREE_CODE (true_value) != COND_EXPR)
4897 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4899 if (TREE_CODE (false_value) != COND_EXPR)
4900 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4902 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4903 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4905 arg = save_expr (arg);
4906 lhs = rhs = 0;
4907 save = saved_expr_p (arg);
4911 if (lhs == 0)
4912 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4913 if (rhs == 0)
4914 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4916 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4918 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4919 ahead of the COND_EXPR we made. Otherwise we would have it only
4920 evaluated in one branch, with the other branch using the result
4921 but missing the evaluation code. Beware that the save_expr call
4922 above might not return a SAVE_EXPR, so testing the TREE_CODE
4923 of ARG is not enough to decide here. */
4924 if (save)
4925 return build (COMPOUND_EXPR, type,
4926 fold_convert (void_type_node, arg),
4927 strip_compound_expr (test, arg));
4928 else
4929 return fold_convert (type, test);
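/* Sketch of the transformation on hypothetical operands: folding
   "a + (b ? x : y)" yields "b ? a + x : a + y".  If ARG is not a
   simple decl or constant -- say a call f () -- it is wrapped in a
   SAVE_EXPR first, and the COMPOUND_EXPR built above guarantees the
   single evaluation happens before the COND_EXPR:
   "(void) SAVE_EXPR<f ()>, b ? SAVE_EXPR + x : SAVE_EXPR + y".  */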
4933 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4935 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4936 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4937 ADDEND is the same as X.
4939 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4940 and finite. The problematic cases are when X is zero, and its mode
4941 has signed zeros. In the case of rounding towards -infinity,
4942 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4943 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4945 static bool
4946 fold_real_zero_addition_p (tree type, tree addend, int negate)
4948 if (!real_zerop (addend))
4949 return false;
4951 /* Don't allow the fold with -fsignaling-nans. */
4952 if (HONOR_SNANS (TYPE_MODE (type)))
4953 return false;
4955 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4956 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4957 return true;
4959 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4960 if (TREE_CODE (addend) == REAL_CST
4961 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4962 negate = !negate;
4964 /* The mode has signed zeros, and we have to honor their sign.
4965 In this situation, there is only one case we can return true for.
4966 X - 0 is the same as X unless rounding towards -infinity is
4967 supported. */
4968 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
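/* Concrete consequences, assuming default rounding and honored signed
   zeros: "x - 0.0" may fold to x, since that holds for every x
   including -0.0; "x + 0.0" may not, since (-0.0) + 0.0 is +0.0; and
   "x + (-0.0)" folds like "x - 0.0" thanks to the negate flip above.  */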
4971 /* Subroutine of fold() that checks comparisons of built-in math
4972 functions against real constants.
4974 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4975 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4976 is the type of the result and ARG0 and ARG1 are the operands of the
4977 comparison. ARG1 must be a TREE_REAL_CST.
4979 The function returns the constant folded tree if a simplification
4980 can be made, and NULL_TREE otherwise. */
4982 static tree
4983 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4984 tree type, tree arg0, tree arg1)
4986 REAL_VALUE_TYPE c;
4988 if (fcode == BUILT_IN_SQRT
4989 || fcode == BUILT_IN_SQRTF
4990 || fcode == BUILT_IN_SQRTL)
4992 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4993 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4995 c = TREE_REAL_CST (arg1);
4996 if (REAL_VALUE_NEGATIVE (c))
4998 /* sqrt(x) < y is always false, if y is negative. */
4999 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5000 return omit_one_operand (type,
5001 fold_convert (type, integer_zero_node),
5002 arg);
5004 /* sqrt(x) > y is always true, if y is negative and we
5005 don't care about NaNs, i.e. negative values of x. */
5006 if (code == NE_EXPR || !HONOR_NANS (mode))
5007 return omit_one_operand (type,
5008 fold_convert (type, integer_one_node),
5009 arg);
5011 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5012 return fold (build (GE_EXPR, type, arg,
5013 build_real (TREE_TYPE (arg), dconst0)));
5015 else if (code == GT_EXPR || code == GE_EXPR)
5017 REAL_VALUE_TYPE c2;
5019 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5020 real_convert (&c2, mode, &c2);
5022 if (REAL_VALUE_ISINF (c2))
5024 /* sqrt(x) > y is x == +Inf, when y is very large. */
5025 if (HONOR_INFINITIES (mode))
5026 return fold (build (EQ_EXPR, type, arg,
5027 build_real (TREE_TYPE (arg), c2)));
5029 /* sqrt(x) > y is always false, when y is very large
5030 and we don't care about infinities. */
5031 return omit_one_operand (type,
5032 fold_convert (type, integer_zero_node),
5033 arg);
5036 /* sqrt(x) > c is the same as x > c*c. */
5037 return fold (build (code, type, arg,
5038 build_real (TREE_TYPE (arg), c2)));
5040 else if (code == LT_EXPR || code == LE_EXPR)
5042 REAL_VALUE_TYPE c2;
5044 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5045 real_convert (&c2, mode, &c2);
5047 if (REAL_VALUE_ISINF (c2))
5049 /* sqrt(x) < y is always true, when y is a very large
5050 value and we don't care about NaNs or Infinities. */
5051 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5052 return omit_one_operand (type,
5053 fold_convert (type, integer_one_node),
5054 arg);
5056 /* sqrt(x) < y is x != +Inf when y is very large and we
5057 don't care about NaNs. */
5058 if (! HONOR_NANS (mode))
5059 return fold (build (NE_EXPR, type, arg,
5060 build_real (TREE_TYPE (arg), c2)));
5062 /* sqrt(x) < y is x >= 0 when y is very large and we
5063 don't care about Infinities. */
5064 if (! HONOR_INFINITIES (mode))
5065 return fold (build (GE_EXPR, type, arg,
5066 build_real (TREE_TYPE (arg), dconst0)));
5068 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5069 if ((*lang_hooks.decls.global_bindings_p) () != 0
5070 || CONTAINS_PLACEHOLDER_P (arg))
5071 return NULL_TREE;
5073 arg = save_expr (arg);
5074 return fold (build (TRUTH_ANDIF_EXPR, type,
5075 fold (build (GE_EXPR, type, arg,
5076 build_real (TREE_TYPE (arg),
5077 dconst0))),
5078 fold (build (NE_EXPR, type, arg,
5079 build_real (TREE_TYPE (arg),
5080 c2)))));
5083 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5084 if (! HONOR_NANS (mode))
5085 return fold (build (code, type, arg,
5086 build_real (TREE_TYPE (arg), c2)));
5088 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5089 if ((*lang_hooks.decls.global_bindings_p) () == 0
5090 && ! CONTAINS_PLACEHOLDER_P (arg))
5092 arg = save_expr (arg);
5093 return fold (build (TRUTH_ANDIF_EXPR, type,
5094 fold (build (GE_EXPR, type, arg,
5095 build_real (TREE_TYPE (arg),
5096 dconst0))),
5097 fold (build (code, type, arg,
5098 build_real (TREE_TYPE (arg),
5099 c2)))));
5104 return NULL_TREE;
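/* Illustrative folds from the cases above (hypothetical operands,
   double precision): "sqrt (x) > 2.0" becomes "x > 4.0";
   "sqrt (x) < 2.0" becomes "x < 4.0" when NaNs can be ignored, or
   "x >= 0.0 && x < 4.0" (with x wrapped in a SAVE_EXPR) when they
   cannot; and any comparison of sqrt (x) against a negative constant
   folds to a constant or to "x >= 0.0".  */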
5107 /* Subroutine of fold() that optimizes comparisons against Infinities,
5108 either +Inf or -Inf.
5110 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5111 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5112 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5114 The function returns the constant folded tree if a simplification
5115 can be made, and NULL_TREE otherwise. */
5117 static tree
5118 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5120 enum machine_mode mode;
5121 REAL_VALUE_TYPE max;
5122 tree temp;
5123 bool neg;
5125 mode = TYPE_MODE (TREE_TYPE (arg0));
5127 /* For negative infinity swap the sense of the comparison. */
5128 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5129 if (neg)
5130 code = swap_tree_comparison (code);
5132 switch (code)
5134 case GT_EXPR:
5135 /* x > +Inf is always false, if we ignore sNaNs. */
5136 if (HONOR_SNANS (mode))
5137 return NULL_TREE;
5138 return omit_one_operand (type,
5139 fold_convert (type, integer_zero_node),
5140 arg0);
5142 case LE_EXPR:
5143 /* x <= +Inf is always true, if we don't care about NaNs. */
5144 if (! HONOR_NANS (mode))
5145 return omit_one_operand (type,
5146 fold_convert (type, integer_one_node),
5147 arg0);
5149 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5150 if ((*lang_hooks.decls.global_bindings_p) () == 0
5151 && ! CONTAINS_PLACEHOLDER_P (arg0))
5153 arg0 = save_expr (arg0);
5154 return fold (build (EQ_EXPR, type, arg0, arg0));
5156 break;
5158 case EQ_EXPR:
5159 case GE_EXPR:
5160 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5161 real_maxval (&max, neg, mode);
5162 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5163 arg0, build_real (TREE_TYPE (arg0), max)));
5165 case LT_EXPR:
5166 /* x < +Inf is always equal to x <= DBL_MAX. */
5167 real_maxval (&max, neg, mode);
5168 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5169 arg0, build_real (TREE_TYPE (arg0), max)));
5171 case NE_EXPR:
5172 /* x != +Inf is always equal to !(x > DBL_MAX). */
5173 real_maxval (&max, neg, mode);
5174 if (! HONOR_NANS (mode))
5175 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5176 arg0, build_real (TREE_TYPE (arg0), max)));
5177 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5178 arg0, build_real (TREE_TYPE (arg0), max)));
5179 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5181 default:
5182 break;
5185 return NULL_TREE;
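/* Examples of the rewrites above for double (hypothetical operand x):
   "x < +Inf" becomes "x <= DBL_MAX"; "x >= +Inf" becomes
   "x > DBL_MAX"; "x > -Inf" becomes "x >= -DBL_MAX" once the sense is
   swapped; and "x != +Inf" becomes "!(x > DBL_MAX)" when NaNs are
   honored.  */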
5188 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5189 equality/inequality test, then return a simplified form of
5190 the test using shifts and logical operations. Otherwise return
5191 NULL. TYPE is the desired result type. */
5193 tree
5194 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5195 tree result_type)
5197 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5198 operand 0. */
5199 if (code == TRUTH_NOT_EXPR)
5201 code = TREE_CODE (arg0);
5202 if (code != NE_EXPR && code != EQ_EXPR)
5203 return NULL_TREE;
5205 /* Extract the arguments of the EQ/NE. */
5206 arg1 = TREE_OPERAND (arg0, 1);
5207 arg0 = TREE_OPERAND (arg0, 0);
5209 /* This requires us to invert the code. */
5210 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5213 /* If this is testing a single bit, we can optimize the test. */
5214 if ((code == NE_EXPR || code == EQ_EXPR)
5215 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5216 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5218 tree inner = TREE_OPERAND (arg0, 0);
5219 tree type = TREE_TYPE (arg0);
5220 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5221 enum machine_mode operand_mode = TYPE_MODE (type);
5222 int ops_unsigned;
5223 tree signed_type, unsigned_type, intermediate_type;
5224 tree arg00;
5226 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5227 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5228 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5229 if (arg00 != NULL_TREE)
5231 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5232 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5233 fold_convert (stype, arg00),
5234 fold_convert (stype, integer_zero_node)));
5237 /* At this point, we know that arg0 is not testing the sign bit. */
5238 if (TYPE_PRECISION (type) - 1 == bitnum)
5239 abort ();
5241 /* Otherwise we have (A & C) != 0 where C is a single bit,
5242 convert that into ((A >> C2) & 1), where C2 = log2(C).
5243 Similarly for (A & C) == 0. */
5245 /* If INNER is a right shift of a constant and it plus BITNUM does
5246 not overflow, adjust BITNUM and INNER. */
5247 if (TREE_CODE (inner) == RSHIFT_EXPR
5248 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5249 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5250 && bitnum < TYPE_PRECISION (type)
5251 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5252 bitnum - TYPE_PRECISION (type)))
5254 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5255 inner = TREE_OPERAND (inner, 0);
5258 /* If we are going to be able to omit the AND below, we must do our
5259 operations as unsigned. If we must use the AND, we have a choice.
5260 Normally unsigned is faster, but for some machines signed is. */
5261 #ifdef LOAD_EXTEND_OP
5262 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5263 #else
5264 ops_unsigned = 1;
5265 #endif
5267 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5268 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5269 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5270 inner = fold_convert (intermediate_type, inner);
5272 if (bitnum != 0)
5273 inner = build (RSHIFT_EXPR, intermediate_type,
5274 inner, size_int (bitnum));
5276 if (code == EQ_EXPR)
5277 inner = build (BIT_XOR_EXPR, intermediate_type,
5278 inner, integer_one_node);
5280 /* Put the AND last so it can combine with more things. */
5281 inner = build (BIT_AND_EXPR, intermediate_type,
5282 inner, integer_one_node);
5284 /* Make sure to return the proper type. */
5285 inner = fold_convert (result_type, inner);
5287 return inner;
5289 return NULL_TREE;
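/* A sketch of the rewrite for a hypothetical 32-bit int A:
   "(A & 8) != 0" becomes "(int) (((unsigned int) A >> 3) & 1)", and
   "(A & 8) == 0" gains an XOR:
   "(int) ((((unsigned int) A >> 3) ^ 1) & 1)".  A test of the sign
   bit instead becomes "A < 0" or "A >= 0" via the early return.  */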
5292 /* Check whether we are allowed to reorder operands arg0 and arg1,
5293 such that the evaluation of arg1 occurs before arg0. */
5295 static bool
5296 reorder_operands_p (tree arg0, tree arg1)
5298 if (! flag_evaluation_order)
5299 return true;
5300 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5301 return true;
5302 return ! TREE_SIDE_EFFECTS (arg0)
5303 && ! TREE_SIDE_EFFECTS (arg1);
5306 /* Test whether it is preferable to swap two operands, ARG0 and
5307 ARG1, for example because ARG0 is an integer constant and ARG1
5308 isn't. If REORDER is true, only recommend swapping if we can
5309 evaluate the operands in reverse order. */
5311 static bool
5312 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5314 STRIP_SIGN_NOPS (arg0);
5315 STRIP_SIGN_NOPS (arg1);
5317 if (TREE_CODE (arg1) == INTEGER_CST)
5318 return 0;
5319 if (TREE_CODE (arg0) == INTEGER_CST)
5320 return 1;
5322 if (TREE_CODE (arg1) == REAL_CST)
5323 return 0;
5324 if (TREE_CODE (arg0) == REAL_CST)
5325 return 1;
5327 if (TREE_CODE (arg1) == COMPLEX_CST)
5328 return 0;
5329 if (TREE_CODE (arg0) == COMPLEX_CST)
5330 return 1;
5332 if (TREE_CONSTANT (arg1))
5333 return 0;
5334 if (TREE_CONSTANT (arg0))
5335 return 1;
5337 if (optimize_size)
5338 return 0;
5340 if (reorder && flag_evaluation_order
5341 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5342 return 0;
5344 if (DECL_P (arg1))
5345 return 0;
5346 if (DECL_P (arg0))
5347 return 1;
5349 return 0;
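/* For instance, with hypothetical trees: given "2 + x" this returns
   true because ARG0 is an INTEGER_CST, so fold rebuilds the
   commutative expression as "x + 2"; given "x + y" with neither
   operand constant, it returns false and the order is kept.  */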
5352 /* Perform constant folding and related simplification of EXPR.
5353 The related simplifications include x*1 => x, x*0 => 0, etc.,
5354 and application of the associative law.
5355 NOP_EXPR conversions may be removed freely (as long as we
5356 are careful not to change the C type of the overall expression).
5357 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5358 but we can constant-fold them if they have constant operands. */
5360 #ifdef ENABLE_FOLD_CHECKING
5361 # define fold(x) fold_1 (x)
5362 static tree fold_1 (tree);
5363 static
5364 #endif
5365 tree
5366 fold (tree expr)
5368 tree t = expr, orig_t;
5369 tree t1 = NULL_TREE;
5370 tree tem;
5371 tree type = TREE_TYPE (expr);
5372 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5373 enum tree_code code = TREE_CODE (t);
5374 int kind = TREE_CODE_CLASS (code);
5375 int invert;
5376 /* WINS will be nonzero when the switch is done
5377 if all operands are constant. */
5378 int wins = 1;
5380 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5381 Likewise for a SAVE_EXPR that's already been evaluated. */
5382 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5383 return t;
5385 /* Return right away if a constant. */
5386 if (kind == 'c')
5387 return t;
5389 orig_t = t;
5391 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5393 tree subop;
5395 /* Special case for conversion ops that can have fixed point args. */
5396 arg0 = TREE_OPERAND (t, 0);
5398 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5399 if (arg0 != 0)
5400 STRIP_SIGN_NOPS (arg0);
5402 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5403 subop = TREE_REALPART (arg0);
5404 else
5405 subop = arg0;
5407 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5408 && TREE_CODE (subop) != REAL_CST)
5409 /* Note that TREE_CONSTANT isn't enough:
5410 static var addresses are constant but we can't
5411 do arithmetic on them. */
5412 wins = 0;
5414 else if (IS_EXPR_CODE_CLASS (kind))
5416 int len = first_rtl_op (code);
5417 int i;
5418 for (i = 0; i < len; i++)
5420 tree op = TREE_OPERAND (t, i);
5421 tree subop;
5423 if (op == 0)
5424 continue; /* Valid for CALL_EXPR, at least. */
5426 if (kind == '<' || code == RSHIFT_EXPR)
5428 /* Signedness matters here. Perhaps we can refine this
5429 later. */
5430 STRIP_SIGN_NOPS (op);
5432 else
5433 /* Strip any conversions that don't change the mode. */
5434 STRIP_NOPS (op);
5436 if (TREE_CODE (op) == COMPLEX_CST)
5437 subop = TREE_REALPART (op);
5438 else
5439 subop = op;
5441 if (TREE_CODE (subop) != INTEGER_CST
5442 && TREE_CODE (subop) != REAL_CST)
5443 /* Note that TREE_CONSTANT isn't enough:
5444 static var addresses are constant but we can't
5445 do arithmetic on them. */
5446 wins = 0;
5448 if (i == 0)
5449 arg0 = op;
5450 else if (i == 1)
5451 arg1 = op;
5455 /* If this is a commutative operation, and ARG0 is a constant, move it
5456 to ARG1 to reduce the number of tests below. */
5457 if (commutative_tree_code (code)
5458 && tree_swap_operands_p (arg0, arg1, true))
5459 return fold (build (code, type, arg1, arg0));
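/* Illustration (hypothetical example, not from the original source):
   for a commutative code such as PLUS_EXPR this canonicalizes

       2 + x   =>   x + 2

   so the "constant in ARG1" tests below only need to check one
   operand position.  */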
5461 /* Now WINS is set as described above,
5462 ARG0 is the first operand of EXPR,
5463 and ARG1 is the second operand (if EXPR has more than one operand).
5465 First check for cases where an arithmetic operation is applied to a
5466 compound, conditional, or comparison operation. Push the arithmetic
5467 operation inside the compound or conditional to see if any folding
5468 can then be done. Convert comparison to conditional for this purpose.
5469 This also optimizes non-constant cases that used to be done in
5470 expand_expr.
5472 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5473 where one of the operands is a truth value (e.g. a comparison) and the
5474 other is a truth value or a BIT_AND_EXPR with the constant 1. In that case, the
5475 code below would make the expression more complex. Change it to a
5476 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5477 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5479 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5480 || code == EQ_EXPR || code == NE_EXPR)
5481 && ((truth_value_p (TREE_CODE (arg0))
5482 && (truth_value_p (TREE_CODE (arg1))
5483 || (TREE_CODE (arg1) == BIT_AND_EXPR
5484 && integer_onep (TREE_OPERAND (arg1, 1)))))
5485 || (truth_value_p (TREE_CODE (arg1))
5486 && (truth_value_p (TREE_CODE (arg0))
5487 || (TREE_CODE (arg0) == BIT_AND_EXPR
5488 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5490 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5491 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5492 : TRUTH_XOR_EXPR,
5493 type, arg0, arg1));
5495 if (code == EQ_EXPR)
5496 t = invert_truthvalue (t);
5498 return t;
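/* Illustration (hypothetical examples, not from the original source):
   for truth-valued operands,

       (a < b) & (c < d)    =>   TRUTH_AND_EXPR of the two tests
       (a < b) != (c < d)   =>   TRUTH_XOR_EXPR of the two tests
       (a < b) == (c < d)   =>   the inversion of a TRUTH_XOR_EXPR.  */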
5501 if (TREE_CODE_CLASS (code) == '1')
5503 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5504 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5505 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5506 else if (TREE_CODE (arg0) == COND_EXPR)
5508 tree arg01 = TREE_OPERAND (arg0, 1);
5509 tree arg02 = TREE_OPERAND (arg0, 2);
5510 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5511 arg01 = fold (build1 (code, type, arg01));
5512 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5513 arg02 = fold (build1 (code, type, arg02));
5514 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5515 arg01, arg02));
5517 /* If this was a conversion, and all we did was to move it
5518 inside the COND_EXPR, bring it back out. But leave it if
5519 it is a conversion from integer to integer and the
5520 result precision is no wider than a word since such a
5521 conversion is cheap and may be optimized away by combine,
5522 while it couldn't if it were outside the COND_EXPR. Then return
5523 so we don't get into an infinite recursion loop taking the
5524 conversion out and then back in. */
5526 if ((code == NOP_EXPR || code == CONVERT_EXPR
5527 || code == NON_LVALUE_EXPR)
5528 && TREE_CODE (t) == COND_EXPR
5529 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5530 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5531 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5532 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5533 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5534 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5535 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5536 && (INTEGRAL_TYPE_P
5537 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5538 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5539 t = build1 (code, type,
5540 build (COND_EXPR,
5541 TREE_TYPE (TREE_OPERAND
5542 (TREE_OPERAND (t, 1), 0)),
5543 TREE_OPERAND (t, 0),
5544 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5545 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5546 return t;
5548 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5549 return fold (build (COND_EXPR, type, arg0,
5550 fold (build1 (code, type, integer_one_node)),
5551 fold (build1 (code, type, integer_zero_node))));
5553 else if (TREE_CODE_CLASS (code) == '<'
5554 && TREE_CODE (arg0) == COMPOUND_EXPR)
5555 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5556 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5557 else if (TREE_CODE_CLASS (code) == '<'
5558 && TREE_CODE (arg1) == COMPOUND_EXPR)
5559 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5560 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5561 else if (TREE_CODE_CLASS (code) == '2'
5562 || TREE_CODE_CLASS (code) == '<')
5564 if (TREE_CODE (arg1) == COMPOUND_EXPR
5565 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5566 && ! TREE_SIDE_EFFECTS (arg0))
5567 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5568 fold (build (code, type,
5569 arg0, TREE_OPERAND (arg1, 1))));
5570 else if ((TREE_CODE (arg1) == COND_EXPR
5571 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5572 && TREE_CODE_CLASS (code) != '<'))
5573 && (TREE_CODE (arg0) != COND_EXPR
5574 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5575 && (! TREE_SIDE_EFFECTS (arg0)
5576 || ((*lang_hooks.decls.global_bindings_p) () == 0
5577 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5578 return
5579 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5580 /*cond_first_p=*/0);
5581 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5582 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5583 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5584 else if ((TREE_CODE (arg0) == COND_EXPR
5585 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5586 && TREE_CODE_CLASS (code) != '<'))
5587 && (TREE_CODE (arg1) != COND_EXPR
5588 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5589 && (! TREE_SIDE_EFFECTS (arg1)
5590 || ((*lang_hooks.decls.global_bindings_p) () == 0
5591 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5592 return
5593 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5594 /*cond_first_p=*/1);
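/* Illustration (hypothetical example, not from the original source):
   when the non-conditional operand is free of side effects,

       (p ? x : y) + 4   =>   p ? x + 4 : y + 4

   which gives each arm a chance to fold further.  */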
5597 switch (code)
5599 case INTEGER_CST:
5600 case REAL_CST:
5601 case VECTOR_CST:
5602 case STRING_CST:
5603 case COMPLEX_CST:
5604 case CONSTRUCTOR:
5605 return t;
5607 case CONST_DECL:
5608 return fold (DECL_INITIAL (t));
5610 case NOP_EXPR:
5611 case FLOAT_EXPR:
5612 case CONVERT_EXPR:
5613 case FIX_TRUNC_EXPR:
5614 case FIX_CEIL_EXPR:
5615 case FIX_FLOOR_EXPR:
5616 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5617 return TREE_OPERAND (t, 0);
5619 /* Handle cases of two conversions in a row. */
5620 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5621 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5623 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5624 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5625 tree final_type = TREE_TYPE (t);
5626 int inside_int = INTEGRAL_TYPE_P (inside_type);
5627 int inside_ptr = POINTER_TYPE_P (inside_type);
5628 int inside_float = FLOAT_TYPE_P (inside_type);
5629 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5630 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5631 int inter_int = INTEGRAL_TYPE_P (inter_type);
5632 int inter_ptr = POINTER_TYPE_P (inter_type);
5633 int inter_float = FLOAT_TYPE_P (inter_type);
5634 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5635 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5636 int final_int = INTEGRAL_TYPE_P (final_type);
5637 int final_ptr = POINTER_TYPE_P (final_type);
5638 int final_float = FLOAT_TYPE_P (final_type);
5639 unsigned int final_prec = TYPE_PRECISION (final_type);
5640 int final_unsignedp = TREE_UNSIGNED (final_type);
5642 /* In addition to the cases of two conversions in a row
5643 handled below, if we are converting something to its own
5644 type via an object of identical or wider precision, neither
5645 conversion is needed. */
5646 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5647 && ((inter_int && final_int) || (inter_float && final_float))
5648 && inter_prec >= final_prec)
5649 return fold (build1 (code, final_type,
5650 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5652 /* Likewise, if the intermediate and final types are either both
5653 float or both integer, we don't need the middle conversion if
5654 it is wider than the final type and doesn't change the signedness
5655 (for integers). Avoid this if the final type is a pointer
5656 since then we sometimes need the inner conversion. Likewise if
5657 the outer has a precision not equal to the size of its mode. */
5658 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5659 || (inter_float && inside_float))
5660 && inter_prec >= inside_prec
5661 && (inter_float || inter_unsignedp == inside_unsignedp)
5662 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5663 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5664 && ! final_ptr)
5665 return fold (build1 (code, final_type,
5666 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5668 /* If we have a sign-extension of a zero-extended value, we can
5669 replace that by a single zero-extension. */
5670 if (inside_int && inter_int && final_int
5671 && inside_prec < inter_prec && inter_prec < final_prec
5672 && inside_unsignedp && !inter_unsignedp)
5673 return fold (build1 (code, final_type,
5674 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5676 /* Two conversions in a row are not needed unless:
5677 - some conversion is floating-point (overstrict for now), or
5678 - the intermediate type is narrower than both initial and
5679 final, or
5680 - the intermediate type and innermost type differ in signedness,
5681 and the outermost type is wider than the intermediate, or
5682 - the initial type is a pointer type and the precisions of the
5683 intermediate and final types differ, or
5684 - the final type is a pointer type and the precisions of the
5685 initial and intermediate types differ. */
5686 if (! inside_float && ! inter_float && ! final_float
5687 && (inter_prec > inside_prec || inter_prec > final_prec)
5688 && ! (inside_int && inter_int
5689 && inter_unsignedp != inside_unsignedp
5690 && inter_prec < final_prec)
5691 && ((inter_unsignedp && inter_prec > inside_prec)
5692 == (final_unsignedp && final_prec > inter_prec))
5693 && ! (inside_ptr && inter_prec != final_prec)
5694 && ! (final_ptr && inside_prec != inter_prec)
5695 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5696 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5697 && ! final_ptr)
5698 return fold (build1 (code, final_type,
5699 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
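/* Illustration (hypothetical examples, not from the original source),
   assuming 8-bit char, 32-bit int and 64-bit long:

       (short) (int) s   =>   s,          for short s
       (long) (int) uc   =>   (long) uc,  for unsigned char uc

   the first by the own-type rule above, the second because a
   sign-extension of a zero-extended value is a single zero-extension.  */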
5702 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5703 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5704 /* Detect assigning a bitfield. */
5705 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5706 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5708 /* Don't leave an assignment inside a conversion
5709 unless assigning a bitfield. */
5710 tree prev = TREE_OPERAND (t, 0);
5711 if (t == orig_t)
5712 t = copy_node (t);
5713 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5714 /* First do the assignment, then return converted constant. */
5715 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5716 TREE_USED (t) = 1;
5717 return t;
5720 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5721 constant (if x has signed type, the sign bit cannot be set
5722 in c). This folds extension into the BIT_AND_EXPR. */
5723 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5724 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5725 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5726 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5728 tree and = TREE_OPERAND (t, 0);
5729 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5730 int change = 0;
5732 if (TREE_UNSIGNED (TREE_TYPE (and))
5733 || (TYPE_PRECISION (TREE_TYPE (t))
5734 <= TYPE_PRECISION (TREE_TYPE (and))))
5735 change = 1;
5736 else if (TYPE_PRECISION (TREE_TYPE (and1))
5737 <= HOST_BITS_PER_WIDE_INT
5738 && host_integerp (and1, 1))
5740 unsigned HOST_WIDE_INT cst;
5742 cst = tree_low_cst (and1, 1);
5743 cst &= (HOST_WIDE_INT) -1
5744 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5745 change = (cst == 0);
5746 #ifdef LOAD_EXTEND_OP
5747 if (change
5748 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5749 == ZERO_EXTEND))
5751 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5752 and0 = fold_convert (uns, and0);
5753 and1 = fold_convert (uns, and1);
5755 #endif
5757 if (change)
5758 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5759 fold_convert (TREE_TYPE (t), and0),
5760 fold_convert (TREE_TYPE (t), and1)));
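/* Illustration (hypothetical example, not from the original source):
   for unsigned int x,

       (unsigned short) (x & 0xff)   =>   (unsigned short) x & 0xff

   moving the narrowing conversion inside the mask.  */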
5763 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5764 return tem ? tem : t;
5766 case VIEW_CONVERT_EXPR:
5767 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5768 return build1 (VIEW_CONVERT_EXPR, type,
5769 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5770 return t;
5772 case COMPONENT_REF:
5773 if (TREE_CODE (arg0) == CONSTRUCTOR
5774 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5776 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5777 if (m)
5778 t = TREE_VALUE (m);
5780 return t;
5782 case RANGE_EXPR:
5783 if (TREE_CONSTANT (t) != wins)
5785 if (t == orig_t)
5786 t = copy_node (t);
5787 TREE_CONSTANT (t) = wins;
5789 return t;
5791 case NEGATE_EXPR:
5792 if (negate_expr_p (arg0))
5793 return fold_convert (type, negate_expr (arg0));
5794 return t;
5796 case ABS_EXPR:
5797 if (wins)
5799 if (TREE_CODE (arg0) == INTEGER_CST)
5801 /* If the value is unsigned, then the absolute value is
5802 the same as the ordinary value. */
5803 if (TREE_UNSIGNED (type))
5804 return arg0;
5805 /* Similarly, if the value is non-negative. */
5806 else if (INT_CST_LT (integer_minus_one_node, arg0))
5807 return arg0;
5808 /* If the value is negative, then the absolute value is
5809 its negation. */
5810 else
5812 unsigned HOST_WIDE_INT low;
5813 HOST_WIDE_INT high;
5814 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5815 TREE_INT_CST_HIGH (arg0),
5816 &low, &high);
5817 t = build_int_2 (low, high);
5818 TREE_TYPE (t) = type;
5819 TREE_OVERFLOW (t)
5820 = (TREE_OVERFLOW (arg0)
5821 | force_fit_type (t, overflow));
5822 TREE_CONSTANT_OVERFLOW (t)
5823 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5826 else if (TREE_CODE (arg0) == REAL_CST)
5828 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5829 t = build_real (type,
5830 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5833 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5834 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5835 /* Convert fabs((double)float) into (double)fabsf(float). */
5836 else if (TREE_CODE (arg0) == NOP_EXPR
5837 && TREE_CODE (type) == REAL_TYPE)
5839 tree targ0 = strip_float_extensions (arg0);
5840 if (targ0 != arg0)
5841 return fold_convert (type, fold (build1 (ABS_EXPR,
5842 TREE_TYPE (targ0),
5843 targ0)));
5845 else if (tree_expr_nonnegative_p (arg0))
5846 return arg0;
5847 return t;
5849 case CONJ_EXPR:
5850 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5851 return fold_convert (type, arg0);
5852 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5853 return build (COMPLEX_EXPR, type,
5854 TREE_OPERAND (arg0, 0),
5855 negate_expr (TREE_OPERAND (arg0, 1)));
5856 else if (TREE_CODE (arg0) == COMPLEX_CST)
5857 return build_complex (type, TREE_REALPART (arg0),
5858 negate_expr (TREE_IMAGPART (arg0)));
5859 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5860 return fold (build (TREE_CODE (arg0), type,
5861 fold (build1 (CONJ_EXPR, type,
5862 TREE_OPERAND (arg0, 0))),
5863 fold (build1 (CONJ_EXPR,
5864 type, TREE_OPERAND (arg0, 1)))));
5865 else if (TREE_CODE (arg0) == CONJ_EXPR)
5866 return TREE_OPERAND (arg0, 0);
5867 return t;
5869 case BIT_NOT_EXPR:
5870 if (wins)
5872 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5873 ~ TREE_INT_CST_HIGH (arg0));
5874 TREE_TYPE (t) = type;
5875 force_fit_type (t, 0);
5876 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5877 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5879 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5880 return TREE_OPERAND (arg0, 0);
5881 return t;
5883 case PLUS_EXPR:
5884 /* A + (-B) -> A - B */
5885 if (TREE_CODE (arg1) == NEGATE_EXPR)
5886 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5887 /* (-A) + B -> B - A */
5888 if (TREE_CODE (arg0) == NEGATE_EXPR)
5889 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5890 else if (! FLOAT_TYPE_P (type))
5892 if (integer_zerop (arg1))
5893 return non_lvalue (fold_convert (type, arg0));
5895 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5896 with a constant, and the two constants have no bits in common,
5897 we should treat this as a BIT_IOR_EXPR since this may produce more
5898 simplifications. */
5899 if (TREE_CODE (arg0) == BIT_AND_EXPR
5900 && TREE_CODE (arg1) == BIT_AND_EXPR
5901 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5902 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5903 && integer_zerop (const_binop (BIT_AND_EXPR,
5904 TREE_OPERAND (arg0, 1),
5905 TREE_OPERAND (arg1, 1), 0)))
5907 code = BIT_IOR_EXPR;
5908 goto bit_ior;
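/* Illustration (hypothetical example, not from the original source):
   the constants 1 and 2 have no bits in common, so

       (x & 1) + (y & 2)   =>   (x & 1) | (y & 2)

   and the BIT_IOR_EXPR code can try further simplifications.  */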
5911 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5912 (plus (plus (mult) (mult)) (foo)) so that we can
5913 take advantage of the factoring cases below. */
5914 if ((TREE_CODE (arg0) == PLUS_EXPR
5915 && TREE_CODE (arg1) == MULT_EXPR)
5916 || (TREE_CODE (arg1) == PLUS_EXPR
5917 && TREE_CODE (arg0) == MULT_EXPR))
5919 tree parg0, parg1, parg, marg;
5921 if (TREE_CODE (arg0) == PLUS_EXPR)
5922 parg = arg0, marg = arg1;
5923 else
5924 parg = arg1, marg = arg0;
5925 parg0 = TREE_OPERAND (parg, 0);
5926 parg1 = TREE_OPERAND (parg, 1);
5927 STRIP_NOPS (parg0);
5928 STRIP_NOPS (parg1);
5930 if (TREE_CODE (parg0) == MULT_EXPR
5931 && TREE_CODE (parg1) != MULT_EXPR)
5932 return fold (build (PLUS_EXPR, type,
5933 fold (build (PLUS_EXPR, type,
5934 fold_convert (type, parg0),
5935 fold_convert (type, marg))),
5936 fold_convert (type, parg1)));
5937 if (TREE_CODE (parg0) != MULT_EXPR
5938 && TREE_CODE (parg1) == MULT_EXPR)
5939 return fold (build (PLUS_EXPR, type,
5940 fold (build (PLUS_EXPR, type,
5941 fold_convert (type, parg1),
5942 fold_convert (type, marg))),
5943 fold_convert (type, parg0)));
5946 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5948 tree arg00, arg01, arg10, arg11;
5949 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5951 /* (A * C) + (B * C) -> (A+B) * C.
5952 We are most concerned about the case where C is a constant,
5953 but other combinations show up during loop reduction. Since
5954 it is not difficult, try all four possibilities. */
5956 arg00 = TREE_OPERAND (arg0, 0);
5957 arg01 = TREE_OPERAND (arg0, 1);
5958 arg10 = TREE_OPERAND (arg1, 0);
5959 arg11 = TREE_OPERAND (arg1, 1);
5960 same = NULL_TREE;
5962 if (operand_equal_p (arg01, arg11, 0))
5963 same = arg01, alt0 = arg00, alt1 = arg10;
5964 else if (operand_equal_p (arg00, arg10, 0))
5965 same = arg00, alt0 = arg01, alt1 = arg11;
5966 else if (operand_equal_p (arg00, arg11, 0))
5967 same = arg00, alt0 = arg01, alt1 = arg10;
5968 else if (operand_equal_p (arg01, arg10, 0))
5969 same = arg01, alt0 = arg00, alt1 = arg11;
5971 /* No identical multiplicands; see if we can find a common
5972 power-of-two factor in non-power-of-two multiplies. This
5973 can help in multi-dimensional array access. */
5974 else if (TREE_CODE (arg01) == INTEGER_CST
5975 && TREE_CODE (arg11) == INTEGER_CST
5976 && TREE_INT_CST_HIGH (arg01) == 0
5977 && TREE_INT_CST_HIGH (arg11) == 0)
5979 HOST_WIDE_INT int01, int11, tmp;
5980 int01 = TREE_INT_CST_LOW (arg01);
5981 int11 = TREE_INT_CST_LOW (arg11);
5983 /* Move min of absolute values to int11. */
5984 if ((int01 >= 0 ? int01 : -int01)
5985 < (int11 >= 0 ? int11 : -int11))
5987 tmp = int01, int01 = int11, int11 = tmp;
5988 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5989 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5992 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5994 alt0 = fold (build (MULT_EXPR, type, arg00,
5995 build_int_2 (int01 / int11, 0)));
5996 alt1 = arg10;
5997 same = arg11;
6001 if (same)
6002 return fold (build (MULT_EXPR, type,
6003 fold (build (PLUS_EXPR, type, alt0, alt1)),
6004 same));
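/* Illustration (hypothetical examples, not from the original source):

       x*4 + y*4    =>   (x + y) * 4        identical multiplicands
       x*12 + y*4   =>   (x*3 + y) * 4      common power-of-two factor

   the second pattern being typical of multi-dimensional array
   indexing.  */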
6007 else
6009 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6010 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6011 return non_lvalue (fold_convert (type, arg0));
6013 /* Likewise if the operands are reversed. */
6014 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6015 return non_lvalue (fold_convert (type, arg1));
6017 /* Convert x+x into x*2.0. */
6018 if (operand_equal_p (arg0, arg1, 0)
6019 && SCALAR_FLOAT_TYPE_P (type))
6020 return fold (build (MULT_EXPR, type, arg0,
6021 build_real (type, dconst2)));
6023 /* Convert x*c+x into x*(c+1). */
6024 if (flag_unsafe_math_optimizations
6025 && TREE_CODE (arg0) == MULT_EXPR
6026 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6027 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6028 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6030 REAL_VALUE_TYPE c;
6032 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6033 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6034 return fold (build (MULT_EXPR, type, arg1,
6035 build_real (type, c)));
6038 /* Convert x+x*c into x*(c+1). */
6039 if (flag_unsafe_math_optimizations
6040 && TREE_CODE (arg1) == MULT_EXPR
6041 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6042 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6043 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6045 REAL_VALUE_TYPE c;
6047 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6048 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6049 return fold (build (MULT_EXPR, type, arg0,
6050 build_real (type, c)));
6053 /* Convert x*c1+x*c2 into x*(c1+c2). */
6054 if (flag_unsafe_math_optimizations
6055 && TREE_CODE (arg0) == MULT_EXPR
6056 && TREE_CODE (arg1) == MULT_EXPR
6057 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6058 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6059 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6060 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6061 && operand_equal_p (TREE_OPERAND (arg0, 0),
6062 TREE_OPERAND (arg1, 0), 0))
6064 REAL_VALUE_TYPE c1, c2;
6066 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6067 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6068 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6069 return fold (build (MULT_EXPR, type,
6070 TREE_OPERAND (arg0, 0),
6071 build_real (type, c1)));
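/* Illustration (hypothetical examples, not from the original source),
   for floating-point operands; all but the first require
   -funsafe-math-optimizations:

       x + x           =>   x * 2.0
       x*2.0 + x       =>   x * 3.0
       x + x*3.0       =>   x * 4.0
       x*2.0 + x*3.0   =>   x * 5.0  */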
6075 bit_rotate:
6076 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6077 is a rotate of A by C1 bits. */
6078 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6079 is a rotate of A by B bits. */
6081 enum tree_code code0, code1;
6082 code0 = TREE_CODE (arg0);
6083 code1 = TREE_CODE (arg1);
6084 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6085 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6086 && operand_equal_p (TREE_OPERAND (arg0, 0),
6087 TREE_OPERAND (arg1, 0), 0)
6088 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6090 tree tree01, tree11;
6091 enum tree_code code01, code11;
6093 tree01 = TREE_OPERAND (arg0, 1);
6094 tree11 = TREE_OPERAND (arg1, 1);
6095 STRIP_NOPS (tree01);
6096 STRIP_NOPS (tree11);
6097 code01 = TREE_CODE (tree01);
6098 code11 = TREE_CODE (tree11);
6099 if (code01 == INTEGER_CST
6100 && code11 == INTEGER_CST
6101 && TREE_INT_CST_HIGH (tree01) == 0
6102 && TREE_INT_CST_HIGH (tree11) == 0
6103 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6104 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6105 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6106 code0 == LSHIFT_EXPR ? tree01 : tree11);
6107 else if (code11 == MINUS_EXPR)
6109 tree tree110, tree111;
6110 tree110 = TREE_OPERAND (tree11, 0);
6111 tree111 = TREE_OPERAND (tree11, 1);
6112 STRIP_NOPS (tree110);
6113 STRIP_NOPS (tree111);
6114 if (TREE_CODE (tree110) == INTEGER_CST
6115 && 0 == compare_tree_int (tree110,
6116 TYPE_PRECISION
6117 (TREE_TYPE (TREE_OPERAND
6118 (arg0, 0))))
6119 && operand_equal_p (tree01, tree111, 0))
6120 return build ((code0 == LSHIFT_EXPR
6121 ? LROTATE_EXPR
6122 : RROTATE_EXPR),
6123 type, TREE_OPERAND (arg0, 0), tree01);
6125 else if (code01 == MINUS_EXPR)
6127 tree tree010, tree011;
6128 tree010 = TREE_OPERAND (tree01, 0);
6129 tree011 = TREE_OPERAND (tree01, 1);
6130 STRIP_NOPS (tree010);
6131 STRIP_NOPS (tree011);
6132 if (TREE_CODE (tree010) == INTEGER_CST
6133 && 0 == compare_tree_int (tree010,
6134 TYPE_PRECISION
6135 (TREE_TYPE (TREE_OPERAND
6136 (arg0, 0))))
6137 && operand_equal_p (tree11, tree011, 0))
6138 return build ((code0 != LSHIFT_EXPR
6139 ? LROTATE_EXPR
6140 : RROTATE_EXPR),
6141 type, TREE_OPERAND (arg0, 0), tree11);
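/* Illustration (hypothetical examples, not from the original source),
   assuming a 32-bit unsigned x:

       (x << 3) + (x >> 29)         =>   LROTATE_EXPR (x, 3)
       (x << n) + (x >> (32 - n))   =>   LROTATE_EXPR (x, n)

   since the shift counts sum to the precision of x; BIT_IOR_EXPR and
   BIT_XOR_EXPR reach the same detection through the bit_rotate label.  */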
6146 associate:
6147 /* In most languages, we can't associate operations on floats through
6148 parentheses. Rather than remember where the parentheses were, we
6149 don't associate floats at all, unless the user has specified
6150 -funsafe-math-optimizations. */
6152 if (! wins
6153 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6155 tree var0, con0, lit0, minus_lit0;
6156 tree var1, con1, lit1, minus_lit1;
6158 /* Split both trees into variables, constants, and literals. Then
6159 associate each group together, the constants with literals,
6160 then the result with variables. This increases the chances of
6161 literals being recombined later and of generating relocatable
6162 expressions for the sum of a constant and literal. */
6163 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6164 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6165 code == MINUS_EXPR);
6167 /* Only do something if we found more than two objects. Otherwise,
6168 nothing has changed and we risk infinite recursion. */
6169 if (2 < ((var0 != 0) + (var1 != 0)
6170 + (con0 != 0) + (con1 != 0)
6171 + (lit0 != 0) + (lit1 != 0)
6172 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6174 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6175 if (code == MINUS_EXPR)
6176 code = PLUS_EXPR;
6178 var0 = associate_trees (var0, var1, code, type);
6179 con0 = associate_trees (con0, con1, code, type);
6180 lit0 = associate_trees (lit0, lit1, code, type);
6181 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6183 /* Preserve the MINUS_EXPR if the negative part of the literal is
6184 greater than the positive part. Otherwise, the multiplicative
6185 folding code (i.e. extract_muldiv) may be fooled when
6186 unsigned constants are subtracted, as in the following
6187 example: ((X*2 + 4) - 8U)/2. */
6188 if (minus_lit0 && lit0)
6190 if (TREE_CODE (lit0) == INTEGER_CST
6191 && TREE_CODE (minus_lit0) == INTEGER_CST
6192 && tree_int_cst_lt (lit0, minus_lit0))
6194 minus_lit0 = associate_trees (minus_lit0, lit0,
6195 MINUS_EXPR, type);
6196 lit0 = 0;
6198 else
6200 lit0 = associate_trees (lit0, minus_lit0,
6201 MINUS_EXPR, type);
6202 minus_lit0 = 0;
6205 if (minus_lit0)
6207 if (con0 == 0)
6208 return fold_convert (type,
6209 associate_trees (var0, minus_lit0,
6210 MINUS_EXPR, type));
6211 else
6213 con0 = associate_trees (con0, minus_lit0,
6214 MINUS_EXPR, type);
6215 return fold_convert (type,
6216 associate_trees (var0, con0,
6217 PLUS_EXPR, type));
6221 con0 = associate_trees (con0, lit0, code, type);
6222 return fold_convert (type, associate_trees (var0, con0,
6223 code, type));
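/* Illustration (hypothetical example, not from the original source):
   splitting into variable, constant and literal parts lets

       (x + 4) + 5   =>   x + 9

   while the more-than-two-objects test above keeps a plain x + 4
   from being rebuilt (and refolded) forever.  */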
6227 binary:
6228 if (wins)
6229 t1 = const_binop (code, arg0, arg1, 0);
6230 if (t1 != NULL_TREE)
6232 /* The return value should always have
6233 the same type as the original expression. */
6234 if (TREE_TYPE (t1) != TREE_TYPE (t))
6235 t1 = fold_convert (TREE_TYPE (t), t1);
6237 return t1;
6239 return t;
6241 case MINUS_EXPR:
6242 /* A - (-B) -> A + B */
6243 if (TREE_CODE (arg1) == NEGATE_EXPR)
6244 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6245 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6246 if (TREE_CODE (arg0) == NEGATE_EXPR
6247 && (FLOAT_TYPE_P (type)
6248 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6249 && negate_expr_p (arg1)
6250 && reorder_operands_p (arg0, arg1))
6251 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6252 TREE_OPERAND (arg0, 0)));
6254 if (! FLOAT_TYPE_P (type))
6256 if (! wins && integer_zerop (arg0))
6257 return negate_expr (fold_convert (type, arg1));
6258 if (integer_zerop (arg1))
6259 return non_lvalue (fold_convert (type, arg0));
6261 /* Fold A - (A & B) into ~B & A. */
6262 if (!TREE_SIDE_EFFECTS (arg0)
6263 && TREE_CODE (arg1) == BIT_AND_EXPR)
6265 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6266 return fold (build (BIT_AND_EXPR, type,
6267 fold (build1 (BIT_NOT_EXPR, type,
6268 TREE_OPERAND (arg1, 0))),
6269 arg0));
6270 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6271 return fold (build (BIT_AND_EXPR, type,
6272 fold (build1 (BIT_NOT_EXPR, type,
6273 TREE_OPERAND (arg1, 1))),
6274 arg0));
6277 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6278 any power of 2 minus 1. */
6279 if (TREE_CODE (arg0) == BIT_AND_EXPR
6280 && TREE_CODE (arg1) == BIT_AND_EXPR
6281 && operand_equal_p (TREE_OPERAND (arg0, 0),
6282 TREE_OPERAND (arg1, 0), 0))
6284 tree mask0 = TREE_OPERAND (arg0, 1);
6285 tree mask1 = TREE_OPERAND (arg1, 1);
6286 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6288 if (operand_equal_p (tem, mask1, 0))
6290 tem = fold (build (BIT_XOR_EXPR, type,
6291 TREE_OPERAND (arg0, 0), mask1));
6292 return fold (build (MINUS_EXPR, type, tem, mask1));
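/* Illustration (hypothetical example, not from the original source):
   with B == 15, a power of 2 minus 1,

       (a & ~15) - (a & 15)   =>   (a ^ 15) - 15

   which holds for any B == 2**k - 1: writing h = a & ~B and
   l = a & B, both sides equal h - l.  */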
6297 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6298 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6299 return non_lvalue (fold_convert (type, arg0));
6301 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6302 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6303 (-ARG1 + ARG0) reduces to -ARG1. */
6304 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6305 return negate_expr (fold_convert (type, arg1));
6307 /* Fold &x - &x. This can happen from &x.foo - &x.
6308 This is unsafe for certain floats even in non-IEEE formats.
6309 In IEEE, it is unsafe because it gives the wrong result for NaNs.
6310 Also note that operand_equal_p is always false if an operand
6311 is volatile. */
6313 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6314 && operand_equal_p (arg0, arg1, 0))
6315 return fold_convert (type, integer_zero_node);
6317 /* A - B -> A + (-B) if B is easily negatable. */
6318 if (!wins && negate_expr_p (arg1)
6319 && (FLOAT_TYPE_P (type)
6320 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6321 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6323 if (TREE_CODE (arg0) == MULT_EXPR
6324 && TREE_CODE (arg1) == MULT_EXPR
6325 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6327 /* (A * C) - (B * C) -> (A-B) * C. */
6328 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6329 TREE_OPERAND (arg1, 1), 0))
6330 return fold (build (MULT_EXPR, type,
6331 fold (build (MINUS_EXPR, type,
6332 TREE_OPERAND (arg0, 0),
6333 TREE_OPERAND (arg1, 0))),
6334 TREE_OPERAND (arg0, 1)));
6335 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6336 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6337 TREE_OPERAND (arg1, 0), 0))
6338 return fold (build (MULT_EXPR, type,
6339 TREE_OPERAND (arg0, 0),
6340 fold (build (MINUS_EXPR, type,
6341 TREE_OPERAND (arg0, 1),
6342 TREE_OPERAND (arg1, 1)))));
6345 goto associate;
6347 case MULT_EXPR:
6348 /* (-A) * (-B) -> A * B */
6349 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6350 return fold (build (MULT_EXPR, type,
6351 TREE_OPERAND (arg0, 0),
6352 negate_expr (arg1)));
6353 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6354 return fold (build (MULT_EXPR, type,
6355 negate_expr (arg0),
6356 TREE_OPERAND (arg1, 0)));
6358 if (! FLOAT_TYPE_P (type))
6360 if (integer_zerop (arg1))
6361 return omit_one_operand (type, arg1, arg0);
6362 if (integer_onep (arg1))
6363 return non_lvalue (fold_convert (type, arg0));
6365 /* (a * (1 << b)) is (a << b) */
6366 if (TREE_CODE (arg1) == LSHIFT_EXPR
6367 && integer_onep (TREE_OPERAND (arg1, 0)))
6368 return fold (build (LSHIFT_EXPR, type, arg0,
6369 TREE_OPERAND (arg1, 1)));
6370 if (TREE_CODE (arg0) == LSHIFT_EXPR
6371 && integer_onep (TREE_OPERAND (arg0, 0)))
6372 return fold (build (LSHIFT_EXPR, type, arg1,
6373 TREE_OPERAND (arg0, 1)));
6375 if (TREE_CODE (arg1) == INTEGER_CST
6376 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6377 fold_convert (type, arg1),
6378 code, NULL_TREE)))
6379 return fold_convert (type, tem);
6382 else
6384 /* Maybe fold x * 0 to 0. The expressions aren't the same
6385 when x is NaN, since x * 0 is also NaN. Nor are they the
6386 same in modes with signed zeros, since multiplying a
6387 negative value by 0 gives -0, not +0. */
6388 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6389 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6390 && real_zerop (arg1))
6391 return omit_one_operand (type, arg1, arg0);
6392 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6393 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6394 && real_onep (arg1))
6395 return non_lvalue (fold_convert (type, arg0));
6397 /* Transform x * -1.0 into -x. */
6398 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6399 && real_minus_onep (arg1))
6400 return fold (build1 (NEGATE_EXPR, type, arg0));
6402 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6403 if (flag_unsafe_math_optimizations
6404 && TREE_CODE (arg0) == RDIV_EXPR
6405 && TREE_CODE (arg1) == REAL_CST
6406 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6408 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6409 arg1, 0);
6410 if (tem)
6411 return fold (build (RDIV_EXPR, type, tem,
6412 TREE_OPERAND (arg0, 1)));
6415 if (flag_unsafe_math_optimizations)
6417 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6418 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6420 /* Optimizations of sqrt(...)*sqrt(...). */
6421 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6422 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6423 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6425 tree sqrtfn, arg, arglist;
6426 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6427 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6429 /* Optimize sqrt(x)*sqrt(x) as x. */
6430 if (operand_equal_p (arg00, arg10, 0)
6431 && ! HONOR_SNANS (TYPE_MODE (type)))
6432 return arg00;
6434 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6435 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6436 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6437 arglist = build_tree_list (NULL_TREE, arg);
6438 return build_function_call_expr (sqrtfn, arglist);
6441 /* Optimize expN(x)*expN(y) as expN(x+y). */
6442 if (fcode0 == fcode1
6443 && (fcode0 == BUILT_IN_EXP
6444 || fcode0 == BUILT_IN_EXPF
6445 || fcode0 == BUILT_IN_EXPL
6446 || fcode0 == BUILT_IN_EXP2
6447 || fcode0 == BUILT_IN_EXP2F
6448 || fcode0 == BUILT_IN_EXP2L
6449 || fcode0 == BUILT_IN_EXP10
6450 || fcode0 == BUILT_IN_EXP10F
6451 || fcode0 == BUILT_IN_EXP10L
6452 || fcode0 == BUILT_IN_POW10
6453 || fcode0 == BUILT_IN_POW10F
6454 || fcode0 == BUILT_IN_POW10L))
6456 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6457 tree arg = build (PLUS_EXPR, type,
6458 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6459 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6460 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6461 return build_function_call_expr (expfn, arglist);
6464 /* Optimizations of pow(...)*pow(...). */
6465 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6466 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6467 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6469 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6470 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6471 1)));
6472 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6473 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6474 1)));
6476 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6477 if (operand_equal_p (arg01, arg11, 0))
6479 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6480 tree arg = build (MULT_EXPR, type, arg00, arg10);
6481 tree arglist = tree_cons (NULL_TREE, fold (arg),
6482 build_tree_list (NULL_TREE,
6483 arg01));
6484 return build_function_call_expr (powfn, arglist);
6487 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6488 if (operand_equal_p (arg00, arg10, 0))
6490 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6491 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6492 tree arglist = tree_cons (NULL_TREE, arg00,
6493 build_tree_list (NULL_TREE,
6494 arg));
6495 return build_function_call_expr (powfn, arglist);
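/* Illustration (hypothetical examples, not from the original source),
   all guarded by flag_unsafe_math_optimizations:

       sqrt(x) * sqrt(y)     =>   sqrt(x * y)
       exp(x) * exp(y)       =>   exp(x + y)
       pow(x,y) * pow(z,y)   =>   pow(x * z, y)
       pow(x,y) * pow(x,z)   =>   pow(x, y + z)  */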
6499 /* Optimize tan(x)*cos(x) as sin(x). */
6500 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6501 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6502 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6503 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6504 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6505 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6506 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6507 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6509 tree sinfn;
6511 switch (fcode0)
6513 case BUILT_IN_TAN:
6514 case BUILT_IN_COS:
6515 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6516 break;
6517 case BUILT_IN_TANF:
6518 case BUILT_IN_COSF:
6519 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6520 break;
6521 case BUILT_IN_TANL:
6522 case BUILT_IN_COSL:
6523 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6524 break;
6525 default:
6526 sinfn = NULL_TREE;
6529 if (sinfn != NULL_TREE)
6530 return build_function_call_expr (sinfn,
6531 TREE_OPERAND (arg0, 1));
6534 /* Optimize x*pow(x,c) as pow(x,c+1). */
6535 if (fcode1 == BUILT_IN_POW
6536 || fcode1 == BUILT_IN_POWF
6537 || fcode1 == BUILT_IN_POWL)
6539 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6540 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6541 1)));
6542 if (TREE_CODE (arg11) == REAL_CST
6543 && ! TREE_CONSTANT_OVERFLOW (arg11)
6544 && operand_equal_p (arg0, arg10, 0))
6546 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6547 REAL_VALUE_TYPE c;
6548 tree arg, arglist;
6550 c = TREE_REAL_CST (arg11);
6551 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6552 arg = build_real (type, c);
6553 arglist = build_tree_list (NULL_TREE, arg);
6554 arglist = tree_cons (NULL_TREE, arg0, arglist);
6555 return build_function_call_expr (powfn, arglist);
6559 /* Optimize pow(x,c)*x as pow(x,c+1). */
6560 if (fcode0 == BUILT_IN_POW
6561 || fcode0 == BUILT_IN_POWF
6562 || fcode0 == BUILT_IN_POWL)
6564 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6565 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6566 1)));
6567 if (TREE_CODE (arg01) == REAL_CST
6568 && ! TREE_CONSTANT_OVERFLOW (arg01)
6569 && operand_equal_p (arg1, arg00, 0))
6571 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6572 REAL_VALUE_TYPE c;
6573 tree arg, arglist;
6575 c = TREE_REAL_CST (arg01);
6576 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6577 arg = build_real (type, c);
6578 arglist = build_tree_list (NULL_TREE, arg);
6579 arglist = tree_cons (NULL_TREE, arg1, arglist);
6580 return build_function_call_expr (powfn, arglist);
6584 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6585 if (! optimize_size
6586 && operand_equal_p (arg0, arg1, 0))
6588 tree powfn;
6590 if (type == double_type_node)
6591 powfn = implicit_built_in_decls[BUILT_IN_POW];
6592 else if (type == float_type_node)
6593 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6594 else if (type == long_double_type_node)
6595 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6596 else
6597 powfn = NULL_TREE;
6599 if (powfn)
6601 tree arg = build_real (type, dconst2);
6602 tree arglist = build_tree_list (NULL_TREE, arg);
6603 arglist = tree_cons (NULL_TREE, arg0, arglist);
6604 return build_function_call_expr (powfn, arglist);
6609 goto associate;
6611 case BIT_IOR_EXPR:
6612 bit_ior:
6613 if (integer_all_onesp (arg1))
6614 return omit_one_operand (type, arg1, arg0);
6615 if (integer_zerop (arg1))
6616 return non_lvalue (fold_convert (type, arg0));
6617 t1 = distribute_bit_expr (code, type, arg0, arg1);
6618 if (t1 != NULL_TREE)
6619 return t1;
6621 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6623 This results in more efficient code for machines without a NAND
6624 instruction. Combine will canonicalize to the first form
6625 which will allow use of NAND instructions provided by the
6626 backend if they exist. */
6627 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6628 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6630 return fold (build1 (BIT_NOT_EXPR, type,
6631 build (BIT_AND_EXPR, type,
6632 TREE_OPERAND (arg0, 0),
6633 TREE_OPERAND (arg1, 0))));
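/* Illustration (hypothetical example, not from the original source):
   De Morgan in reverse,

       ~a | ~b   =>   ~(a & b)

   one BIT_NOT_EXPR instead of two; the mirror-image fold for
   BIT_AND_EXPR appears below.  */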
6636 /* See if this can be simplified into a rotate first. If that
6637 is unsuccessful continue in the association code. */
6638 goto bit_rotate;
6640 case BIT_XOR_EXPR:
6641 if (integer_zerop (arg1))
6642 return non_lvalue (fold_convert (type, arg0));
6643 if (integer_all_onesp (arg1))
6644 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6646 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6647 with a constant, and the two constants have no bits in common,
6648 we should treat this as a BIT_IOR_EXPR since this may produce more
6649 simplifications. */
6650 if (TREE_CODE (arg0) == BIT_AND_EXPR
6651 && TREE_CODE (arg1) == BIT_AND_EXPR
6652 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6653 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6654 && integer_zerop (const_binop (BIT_AND_EXPR,
6655 TREE_OPERAND (arg0, 1),
6656 TREE_OPERAND (arg1, 1), 0)))
6658 code = BIT_IOR_EXPR;
6659 goto bit_ior;
6662 /* See if this can be simplified into a rotate first. If that
6663 is unsuccessful continue in the association code. */
6664 goto bit_rotate;
6666 case BIT_AND_EXPR:
6667 if (integer_all_onesp (arg1))
6668 return non_lvalue (fold_convert (type, arg0));
6669 if (integer_zerop (arg1))
6670 return omit_one_operand (type, arg1, arg0);
6671 t1 = distribute_bit_expr (code, type, arg0, arg1);
6672 if (t1 != NULL_TREE)
6673 return t1;
6674 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6675 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6676 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6678 unsigned int prec
6679 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6681 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6682 && (~TREE_INT_CST_LOW (arg1)
6683 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6684 return fold_convert (type, TREE_OPERAND (arg0, 0));
6687 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6689 This results in more efficient code for machines without a NOR
6690 instruction. Combine will canonicalize to the first form
6691 which will allow use of NOR instructions provided by the
6692 backend if they exist. */
6693 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6694 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6696 return fold (build1 (BIT_NOT_EXPR, type,
6697 build (BIT_IOR_EXPR, type,
6698 TREE_OPERAND (arg0, 0),
6699 TREE_OPERAND (arg1, 0))));
6702 goto associate;
6704 case RDIV_EXPR:
6705 /* Don't touch a floating-point divide by zero unless the mode
6706 of the constant can represent infinity. */
6707 if (TREE_CODE (arg1) == REAL_CST
6708 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6709 && real_zerop (arg1))
6710 return t;
6712 /* (-A) / (-B) -> A / B */
6713 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6714 return fold (build (RDIV_EXPR, type,
6715 TREE_OPERAND (arg0, 0),
6716 negate_expr (arg1)));
6717 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6718 return fold (build (RDIV_EXPR, type,
6719 negate_expr (arg0),
6720 TREE_OPERAND (arg1, 0)));
6722 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6723 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6724 && real_onep (arg1))
6725 return non_lvalue (fold_convert (type, arg0));
6727 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6728 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6729 && real_minus_onep (arg1))
6730 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6732 /* If ARG1 is a constant, we can convert this to a multiply by the
6733 reciprocal. This does not have the same rounding properties,
6734 so only do this if -funsafe-math-optimizations. We can actually
6735 always safely do it if ARG1 is a power of two, but it's hard to
6736 tell if it is or not in a portable manner. */
6737 if (TREE_CODE (arg1) == REAL_CST)
6739 if (flag_unsafe_math_optimizations
6740 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6741 arg1, 0)))
6742 return fold (build (MULT_EXPR, type, arg0, tem));
6743 /* Find the reciprocal if optimizing and the result is exact. */
6744 if (optimize)
6746 REAL_VALUE_TYPE r;
6747 r = TREE_REAL_CST (arg1);
6748 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6750 tem = build_real (type, r);
6751 return fold (build (MULT_EXPR, type, arg0, tem));
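/* Illustration (hypothetical examples, not from the original source):

       x / 2.0   =>   x * 0.5          exact inverse, done when optimizing
       x / 3.0   =>   x * (1.0/3.0)    only with -funsafe-math-optimizations  */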
6755 /* Convert A/B/C to A/(B*C). */
6756 if (flag_unsafe_math_optimizations
6757 && TREE_CODE (arg0) == RDIV_EXPR)
6758 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6759 fold (build (MULT_EXPR, type,
6760 TREE_OPERAND (arg0, 1), arg1))));
6762 /* Convert A/(B/C) to (A/B)*C. */
6763 if (flag_unsafe_math_optimizations
6764 && TREE_CODE (arg1) == RDIV_EXPR)
6765 return fold (build (MULT_EXPR, type,
6766 fold (build (RDIV_EXPR, type, arg0,
6767 TREE_OPERAND (arg1, 0))),
6768 TREE_OPERAND (arg1, 1)));
6770 /* Convert C1/(X*C2) into (C1/C2)/X. */
6771 if (flag_unsafe_math_optimizations
6772 && TREE_CODE (arg1) == MULT_EXPR
6773 && TREE_CODE (arg0) == REAL_CST
6774 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6776 tree tem = const_binop (RDIV_EXPR, arg0,
6777 TREE_OPERAND (arg1, 1), 0);
6778 if (tem)
6779 return fold (build (RDIV_EXPR, type, tem,
6780 TREE_OPERAND (arg1, 0)));
6783 if (flag_unsafe_math_optimizations)
6785 enum built_in_function fcode = builtin_mathfn_code (arg1);
6786 /* Optimize x/expN(y) into x*expN(-y). */
6787 if (fcode == BUILT_IN_EXP
6788 || fcode == BUILT_IN_EXPF
6789 || fcode == BUILT_IN_EXPL
6790 || fcode == BUILT_IN_EXP2
6791 || fcode == BUILT_IN_EXP2F
6792 || fcode == BUILT_IN_EXP2L
6793 || fcode == BUILT_IN_EXP10
6794 || fcode == BUILT_IN_EXP10F
6795 || fcode == BUILT_IN_EXP10L
6796 || fcode == BUILT_IN_POW10
6797 || fcode == BUILT_IN_POW10F
6798 || fcode == BUILT_IN_POW10L)
6800 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6801 tree arg = build1 (NEGATE_EXPR, type,
6802 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6803 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6804 arg1 = build_function_call_expr (expfn, arglist);
6805 return fold (build (MULT_EXPR, type, arg0, arg1));
6808 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6809 if (fcode == BUILT_IN_POW
6810 || fcode == BUILT_IN_POWF
6811 || fcode == BUILT_IN_POWL)
6813 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6814 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6815 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6816 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6817 tree arglist = tree_cons(NULL_TREE, arg10,
6818 build_tree_list (NULL_TREE, neg11));
6819 arg1 = build_function_call_expr (powfn, arglist);
6820 return fold (build (MULT_EXPR, type, arg0, arg1));
6824 if (flag_unsafe_math_optimizations)
6826 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6827 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6829 /* Optimize sin(x)/cos(x) as tan(x). */
6830 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6831 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6832 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6833 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6834 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6836 tree tanfn;
6838 if (fcode0 == BUILT_IN_SIN)
6839 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6840 else if (fcode0 == BUILT_IN_SINF)
6841 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6842 else if (fcode0 == BUILT_IN_SINL)
6843 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6844 else
6845 tanfn = NULL_TREE;
6847 if (tanfn != NULL_TREE)
6848 return build_function_call_expr (tanfn,
6849 TREE_OPERAND (arg0, 1));
6852 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6853 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6854 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6855 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6856 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6857 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6859 tree tanfn;
6861 if (fcode0 == BUILT_IN_COS)
6862 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6863 else if (fcode0 == BUILT_IN_COSF)
6864 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6865 else if (fcode0 == BUILT_IN_COSL)
6866 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6867 else
6868 tanfn = NULL_TREE;
6870 if (tanfn != NULL_TREE)
6872 tree tmp = TREE_OPERAND (arg0, 1);
6873 tmp = build_function_call_expr (tanfn, tmp);
6874 return fold (build (RDIV_EXPR, type,
6875 build_real (type, dconst1),
6876 tmp));
6880 /* Optimize pow(x,c)/x as pow(x,c-1). */
6881 if (fcode0 == BUILT_IN_POW
6882 || fcode0 == BUILT_IN_POWF
6883 || fcode0 == BUILT_IN_POWL)
6885 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6886 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6887 if (TREE_CODE (arg01) == REAL_CST
6888 && ! TREE_CONSTANT_OVERFLOW (arg01)
6889 && operand_equal_p (arg1, arg00, 0))
6891 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6892 REAL_VALUE_TYPE c;
6893 tree arg, arglist;
6895 c = TREE_REAL_CST (arg01);
6896 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6897 arg = build_real (type, c);
6898 arglist = build_tree_list (NULL_TREE, arg);
6899 arglist = tree_cons (NULL_TREE, arg1, arglist);
6900 return build_function_call_expr (powfn, arglist);
6904 goto binary;
6906 case TRUNC_DIV_EXPR:
6907 case ROUND_DIV_EXPR:
6908 case FLOOR_DIV_EXPR:
6909 case CEIL_DIV_EXPR:
6910 case EXACT_DIV_EXPR:
6911 if (integer_onep (arg1))
6912 return non_lvalue (fold_convert (type, arg0));
6913 if (integer_zerop (arg1))
6914 return t;
6916 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6917 operation, EXACT_DIV_EXPR.
6919 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6920 At one time others generated faster code; it's not clear if they do
6921 after the last round of changes to the DIV code in expmed.c. */
6922 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6923 && multiple_of_p (type, arg0, arg1))
6924 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6926 if (TREE_CODE (arg1) == INTEGER_CST
6927 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6928 code, NULL_TREE)))
6929 return fold_convert (type, tem);
6931 goto binary;
6933 case CEIL_MOD_EXPR:
6934 case FLOOR_MOD_EXPR:
6935 case ROUND_MOD_EXPR:
6936 case TRUNC_MOD_EXPR:
6937 if (integer_onep (arg1))
6938 return omit_one_operand (type, integer_zero_node, arg0);
6939 if (integer_zerop (arg1))
6940 return t;
6942 if (TREE_CODE (arg1) == INTEGER_CST
6943 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6944 code, NULL_TREE)))
6945 return fold_convert (type, tem);
6947 goto binary;
6949 case LROTATE_EXPR:
6950 case RROTATE_EXPR:
6951 if (integer_all_onesp (arg0))
6952 return omit_one_operand (type, arg0, arg1);
6953 goto shift;
6955 case RSHIFT_EXPR:
6956 /* Optimize -1 >> x for arithmetic right shifts. */
6957 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6958 return omit_one_operand (type, arg0, arg1);
6959 /* ... fall through ... */
6961 case LSHIFT_EXPR:
6962 shift:
6963 if (integer_zerop (arg1))
6964 return non_lvalue (fold_convert (type, arg0));
6965 if (integer_zerop (arg0))
6966 return omit_one_operand (type, arg0, arg1);
6968 /* Since a negative shift count is not well-defined,
6969 don't try to compute it in the compiler. */
6970 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6971 return t;
6972 /* Rewrite an LROTATE_EXPR by a constant into an
6973 RROTATE_EXPR by a new constant. */
6974 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6976 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6977 tem = fold_convert (TREE_TYPE (arg1), tem);
6978 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6979 return fold (build (RROTATE_EXPR, type, arg0, tem));
6982 /* If we have a rotate of a bit operation with the rotate count and
6983 the second operand of the bit operation both constant,
6984 permute the two operations. */
6985 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6986 && (TREE_CODE (arg0) == BIT_AND_EXPR
6987 || TREE_CODE (arg0) == BIT_IOR_EXPR
6988 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6989 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6990 return fold (build (TREE_CODE (arg0), type,
6991 fold (build (code, type,
6992 TREE_OPERAND (arg0, 0), arg1)),
6993 fold (build (code, type,
6994 TREE_OPERAND (arg0, 1), arg1))));
6996 /* Two consecutive rotates adding up to the width of the mode can
6997 be ignored. */
6998 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6999 && TREE_CODE (arg0) == RROTATE_EXPR
7000 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7001 && TREE_INT_CST_HIGH (arg1) == 0
7002 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7003 && ((TREE_INT_CST_LOW (arg1)
7004 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7005 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7006 return TREE_OPERAND (arg0, 0);
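/* Illustration (hypothetical examples, not from the original source),
   assuming a 32-bit type:

       LROTATE_EXPR (x, 3)                       =>   RROTATE_EXPR (x, 29)
       RROTATE_EXPR (RROTATE_EXPR (x, 12), 20)   =>   x  */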
7008 goto binary;
7010 case MIN_EXPR:
7011 if (operand_equal_p (arg0, arg1, 0))
7012 return omit_one_operand (type, arg0, arg1);
7013 if (INTEGRAL_TYPE_P (type)
7014 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
7015 return omit_one_operand (type, arg1, arg0);
7016 goto associate;
7018 case MAX_EXPR:
7019 if (operand_equal_p (arg0, arg1, 0))
7020 return omit_one_operand (type, arg0, arg1);
7021 if (INTEGRAL_TYPE_P (type)
7022 && TYPE_MAX_VALUE (type)
7023 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
7024 return omit_one_operand (type, arg1, arg0);
7025 goto associate;
7027 case TRUTH_NOT_EXPR:
7028 /* Note that the operand of this must be an int
7029 and its value must be 0 or 1.
7030 ("true" is a fixed value perhaps depending on the language,
7031 but we don't handle values other than 1 correctly yet.) */
7032 tem = invert_truthvalue (arg0);
7033 /* Avoid infinite recursion. */
7034 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7036 tem = fold_single_bit_test (code, arg0, arg1, type);
7037 if (tem)
7038 return tem;
7039 return t;
7041 return fold_convert (type, tem);
7043 case TRUTH_ANDIF_EXPR:
7044 /* Note that the operands of this must be ints
7045 and their values must be 0 or 1.
7046 ("true" is a fixed value perhaps depending on the language.) */
7047 /* If first arg is constant zero, return it. */
7048 if (integer_zerop (arg0))
7049 return fold_convert (type, arg0);
7050 case TRUTH_AND_EXPR:
7051 /* If either arg is constant true, drop it. */
7052 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7053 return non_lvalue (fold_convert (type, arg1));
7054 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7055 /* Preserve sequence points. */
7056 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7057 return non_lvalue (fold_convert (type, arg0));
7058 /* If second arg is constant zero, result is zero, but first arg
7059 must be evaluated. */
7060 if (integer_zerop (arg1))
7061 return omit_one_operand (type, arg1, arg0);
7062 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7063 case will be handled here. */
7064 if (integer_zerop (arg0))
7065 return omit_one_operand (type, arg0, arg1);
7067 truth_andor:
7068 /* We only do these simplifications if we are optimizing. */
7069 if (!optimize)
7070 return t;
7072 /* Check for things like (A || B) && (A || C). We can convert this
7073 to A || (B && C). Note that either operator can be any of the four
7074 truth and/or operations and the transformation will still be
7075 valid. Also note that we only care about order for the
7076 ANDIF and ORIF operators. If B contains side effects, this
7077 might change the truth-value of A. */
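/* (E.g. (a || b) && (a || c) becomes a || (b && c), which also
   evaluates A only once; the side-effect test below keeps this
   safe for the short-circuit operators.) */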
7078 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7079 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7080 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7081 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7082 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7083 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7085 tree a00 = TREE_OPERAND (arg0, 0);
7086 tree a01 = TREE_OPERAND (arg0, 1);
7087 tree a10 = TREE_OPERAND (arg1, 0);
7088 tree a11 = TREE_OPERAND (arg1, 1);
7089 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7090 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7091 && (code == TRUTH_AND_EXPR
7092 || code == TRUTH_OR_EXPR));
7094 if (operand_equal_p (a00, a10, 0))
7095 return fold (build (TREE_CODE (arg0), type, a00,
7096 fold (build (code, type, a01, a11))));
7097 else if (commutative && operand_equal_p (a00, a11, 0))
7098 return fold (build (TREE_CODE (arg0), type, a00,
7099 fold (build (code, type, a01, a10))));
7100 else if (commutative && operand_equal_p (a01, a10, 0))
7101 return fold (build (TREE_CODE (arg0), type, a01,
7102 fold (build (code, type, a00, a11))));
7104 /* This case is tricky because we must either have commutative
7105 operators or else A10 must not have side-effects. */
7107 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7108 && operand_equal_p (a01, a11, 0))
7109 return fold (build (TREE_CODE (arg0), type,
7110 fold (build (code, type, a00, a10)),
7111 a01));
7114 /* See if we can build a range comparison. */
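/* (E.g. c >= 2 && c <= 5 can become a single unsigned range test
   along the lines of (unsigned) (c - 2) <= 3; the exact form of
   the result is up to fold_range_test.) */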
7115 if (0 != (tem = fold_range_test (t)))
7116 return tem;
7118 /* Check for the possibility of merging component references. If our
7119 lhs is another similar operation, try to merge its rhs with our
7120 rhs. Then try to merge our lhs and rhs. */
7121 if (TREE_CODE (arg0) == code
7122 && 0 != (tem = fold_truthop (code, type,
7123 TREE_OPERAND (arg0, 1), arg1)))
7124 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7126 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7127 return tem;
7129 return t;
7131 case TRUTH_ORIF_EXPR:
7132 /* Note that the operands of this must be ints
7133 and their values must be 0 or 1.
7134 ("true" is a fixed value perhaps depending on the language.) */
7135 /* If first arg is constant true, return it. */
7136 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7137 return fold_convert (type, arg0);
7138 case TRUTH_OR_EXPR:
7139 /* If either arg is constant zero, drop it. */
7140 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7141 return non_lvalue (fold_convert (type, arg1));
7142 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7143 /* Preserve sequence points. */
7144 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7145 return non_lvalue (fold_convert (type, arg0));
7146 /* If second arg is constant true, result is true, but we must
7147 evaluate first arg. */
7148 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7149 return omit_one_operand (type, arg1, arg0);
7150 /* Likewise for first arg, but note this only occurs here for
7151 TRUTH_OR_EXPR. */
7152 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7153 return omit_one_operand (type, arg0, arg1);
7154 goto truth_andor;
7156 case TRUTH_XOR_EXPR:
7157 /* If either arg is constant zero, drop it. */
7158 if (integer_zerop (arg0))
7159 return non_lvalue (fold_convert (type, arg1));
7160 if (integer_zerop (arg1))
7161 return non_lvalue (fold_convert (type, arg0));
7162 /* If either arg is constant true, this is a logical inversion. */
7163 if (integer_onep (arg0))
7164 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7165 if (integer_onep (arg1))
7166 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7167 return t;
7169 case EQ_EXPR:
7170 case NE_EXPR:
7171 case LT_EXPR:
7172 case GT_EXPR:
7173 case LE_EXPR:
7174 case GE_EXPR:
7175 /* If one arg is a real or integer constant, put it last. */
7176 if (tree_swap_operands_p (arg0, arg1, true))
7177 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7179 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7181 tree targ0 = strip_float_extensions (arg0);
7182 tree targ1 = strip_float_extensions (arg1);
7183 tree newtype = TREE_TYPE (targ0);
7185 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7186 newtype = TREE_TYPE (targ1);
7188 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7189 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7190 return fold (build (code, type, fold_convert (newtype, targ0),
7191 fold_convert (newtype, targ1)));
7193 /* (-a) CMP (-b) -> b CMP a */
7194 if (TREE_CODE (arg0) == NEGATE_EXPR
7195 && TREE_CODE (arg1) == NEGATE_EXPR)
7196 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7197 TREE_OPERAND (arg0, 0)));
7199 if (TREE_CODE (arg1) == REAL_CST)
7201 REAL_VALUE_TYPE cst;
7202 cst = TREE_REAL_CST (arg1);
7204 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7205 if (TREE_CODE (arg0) == NEGATE_EXPR)
7206 return
7207 fold (build (swap_tree_comparison (code), type,
7208 TREE_OPERAND (arg0, 0),
7209 build_real (TREE_TYPE (arg1),
7210 REAL_VALUE_NEGATE (cst))));
7212 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7213 /* a CMP (-0) -> a CMP 0 */
7214 if (REAL_VALUE_MINUS_ZERO (cst))
7215 return fold (build (code, type, arg0,
7216 build_real (TREE_TYPE (arg1), dconst0)));
7218 /* x != NaN is always true, other ops are always false. */
7219 if (REAL_VALUE_ISNAN (cst)
7220 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7222 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7223 return omit_one_operand (type, fold_convert (type, t), arg0);
7226 /* Fold comparisons against infinity. */
7227 if (REAL_VALUE_ISINF (cst))
7229 tem = fold_inf_compare (code, type, arg0, arg1);
7230 if (tem != NULL_TREE)
7231 return tem;
7235 /* If this is a comparison of a real constant with a PLUS_EXPR
7236 or a MINUS_EXPR of a real constant, we can convert it into a
7237 comparison with a revised real constant, provided no overflow
7238 occurs; this is done only when unsafe_math_optimizations are enabled. */
7239 if (flag_unsafe_math_optimizations
7240 && TREE_CODE (arg1) == REAL_CST
7241 && (TREE_CODE (arg0) == PLUS_EXPR
7242 || TREE_CODE (arg0) == MINUS_EXPR)
7243 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7244 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7245 ? MINUS_EXPR : PLUS_EXPR,
7246 arg1, TREE_OPERAND (arg0, 1), 0))
7247 && ! TREE_CONSTANT_OVERFLOW (tem))
7248 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7250 /* Likewise, we can simplify a comparison of a real constant with
7251 a MINUS_EXPR whose first operand is also a real constant, i.e.
7252 (c1 - x) < c2 becomes x > c1-c2. */
7253 if (flag_unsafe_math_optimizations
7254 && TREE_CODE (arg1) == REAL_CST
7255 && TREE_CODE (arg0) == MINUS_EXPR
7256 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7257 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7258 arg1, 0))
7259 && ! TREE_CONSTANT_OVERFLOW (tem))
7260 return fold (build (swap_tree_comparison (code), type,
7261 TREE_OPERAND (arg0, 1), tem));
7263 /* Fold comparisons against built-in math functions. */
7264 if (TREE_CODE (arg1) == REAL_CST
7265 && flag_unsafe_math_optimizations
7266 && ! flag_errno_math)
7268 enum built_in_function fcode = builtin_mathfn_code (arg0);
7270 if (fcode != END_BUILTINS)
7272 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7273 if (tem != NULL_TREE)
7274 return tem;
7279 /* Convert foo++ == CONST into ++foo == CONST + INCR.
7280 First, see if one arg is constant; find the constant arg
7281 and the other one. */
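/* (E.g. i++ == 5 becomes ++i == 6: comparing the incremented
   value against CONST + 1 is equivalent to comparing the old
   value against CONST.) */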
7283 tree constop = 0, varop = NULL_TREE;
7284 int constopnum = -1;
7286 if (TREE_CONSTANT (arg1))
7287 constopnum = 1, constop = arg1, varop = arg0;
7288 if (TREE_CONSTANT (arg0))
7289 constopnum = 0, constop = arg0, varop = arg1;
7291 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
7293 /* This optimization is invalid for ordered comparisons
7294 if CONST+INCR overflows or if foo+incr might overflow.
7295 This optimization is invalid for floating point due to rounding.
7296 For pointer types we assume overflow doesn't happen. */
7297 if (POINTER_TYPE_P (TREE_TYPE (varop))
7298 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7299 && (code == EQ_EXPR || code == NE_EXPR)))
7301 tree newconst
7302 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
7303 constop, TREE_OPERAND (varop, 1)));
7305 /* Do not overwrite the current varop to be a preincrement,
7306 create a new node so that we won't confuse our caller who
7307 might create trees and throw them away, reusing the
7308 arguments that they passed to build. This shows up in
7309 the THEN or ELSE parts of ?: being postincrements. */
7310 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7311 TREE_OPERAND (varop, 0),
7312 TREE_OPERAND (varop, 1));
7314 /* If VAROP is a reference to a bitfield, we must mask
7315 the constant by the width of the field. */
7316 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7317 && DECL_BIT_FIELD (TREE_OPERAND
7318 (TREE_OPERAND (varop, 0), 1)))
7320 int size
7321 = TREE_INT_CST_LOW (DECL_SIZE
7322 (TREE_OPERAND
7323 (TREE_OPERAND (varop, 0), 1)));
7324 tree mask, unsigned_type;
7325 unsigned int precision;
7326 tree folded_compare;
7328 /* First check whether the comparison would come out
7329 always the same. If we don't do that we would
7330 change the meaning with the masking. */
7331 if (constopnum == 0)
7332 folded_compare = fold (build (code, type, constop,
7333 TREE_OPERAND (varop, 0)));
7334 else
7335 folded_compare = fold (build (code, type,
7336 TREE_OPERAND (varop, 0),
7337 constop));
7338 if (integer_zerop (folded_compare)
7339 || integer_onep (folded_compare))
7340 return omit_one_operand (type, folded_compare, varop);
7342 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
7343 precision = TYPE_PRECISION (unsigned_type);
7344 mask = build_int_2 (~0, ~0);
7345 TREE_TYPE (mask) = unsigned_type;
7346 force_fit_type (mask, 0);
7347 mask = const_binop (RSHIFT_EXPR, mask,
7348 size_int (precision - size), 0);
7349 newconst = fold (build (BIT_AND_EXPR,
7350 TREE_TYPE (varop), newconst,
7351 fold_convert (TREE_TYPE (varop),
7352 mask)));
7355 t = build (code, type,
7356 (constopnum == 0) ? newconst : varop,
7357 (constopnum == 1) ? newconst : varop);
7358 return t;
7361 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7363 if (POINTER_TYPE_P (TREE_TYPE (varop))
7364 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7365 && (code == EQ_EXPR || code == NE_EXPR)))
7367 tree newconst
7368 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7369 constop, TREE_OPERAND (varop, 1)));
7371 /* Do not overwrite the current varop to be a predecrement,
7372 create a new node so that we won't confuse our caller who
7373 might create trees and throw them away, reusing the
7374 arguments that they passed to build. This shows up in
7375 the THEN or ELSE parts of ?: being postdecrements. */
7376 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7377 TREE_OPERAND (varop, 0),
7378 TREE_OPERAND (varop, 1));
7380 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7381 && DECL_BIT_FIELD (TREE_OPERAND
7382 (TREE_OPERAND (varop, 0), 1)))
7384 int size
7385 = TREE_INT_CST_LOW (DECL_SIZE
7386 (TREE_OPERAND
7387 (TREE_OPERAND (varop, 0), 1)));
7388 tree mask, unsigned_type;
7389 unsigned int precision;
7390 tree folded_compare;
7392 if (constopnum == 0)
7393 folded_compare = fold (build (code, type, constop,
7394 TREE_OPERAND (varop, 0)));
7395 else
7396 folded_compare = fold (build (code, type,
7397 TREE_OPERAND (varop, 0),
7398 constop));
7399 if (integer_zerop (folded_compare)
7400 || integer_onep (folded_compare))
7401 return omit_one_operand (type, folded_compare, varop);
7403 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
7404 precision = TYPE_PRECISION (unsigned_type);
7405 mask = build_int_2 (~0, ~0);
7406 TREE_TYPE (mask) = unsigned_type;
7407 force_fit_type (mask, 0);
7408 mask = const_binop (RSHIFT_EXPR, mask,
7409 size_int (precision - size), 0);
7410 newconst = fold (build (BIT_AND_EXPR,
7411 TREE_TYPE (varop), newconst,
7412 fold_convert (TREE_TYPE (varop),
7413 mask)));
7416 t = build (code, type,
7417 (constopnum == 0) ? newconst : varop,
7418 (constopnum == 1) ? newconst : varop);
7419 return t;
7424 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7425 This transformation affects the cases which are handled in later
7426 optimizations involving comparisons with non-negative constants. */
7427 if (TREE_CODE (arg1) == INTEGER_CST
7428 && TREE_CODE (arg0) != INTEGER_CST
7429 && tree_int_cst_sgn (arg1) > 0)
7431 switch (code)
7433 case GE_EXPR:
7434 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7435 return fold (build (GT_EXPR, type, arg0, arg1));
7437 case LT_EXPR:
7438 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7439 return fold (build (LE_EXPR, type, arg0, arg1));
7441 default:
7442 break;
7446 /* Comparisons with the highest or lowest possible integer of
7447 the specified size will have known values. */
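/* (E.g. for an 8-bit unsigned type: X > 255 is always false,
   X <= 255 is always true, X >= 255 becomes X == 255 and
   X < 255 becomes X != 255; the signed bounds -128 and 127
   behave the same way.) */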
7449 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7451 if (TREE_CODE (arg1) == INTEGER_CST
7452 && ! TREE_CONSTANT_OVERFLOW (arg1)
7453 && width <= HOST_BITS_PER_WIDE_INT
7454 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7455 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7457 unsigned HOST_WIDE_INT signed_max;
7458 unsigned HOST_WIDE_INT max, min;
7460 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7462 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7464 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7465 min = 0;
7467 else
7469 max = signed_max;
7470 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7473 if (TREE_INT_CST_HIGH (arg1) == 0
7474 && TREE_INT_CST_LOW (arg1) == max)
7475 switch (code)
7477 case GT_EXPR:
7478 return omit_one_operand (type,
7479 fold_convert (type,
7480 integer_zero_node),
7481 arg0);
7482 case GE_EXPR:
7483 return fold (build (EQ_EXPR, type, arg0, arg1));
7485 case LE_EXPR:
7486 return omit_one_operand (type,
7487 fold_convert (type,
7488 integer_one_node),
7489 arg0);
7490 case LT_EXPR:
7491 return fold (build (NE_EXPR, type, arg0, arg1));
7493 /* The GE_EXPR and LT_EXPR cases above are not normally
7494 reached because of previous transformations. */
7496 default:
7497 break;
7499 else if (TREE_INT_CST_HIGH (arg1) == 0
7500 && TREE_INT_CST_LOW (arg1) == max - 1)
7501 switch (code)
7503 case GT_EXPR:
7504 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7505 return fold (build (EQ_EXPR, type, arg0, arg1));
7506 case LE_EXPR:
7507 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7508 return fold (build (NE_EXPR, type, arg0, arg1));
7509 default:
7510 break;
7512 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7513 && TREE_INT_CST_LOW (arg1) == min)
7514 switch (code)
7516 case LT_EXPR:
7517 return omit_one_operand (type,
7518 fold_convert (type,
7519 integer_zero_node),
7520 arg0);
7521 case LE_EXPR:
7522 return fold (build (EQ_EXPR, type, arg0, arg1));
7524 case GE_EXPR:
7525 return omit_one_operand (type,
7526 fold_convert (type,
7527 integer_one_node),
7528 arg0);
7529 case GT_EXPR:
7530 return fold (build (NE_EXPR, type, arg0, arg1));
7532 default:
7533 break;
7535 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7536 && TREE_INT_CST_LOW (arg1) == min + 1)
7537 switch (code)
7539 case GE_EXPR:
7540 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7541 return fold (build (NE_EXPR, type, arg0, arg1));
7542 case LT_EXPR:
7543 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7544 return fold (build (EQ_EXPR, type, arg0, arg1));
7545 default:
7546 break;
7549 else if (TREE_INT_CST_HIGH (arg1) == 0
7550 && TREE_INT_CST_LOW (arg1) == signed_max
7551 && TREE_UNSIGNED (TREE_TYPE (arg1))
7552 /* signed_type does not work on pointer types. */
7553 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7555 /* The following case also applies to X < signed_max+1
7556 and X >= signed_max+1 because of previous transformations. */
7557 if (code == LE_EXPR || code == GT_EXPR)
7559 tree st0, st1;
7560 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7561 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7562 return fold
7563 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7564 type, fold_convert (st0, arg0),
7565 fold_convert (st1, integer_zero_node)));
7571 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7572 a MINUS_EXPR of a constant, we can convert it into a comparison with
7573 a revised constant as long as no overflow occurs. */
7574 if ((code == EQ_EXPR || code == NE_EXPR)
7575 && TREE_CODE (arg1) == INTEGER_CST
7576 && (TREE_CODE (arg0) == PLUS_EXPR
7577 || TREE_CODE (arg0) == MINUS_EXPR)
7578 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7579 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7580 ? MINUS_EXPR : PLUS_EXPR,
7581 arg1, TREE_OPERAND (arg0, 1), 0))
7582 && ! TREE_CONSTANT_OVERFLOW (tem))
7583 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7585 /* Similarly for a NEGATE_EXPR. */
7586 else if ((code == EQ_EXPR || code == NE_EXPR)
7587 && TREE_CODE (arg0) == NEGATE_EXPR
7588 && TREE_CODE (arg1) == INTEGER_CST
7589 && 0 != (tem = negate_expr (arg1))
7590 && TREE_CODE (tem) == INTEGER_CST
7591 && ! TREE_CONSTANT_OVERFLOW (tem))
7592 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7594 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7595 for !=. Don't do this for ordered comparisons due to overflow. */
7596 else if ((code == NE_EXPR || code == EQ_EXPR)
7597 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7598 return fold (build (code, type,
7599 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7601 /* If we are widening one operand of an integer comparison,
7602 see if the other operand is similarly being widened. Perhaps we
7603 can do the comparison in the narrower type. */
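/* (E.g. with a signed char c, (int) c == 42 can be done as the
   narrower comparison c == 42, because 42 fits in a signed char;
   illustrative values.) */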
7604 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7605 && TREE_CODE (arg0) == NOP_EXPR
7606 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7607 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7608 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7609 || (TREE_CODE (t1) == INTEGER_CST
7610 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7611 return fold (build (code, type, tem,
7612 fold_convert (TREE_TYPE (tem), t1)));
7614 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7615 constant, we can simplify it. */
7616 else if (TREE_CODE (arg1) == INTEGER_CST
7617 && (TREE_CODE (arg0) == MIN_EXPR
7618 || TREE_CODE (arg0) == MAX_EXPR)
7619 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7620 return optimize_minmax_comparison (t);
7622 /* If we are comparing an ABS_EXPR with a constant, we can
7623 convert all the cases into explicit comparisons, but they may
7624 well not be faster than doing the ABS and one comparison.
7625 But ABS (X) <= C is a range comparison, which becomes a subtraction
7626 and a comparison, and is probably faster. */
7627 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7628 && TREE_CODE (arg0) == ABS_EXPR
7629 && ! TREE_SIDE_EFFECTS (arg0)
7630 && (0 != (tem = negate_expr (arg1)))
7631 && TREE_CODE (tem) == INTEGER_CST
7632 && ! TREE_CONSTANT_OVERFLOW (tem))
7633 return fold (build (TRUTH_ANDIF_EXPR, type,
7634 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7635 build (LE_EXPR, type,
7636 TREE_OPERAND (arg0, 0), arg1)));
7638 /* If this is an EQ or NE comparison with zero and ARG0 is
7639 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7640 two operations, but the latter can be done in one less insn
7641 on machines that have only two-operand insns or on which a
7642 constant cannot be the first operand. */
7643 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7644 && TREE_CODE (arg0) == BIT_AND_EXPR)
7646 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7647 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7648 return
7649 fold (build (code, type,
7650 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7651 build (RSHIFT_EXPR,
7652 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7653 TREE_OPERAND (arg0, 1),
7654 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7655 fold_convert (TREE_TYPE (arg0),
7656 integer_one_node)),
7657 arg1));
7658 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7659 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7660 return
7661 fold (build (code, type,
7662 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7663 build (RSHIFT_EXPR,
7664 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7665 TREE_OPERAND (arg0, 0),
7666 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7667 fold_convert (TREE_TYPE (arg0),
7668 integer_one_node)),
7669 arg1));
7672 /* If this is an NE or EQ comparison of zero against the result of a
7673 signed MOD operation whose second operand is a power of 2, make
7674 the MOD operation unsigned since it is simpler and equivalent. */
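/* (E.g. for int x, x % 4 == 0 holds exactly when
   (unsigned) x % 4U == 0, and the unsigned form can be expanded
   as a simple mask of the two low bits.) */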
7675 if ((code == NE_EXPR || code == EQ_EXPR)
7676 && integer_zerop (arg1)
7677 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7678 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7679 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7680 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7681 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7682 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7684 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7685 tree newmod = build (TREE_CODE (arg0), newtype,
7686 fold_convert (newtype,
7687 TREE_OPERAND (arg0, 0)),
7688 fold_convert (newtype,
7689 TREE_OPERAND (arg0, 1)));
7691 return build (code, type, newmod, fold_convert (newtype, arg1));
7694 /* If this is an NE comparison of zero with an AND of one, remove the
7695 comparison since the AND will give the correct value. */
7696 if (code == NE_EXPR && integer_zerop (arg1)
7697 && TREE_CODE (arg0) == BIT_AND_EXPR
7698 && integer_onep (TREE_OPERAND (arg0, 1)))
7699 return fold_convert (type, arg0);
7701 /* If we have (A & C) == C where C is a power of 2, convert this into
7702 (A & C) != 0. Similarly for NE_EXPR. */
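/* (E.g. (x & 8) == 8 becomes (x & 8) != 0: a single-bit mask can
   only yield 0 or the bit itself.) */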
7703 if ((code == EQ_EXPR || code == NE_EXPR)
7704 && TREE_CODE (arg0) == BIT_AND_EXPR
7705 && integer_pow2p (TREE_OPERAND (arg0, 1))
7706 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7707 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7708 arg0, integer_zero_node));
7710 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7711 2, then fold the expression into shifts and logical operations. */
7712 tem = fold_single_bit_test (code, arg0, arg1, type);
7713 if (tem)
7714 return tem;
7716 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7717 Similarly for NE_EXPR. */
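/* (E.g. (x & 3) == 4 can never hold, because the AND clears every
   bit outside the mask 3; a nonzero D & ~C detects this.) */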
7718 if ((code == EQ_EXPR || code == NE_EXPR)
7719 && TREE_CODE (arg0) == BIT_AND_EXPR
7720 && TREE_CODE (arg1) == INTEGER_CST
7721 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7723 tree dandnotc
7724 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7725 arg1, build1 (BIT_NOT_EXPR,
7726 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7727 TREE_OPERAND (arg0, 1))));
7728 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7729 if (integer_nonzerop (dandnotc))
7730 return omit_one_operand (type, rslt, arg0);
7733 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7734 Similarly for NE_EXPR. */
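/* (E.g. (x | 4) == 3 can never hold, because the OR forces bit 2
   on while 3 has it clear; a nonzero C & ~D detects this.) */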
7735 if ((code == EQ_EXPR || code == NE_EXPR)
7736 && TREE_CODE (arg0) == BIT_IOR_EXPR
7737 && TREE_CODE (arg1) == INTEGER_CST
7738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7740 tree candnotd
7741 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7742 TREE_OPERAND (arg0, 1),
7743 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7744 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7745 if (integer_nonzerop (candnotd))
7746 return omit_one_operand (type, rslt, arg0);
7749 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7750 and similarly for >= into !=. */
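/* (E.g. for unsigned x, x < (1 << y) holds exactly when x has no
   bits set at position y or above, i.e. when x >> y == 0.) */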
7751 if ((code == LT_EXPR || code == GE_EXPR)
7752 && TREE_UNSIGNED (TREE_TYPE (arg0))
7753 && TREE_CODE (arg1) == LSHIFT_EXPR
7754 && integer_onep (TREE_OPERAND (arg1, 0)))
7755 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7756 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7757 TREE_OPERAND (arg1, 1)),
7758 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7760 else if ((code == LT_EXPR || code == GE_EXPR)
7761 && TREE_UNSIGNED (TREE_TYPE (arg0))
7762 && (TREE_CODE (arg1) == NOP_EXPR
7763 || TREE_CODE (arg1) == CONVERT_EXPR)
7764 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7765 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7766 return
7767 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7768 fold_convert (TREE_TYPE (arg0),
7769 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7770 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7771 1))),
7772 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7774 /* Simplify comparison of something with itself. (For IEEE
7775 floating-point, we can only do some of these simplifications.) */
7776 if (operand_equal_p (arg0, arg1, 0))
7778 switch (code)
7780 case EQ_EXPR:
7781 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7782 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7783 return constant_boolean_node (1, type);
7784 break;
7786 case GE_EXPR:
7787 case LE_EXPR:
7788 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7789 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7790 return constant_boolean_node (1, type);
7791 return fold (build (EQ_EXPR, type, arg0, arg1));
7793 case NE_EXPR:
7794 /* For NE, we can only do this simplification if integer
7795 or we don't honor IEEE floating point NaNs. */
7796 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7797 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7798 break;
7799 /* ... fall through ... */
7800 case GT_EXPR:
7801 case LT_EXPR:
7802 return constant_boolean_node (0, type);
7803 default:
7804 abort ();
7808 /* If we are comparing an expression that just has comparisons
7809 of two integer values, arithmetic expressions of those comparisons,
7810 and constants, we can simplify it. There are only three cases
7811 to check: the two values can either be equal, the first can be
7812 greater, or the second can be greater. Fold the expression for
7813 those three values. Since each value must be 0 or 1, we have
7814 eight possibilities, each of which corresponds to the constant 0
7815 or 1 or one of the six possible comparisons.
7817 This handles common cases like (a > b) == 0 but also handles
7818 expressions like ((x > y) - (y > x)) > 0, which supposedly
7819 occur in macroized code. */
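/* (E.g. for ((x > y) - (y > x)) > 0 the three orderings of x and
   y give the results 1, 0 and 0, i.e. mask 4 below, so the whole
   expression can fold to x > y.) */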
7821 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7823 tree cval1 = 0, cval2 = 0;
7824 int save_p = 0;
7826 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7827 /* Don't handle degenerate cases here; they should already
7828 have been handled anyway. */
7829 && cval1 != 0 && cval2 != 0
7830 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7831 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7832 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7833 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7834 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7835 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7836 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7838 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7839 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7841 /* We can't just pass T to eval_subst in case cval1 or cval2
7842 was the same as ARG1. */
7844 tree high_result
7845 = fold (build (code, type,
7846 eval_subst (arg0, cval1, maxval, cval2, minval),
7847 arg1));
7848 tree equal_result
7849 = fold (build (code, type,
7850 eval_subst (arg0, cval1, maxval, cval2, maxval),
7851 arg1));
7852 tree low_result
7853 = fold (build (code, type,
7854 eval_subst (arg0, cval1, minval, cval2, maxval),
7855 arg1));
7857 /* All three of these results should be 0 or 1. Confirm they
7858 are. Then use those values to select the proper code
7859 to use. */
7861 if ((integer_zerop (high_result)
7862 || integer_onep (high_result))
7863 && (integer_zerop (equal_result)
7864 || integer_onep (equal_result))
7865 && (integer_zerop (low_result)
7866 || integer_onep (low_result)))
7868 /* Make a 3-bit mask with the high-order bit being the
7869 value for `>', the next for `=', and the low for `<'. */
7870 switch ((integer_onep (high_result) * 4)
7871 + (integer_onep (equal_result) * 2)
7872 + integer_onep (low_result))
7874 case 0:
7875 /* Always false. */
7876 return omit_one_operand (type, integer_zero_node, arg0);
7877 case 1:
7878 code = LT_EXPR;
7879 break;
7880 case 2:
7881 code = EQ_EXPR;
7882 break;
7883 case 3:
7884 code = LE_EXPR;
7885 break;
7886 case 4:
7887 code = GT_EXPR;
7888 break;
7889 case 5:
7890 code = NE_EXPR;
7891 break;
7892 case 6:
7893 code = GE_EXPR;
7894 break;
7895 case 7:
7896 /* Always true. */
7897 return omit_one_operand (type, integer_one_node, arg0);
7900 t = build (code, type, cval1, cval2);
7901 if (save_p)
7902 return save_expr (t);
7903 else
7904 return fold (t);
7909 /* If this is a comparison of a field, we may be able to simplify it. */
7910 if (((TREE_CODE (arg0) == COMPONENT_REF
7911 && (*lang_hooks.can_use_bit_fields_p) ())
7912 || TREE_CODE (arg0) == BIT_FIELD_REF)
7913 && (code == EQ_EXPR || code == NE_EXPR)
7914 /* Handle the constant case even without -O
7915 to make sure the warnings are given. */
7916 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7918 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7919 if (t1)
7920 return t1;
7923 /* If this is a comparison of complex values and either or both sides
7924 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7925 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7926 This may prevent needless evaluations. */
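/* (E.g. a == b for complex a and b splits into
   realpart (a) == realpart (b) && imagpart (a) == imagpart (b),
   so a mismatch in the real parts skips the imaginary compare.) */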
7927 if ((code == EQ_EXPR || code == NE_EXPR)
7928 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7929 && (TREE_CODE (arg0) == COMPLEX_EXPR
7930 || TREE_CODE (arg1) == COMPLEX_EXPR
7931 || TREE_CODE (arg0) == COMPLEX_CST
7932 || TREE_CODE (arg1) == COMPLEX_CST))
7934 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7935 tree real0, imag0, real1, imag1;
7937 arg0 = save_expr (arg0);
7938 arg1 = save_expr (arg1);
7939 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7940 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7941 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7942 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7944 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7945 : TRUTH_ORIF_EXPR),
7946 type,
7947 fold (build (code, type, real0, real1)),
7948 fold (build (code, type, imag0, imag1))));
7951 /* Optimize comparisons of strlen vs zero to a compare of the
7952 first character of the string vs zero. To wit,
7953 strlen(ptr) == 0 => *ptr == 0
7954 strlen(ptr) != 0 => *ptr != 0
7955 Other cases should reduce to one of these two (or a constant)
7956 due to the return value of strlen being unsigned. */
7957 if ((code == EQ_EXPR || code == NE_EXPR)
7958 && integer_zerop (arg1)
7959 && TREE_CODE (arg0) == CALL_EXPR)
7961 tree fndecl = get_callee_fndecl (arg0);
7962 tree arglist;
7964 if (fndecl
7965 && DECL_BUILT_IN (fndecl)
7966 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7967 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7968 && (arglist = TREE_OPERAND (arg0, 1))
7969 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7970 && ! TREE_CHAIN (arglist))
7971 return fold (build (code, type,
7972 build1 (INDIRECT_REF, char_type_node,
7973 TREE_VALUE (arglist)),
7974 integer_zero_node));
7977 /* From here on, the only cases we handle are when the result is
7978 known to be a constant.
7980 To compute GT, swap the arguments and do LT.
7981 To compute GE, do LT and invert the result.
7982 To compute LE, swap the arguments, do LT and invert the result.
7983 To compute NE, do EQ and invert the result.
7985 Therefore, the code below must handle only EQ and LT. */
7987 if (code == LE_EXPR || code == GT_EXPR)
7989 tem = arg0, arg0 = arg1, arg1 = tem;
7990 code = swap_tree_comparison (code);
7993 /* Note that it is safe to invert for real values here because we
7994 will check below in the one case where it matters. */
7996 t1 = NULL_TREE;
7997 invert = 0;
7998 if (code == NE_EXPR || code == GE_EXPR)
8000 invert = 1;
8001 code = invert_tree_comparison (code);
8004 /* Compute a result for LT or EQ if args permit;
8005 otherwise return T. */
8006 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8008 if (code == EQ_EXPR)
8009 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
8010 else
8011 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
8012 ? INT_CST_LT_UNSIGNED (arg0, arg1)
8013 : INT_CST_LT (arg0, arg1)),
8014 0);
8017 #if 0 /* This is no longer useful, but breaks some real code. */
8018 /* Assume a nonexplicit constant cannot equal an explicit one,
8019 since such code would be undefined anyway.
8020 Exception: on sysvr4, using #pragma weak,
8021 a label can come out as 0. */
8022 else if (TREE_CODE (arg1) == INTEGER_CST
8023 && !integer_zerop (arg1)
8024 && TREE_CONSTANT (arg0)
8025 && TREE_CODE (arg0) == ADDR_EXPR
8026 && code == EQ_EXPR)
8027 t1 = build_int_2 (0, 0);
8028 #endif
8029 /* Two real constants can be compared explicitly. */
8030 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8032 /* If either operand is a NaN, the result is false with two
8033 exceptions: First, an NE_EXPR is true on NaNs, but that case
8034 is already handled correctly since we will be inverting the
8035 result for NE_EXPR. Second, if we had inverted a LE_EXPR
8036 or a GE_EXPR into a LT_EXPR, we must return true so that it
8037 will be inverted into false. */
8039 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8040 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
8041 t1 = build_int_2 (invert && code == LT_EXPR, 0);
8043 else if (code == EQ_EXPR)
8044 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
8045 TREE_REAL_CST (arg1)),
8046 0);
8047 else
8048 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
8049 TREE_REAL_CST (arg1)),
8050 0);
8053 if (t1 == NULL_TREE)
8054 return t;
8056 if (invert)
8057 TREE_INT_CST_LOW (t1) ^= 1;
8059 TREE_TYPE (t1) = type;
8060 if (TREE_CODE (type) == BOOLEAN_TYPE)
8061 return (*lang_hooks.truthvalue_conversion) (t1);
8062 return t1;
8064 case COND_EXPR:
8065 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8066 so all simple results must be passed through pedantic_non_lvalue. */
8067 if (TREE_CODE (arg0) == INTEGER_CST)
8069 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8070 /* Only optimize constant conditions when the selected branch
8071 has the same type as the COND_EXPR. This avoids optimizing
8072 away "c ? x : throw", where the throw has a void type. */
8073 if (! VOID_TYPE_P (TREE_TYPE (tem))
8074 || VOID_TYPE_P (TREE_TYPE (t)))
8075 return pedantic_non_lvalue (tem);
8076 return t;
8078 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
8079 return pedantic_omit_one_operand (type, arg1, arg0);
8081 /* If we have A op B ? A : C, we may be able to convert this to a
8082 simpler expression, depending on the operation and the values
8083 of B and C. Signed zeros prevent all of these transformations,
8084 for reasons given above each one. */
8086 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8087 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8088 arg1, TREE_OPERAND (arg0, 1))
8089 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8091 tree arg2 = TREE_OPERAND (t, 2);
8092 enum tree_code comp_code = TREE_CODE (arg0);
8094 STRIP_NOPS (arg2);
8096 /* If we have A op 0 ? A : -A, consider applying the following
8097 transformations:
8099 A == 0? A : -A same as -A
8100 A != 0? A : -A same as A
8101 A >= 0? A : -A same as abs (A)
8102 A > 0? A : -A same as abs (A)
8103 A <= 0? A : -A same as -abs (A)
8104 A < 0? A : -A same as -abs (A)
8106 None of these transformations work for modes with signed
8107 zeros. If A is +/-0, the first two transformations will
8108 change the sign of the result (from +0 to -0, or vice
8109 versa). The last four will fix the sign of the result,
8110 even though the original expressions could be positive or
8111 negative, depending on the sign of A.
8113 Note that all these transformations are correct if A is
8114 NaN, since the two alternatives (A and -A) are also NaNs. */
8115 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8116 ? real_zerop (TREE_OPERAND (arg0, 1))
8117 : integer_zerop (TREE_OPERAND (arg0, 1)))
8118 && TREE_CODE (arg2) == NEGATE_EXPR
8119 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8120 switch (comp_code)
8122 case EQ_EXPR:
8123 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8124 tem = fold_convert (type, negate_expr (tem));
8125 return pedantic_non_lvalue (tem);
8126 case NE_EXPR:
8127 return pedantic_non_lvalue (fold_convert (type, arg1));
8128 case GE_EXPR:
8129 case GT_EXPR:
8130 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8131 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8132 (TREE_TYPE (arg1)), arg1);
8133 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8134 return pedantic_non_lvalue (fold_convert (type, arg1));
8135 case LE_EXPR:
8136 case LT_EXPR:
8137 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8138 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8139 (TREE_TYPE (arg1)), arg1);
8140 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8141 arg1 = negate_expr (fold_convert (type, arg1));
8142 return pedantic_non_lvalue (arg1);
8143 default:
8144 abort ();
8147 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8148 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8149 both transformations are correct when A is NaN: A != 0
8150 is then true, and A == 0 is false. */
8152 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8154 if (comp_code == NE_EXPR)
8155 return pedantic_non_lvalue (fold_convert (type, arg1));
8156 else if (comp_code == EQ_EXPR)
8157 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8160 /* Try some transformations of A op B ? A : B.
8162 A == B? A : B same as B
8163 A != B? A : B same as A
8164 A >= B? A : B same as max (A, B)
8165 A > B? A : B same as max (B, A)
8166 A <= B? A : B same as min (A, B)
8167 A < B? A : B same as min (B, A)
8169 As above, these transformations don't work in the presence
8170 of signed zeros. For example, if A and B are zeros of
8171 opposite sign, the first two transformations will change
8172 the sign of the result. In the last four, the original
8173 expressions give different results for (A=+0, B=-0) and
8174 (A=-0, B=+0), but the transformed expressions do not.
8176 The first two transformations are correct if either A or B
8177 is a NaN. In the first transformation, the condition will
8178 be false, and B will indeed be chosen. In the case of the
8179 second transformation, the condition A != B will be true,
8180 and A will be chosen.
8182 The conversions to max() and min() are not correct if B is
8183 a number and A is not. The conditions in the original
8184 expressions will be false, so all four give B. The min()
8185 and max() versions would give a NaN instead. */
8186 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8187 arg2, TREE_OPERAND (arg0, 0)))
8189 tree comp_op0 = TREE_OPERAND (arg0, 0);
8190 tree comp_op1 = TREE_OPERAND (arg0, 1);
8191 tree comp_type = TREE_TYPE (comp_op0);
8193 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8194 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8196 comp_type = type;
8197 comp_op0 = arg1;
8198 comp_op1 = arg2;
8201 switch (comp_code)
8203 case EQ_EXPR:
8204 return pedantic_non_lvalue (fold_convert (type, arg2));
8205 case NE_EXPR:
8206 return pedantic_non_lvalue (fold_convert (type, arg1));
8207 case LE_EXPR:
8208 case LT_EXPR:
8209 /* In C++ a ?: expression can be an lvalue, so put the
8210 operand which will be used if they are equal first
8211 so that we can convert this back to the
8212 corresponding COND_EXPR. */
8213 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8214 return pedantic_non_lvalue (fold_convert
8215 (type, fold (build (MIN_EXPR, comp_type,
8216 (comp_code == LE_EXPR
8217 ? comp_op0 : comp_op1),
8218 (comp_code == LE_EXPR
8219 ? comp_op1 : comp_op0)))));
8220 break;
8221 case GE_EXPR:
8222 case GT_EXPR:
8223 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8224 return pedantic_non_lvalue (fold_convert
8225 (type, fold (build (MAX_EXPR, comp_type,
8226 (comp_code == GE_EXPR
8227 ? comp_op0 : comp_op1),
8228 (comp_code == GE_EXPR
8229 ? comp_op1 : comp_op0)))));
8230 break;
8231 default:
8232 abort ();
8236 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8237 we might still be able to simplify this. For example,
8238 if C1 is one less or one more than C2, this might have started
8239 out as a MIN or MAX and been transformed by this function.
8240 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
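/* (E.g. x < 3 ? x : 2 is min (x, 2), since here C1 == C2 + 1; the
   cases below recognize each such off-by-one pairing.) */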
8242 if (INTEGRAL_TYPE_P (type)
8243 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8244 && TREE_CODE (arg2) == INTEGER_CST)
8245 switch (comp_code)
8247 case EQ_EXPR:
8248 /* We can replace A with C1 in this case. */
8249 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8250 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8251 TREE_OPERAND (t, 2)));
8253 case LT_EXPR:
8254 /* If C1 is C2 + 1, this is min(A, C2). */
8255 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8256 && operand_equal_p (TREE_OPERAND (arg0, 1),
8257 const_binop (PLUS_EXPR, arg2,
8258 integer_one_node, 0), 1))
8259 return pedantic_non_lvalue
8260 (fold (build (MIN_EXPR, type, arg1, arg2)));
8261 break;
8263 case LE_EXPR:
8264 /* If C1 is C2 - 1, this is min(A, C2). */
8265 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8266 && operand_equal_p (TREE_OPERAND (arg0, 1),
8267 const_binop (MINUS_EXPR, arg2,
8268 integer_one_node, 0), 1))
8269 return pedantic_non_lvalue
8270 (fold (build (MIN_EXPR, type, arg1, arg2)));
8271 break;
8273 case GT_EXPR:
8274 /* If C1 is C2 - 1, this is max(A, C2). */
8275 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8276 && operand_equal_p (TREE_OPERAND (arg0, 1),
8277 const_binop (MINUS_EXPR, arg2,
8278 integer_one_node, 0), 1))
8279 return pedantic_non_lvalue
8280 (fold (build (MAX_EXPR, type, arg1, arg2)));
8281 break;
8283 case GE_EXPR:
8284 /* If C1 is C2 + 1, this is max(A, C2). */
8285 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8286 && operand_equal_p (TREE_OPERAND (arg0, 1),
8287 const_binop (PLUS_EXPR, arg2,
8288 integer_one_node, 0), 1))
8289 return pedantic_non_lvalue
8290 (fold (build (MAX_EXPR, type, arg1, arg2)));
8291 break;
8292 case NE_EXPR:
8293 break;
8294 default:
8295 abort ();
8299 /* If the second operand is simpler than the third, swap them
8300 since that produces better jump optimization results. */
8301 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8302 TREE_OPERAND (t, 2), false))
8304 /* See if this can be inverted. If it can't, possibly because
8305 it was a floating-point inequality comparison, don't do
8306 anything. */
8307 tem = invert_truthvalue (arg0);
8309 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8310 return fold (build (code, type, tem,
8311 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8314 /* Convert A ? 1 : 0 to simply A. */
8315 if (integer_onep (TREE_OPERAND (t, 1))
8316 && integer_zerop (TREE_OPERAND (t, 2))
8317 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8318 call to fold will try to move the conversion inside
8319 a COND, which will recurse. In that case, the COND_EXPR
8320 is probably the best choice, so leave it alone. */
8321 && type == TREE_TYPE (arg0))
8322 return pedantic_non_lvalue (arg0);
8324 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8325 over COND_EXPR in cases such as floating point comparisons. */
8326 if (integer_zerop (TREE_OPERAND (t, 1))
8327 && integer_onep (TREE_OPERAND (t, 2))
8328 && truth_value_p (TREE_CODE (arg0)))
8329 return pedantic_non_lvalue (fold_convert (type,
8330 invert_truthvalue (arg0)));
8332 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8333 operation is simply A & 2. */
8335 if (integer_zerop (TREE_OPERAND (t, 2))
8336 && TREE_CODE (arg0) == NE_EXPR
8337 && integer_zerop (TREE_OPERAND (arg0, 1))
8338 && integer_pow2p (arg1)
8339 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8340 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8341 arg1, 1))
8342 return pedantic_non_lvalue (fold_convert (type,
8343 TREE_OPERAND (arg0, 0)));
8345 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8346 if (integer_zerop (TREE_OPERAND (t, 2))
8347 && truth_value_p (TREE_CODE (arg0))
8348 && truth_value_p (TREE_CODE (arg1)))
8349 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8350 arg0, arg1)));
8352 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8353 if (integer_onep (TREE_OPERAND (t, 2))
8354 && truth_value_p (TREE_CODE (arg0))
8355 && truth_value_p (TREE_CODE (arg1)))
8357 /* Only perform transformation if ARG0 is easily inverted. */
8358 tem = invert_truthvalue (arg0);
8359 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8360 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8361 tem, arg1)));
8364 return t;
8366 case COMPOUND_EXPR:
8367 /* When pedantic, a compound expression can be neither an lvalue
8368 nor an integer constant expression. */
8369 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8370 return t;
8371 /* Don't let (0, 0) be a null pointer constant. */
8372 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8373 : fold_convert (type, arg1);
8374 return pedantic_non_lvalue (tem);
8376 case COMPLEX_EXPR:
8377 if (wins)
8378 return build_complex (type, arg0, arg1);
8379 return t;
8381 case REALPART_EXPR:
8382 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8383 return t;
8384 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8385 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8386 TREE_OPERAND (arg0, 1));
8387 else if (TREE_CODE (arg0) == COMPLEX_CST)
8388 return TREE_REALPART (arg0);
8389 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8390 return fold (build (TREE_CODE (arg0), type,
8391 fold (build1 (REALPART_EXPR, type,
8392 TREE_OPERAND (arg0, 0))),
8393 fold (build1 (REALPART_EXPR,
8394 type, TREE_OPERAND (arg0, 1)))));
8395 return t;
8397 case IMAGPART_EXPR:
8398 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8399 return fold_convert (type, integer_zero_node);
8400 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8401 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8402 TREE_OPERAND (arg0, 0));
8403 else if (TREE_CODE (arg0) == COMPLEX_CST)
8404 return TREE_IMAGPART (arg0);
8405 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8406 return fold (build (TREE_CODE (arg0), type,
8407 fold (build1 (IMAGPART_EXPR, type,
8408 TREE_OPERAND (arg0, 0))),
8409 fold (build1 (IMAGPART_EXPR, type,
8410 TREE_OPERAND (arg0, 1)))));
8411 return t;
8413 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8414 appropriate. */
8415 case CLEANUP_POINT_EXPR:
8416 if (! has_cleanups (arg0))
8417 return TREE_OPERAND (t, 0);
8420 enum tree_code code0 = TREE_CODE (arg0);
8421 int kind0 = TREE_CODE_CLASS (code0);
8422 tree arg00 = TREE_OPERAND (arg0, 0);
8423 tree arg01;
8425 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8426 return fold (build1 (code0, type,
8427 fold (build1 (CLEANUP_POINT_EXPR,
8428 TREE_TYPE (arg00), arg00))));
8430 if (kind0 == '<' || kind0 == '2'
8431 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8432 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8433 || code0 == TRUTH_XOR_EXPR)
8435 arg01 = TREE_OPERAND (arg0, 1);
8437 if (TREE_CONSTANT (arg00)
8438 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8439 && ! has_cleanups (arg00)))
8440 return fold (build (code0, type, arg00,
8441 fold (build1 (CLEANUP_POINT_EXPR,
8442 TREE_TYPE (arg01), arg01))));
8444 if (TREE_CONSTANT (arg01))
8445 return fold (build (code0, type,
8446 fold (build1 (CLEANUP_POINT_EXPR,
8447 TREE_TYPE (arg00), arg00)),
8448 arg01));
8451 return t;
8454 case CALL_EXPR:
8455 /* Check for a built-in function. */
8456 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8457 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8458 == FUNCTION_DECL)
8459 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8461 tree tmp = fold_builtin (expr);
8462 if (tmp)
8463 return tmp;
8465 return t;
8467 default:
8468 return t;
8469 } /* switch (code) */
8472 #ifdef ENABLE_FOLD_CHECKING
8473 #undef fold
8475 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8476 static void fold_check_failed (tree, tree);
8477 void print_fold_checksum (tree);
8479 /* When --enable-checking=fold, compute a digest of expr before
8480 and after the actual fold call to verify that fold did not
8481 accidentally change the original expr. */
8483 tree
8484 fold (tree expr)
8486 tree ret;
8487 struct md5_ctx ctx;
8488 unsigned char checksum_before[16], checksum_after[16];
8489 htab_t ht;
8491 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8492 md5_init_ctx (&ctx);
8493 fold_checksum_tree (expr, &ctx, ht);
8494 md5_finish_ctx (&ctx, checksum_before);
8495 htab_empty (ht);
8497 ret = fold_1 (expr);
8499 md5_init_ctx (&ctx);
8500 fold_checksum_tree (expr, &ctx, ht);
8501 md5_finish_ctx (&ctx, checksum_after);
8502 htab_delete (ht);
8504 if (memcmp (checksum_before, checksum_after, 16))
8505 fold_check_failed (expr, ret);
8507 return ret;
8510 void
8511 print_fold_checksum (tree expr)
8513 struct md5_ctx ctx;
8514 unsigned char checksum[16], cnt;
8515 htab_t ht;
8517 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8518 md5_init_ctx (&ctx);
8519 fold_checksum_tree (expr, &ctx, ht);
8520 md5_finish_ctx (&ctx, checksum);
8521 htab_delete (ht);
8522 for (cnt = 0; cnt < 16; ++cnt)
8523 fprintf (stderr, "%02x", checksum[cnt]);
8524 putc ('\n', stderr);
8527 static void
8528 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8530 internal_error ("fold check: original tree changed by fold");
8533 static void
8534 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8536 void **slot;
8537 enum tree_code code;
8538 char buf[sizeof (struct tree_decl)];
8539 int i, len;
8541 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8542 > sizeof (struct tree_decl)
8543 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8544 abort ();
8545 if (expr == NULL)
8546 return;
8547 slot = htab_find_slot (ht, expr, INSERT);
8548 if (*slot != NULL)
8549 return;
8550 *slot = expr;
8551 code = TREE_CODE (expr);
8552 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8554 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8555 memcpy (buf, expr, tree_size (expr));
8556 expr = (tree) buf;
8557 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8559 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8561 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8562 memcpy (buf, expr, tree_size (expr));
8563 expr = (tree) buf;
8564 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8566 else if (TREE_CODE_CLASS (code) == 't'
8567 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8569 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8570 memcpy (buf, expr, tree_size (expr));
8571 expr = (tree) buf;
8572 TYPE_POINTER_TO (expr) = NULL;
8573 TYPE_REFERENCE_TO (expr) = NULL;
8575 md5_process_bytes (expr, tree_size (expr), ctx);
8576 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8577 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8578 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8579 len = TREE_CODE_LENGTH (code);
8580 switch (TREE_CODE_CLASS (code))
8582 case 'c':
8583 switch (code)
8585 case STRING_CST:
8586 md5_process_bytes (TREE_STRING_POINTER (expr),
8587 TREE_STRING_LENGTH (expr), ctx);
8588 break;
8589 case COMPLEX_CST:
8590 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8591 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8592 break;
8593 case VECTOR_CST:
8594 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8595 break;
8596 default:
8597 break;
8599 break;
8600 case 'x':
8601 switch (code)
8603 case TREE_LIST:
8604 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8605 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8606 break;
8607 case TREE_VEC:
8608 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8609 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8610 break;
8611 default:
8612 break;
8614 break;
8615 case 'e':
8616 switch (code)
8618 case SAVE_EXPR: len = 2; break;
8619 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8620 case RTL_EXPR: len = 0; break;
8621 case WITH_CLEANUP_EXPR: len = 2; break;
8622 default: break;
8624 /* Fall through. */
8625 case 'r':
8626 case '<':
8627 case '1':
8628 case '2':
8629 case 's':
8630 for (i = 0; i < len; ++i)
8631 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8632 break;
8633 case 'd':
8634 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8635 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8636 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8637 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8638 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8639 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8640 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8641 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8642 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8643 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8644 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8645 break;
8646 case 't':
8647 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8648 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8649 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8650 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8651 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8652 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8653 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8654 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8655 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8656 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8657 break;
8658 default:
8659 break;
8663 #endif
8665 /* Perform constant folding and related simplification of initializer
8666 expression EXPR. This behaves identically to "fold" but ignores
8667 potential run-time traps and exceptions that fold must preserve. */
8669 tree
8670 fold_initializer (tree expr)
8672 int saved_signaling_nans = flag_signaling_nans;
8673 int saved_trapping_math = flag_trapping_math;
8674 int saved_trapv = flag_trapv;
8675 tree result;
8677 flag_signaling_nans = 0;
8678 flag_trapping_math = 0;
8679 flag_trapv = 0;
8681 result = fold (expr);
8683 flag_signaling_nans = saved_signaling_nans;
8684 flag_trapping_math = saved_trapping_math;
8685 flag_trapv = saved_trapv;
8687 return result;
8690 /* Determine if first argument is a multiple of second argument. Return 0 if
8691 it is not, or we cannot easily determine that it is.
8693 An example of the sort of thing we care about (at this point; this routine
8694 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8695 fold cases do now) is discovering that
8697 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8699 is a multiple of
8701 SAVE_EXPR (J * 8)
8703 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8705 This code also handles discovering that
8707 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8709 is a multiple of 8 so we don't have to worry about dealing with a
8710 possible remainder.
8712 Note that we *look* inside a SAVE_EXPR only to determine how it was
8713 calculated; it is not safe for fold to do much of anything else with the
8714 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8715 at run time. For example, the latter example above *cannot* be implemented
8716 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8717 evaluation time of the original SAVE_EXPR is not necessarily the same at
8718 the time the new expression is evaluated. The only optimization of this
8719 sort that would be valid is changing
8721 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8723 divided by 8 to
8725 SAVE_EXPR (I) * SAVE_EXPR (J)
8727 (where the same SAVE_EXPR (J) is used in the original and the
8728 transformed version). */
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;
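
    /* Worked example (added note): for TOP = X << 3, the code above forms
       t1 = 1 << 3 = 8, so the question "is X << 3 a multiple of BOTTOM?"
       reduces to "is 8 a multiple of BOTTOM?".  */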

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* ... fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TREE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
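
/* Worked example (added for illustration, not in the original source):
   for TOP = J * 8 + 16 and BOTTOM = 8 in type "int", the PLUS_EXPR rule
   requires both operands to be multiples of 8: J * 8 qualifies through
   the MULT_EXPR rule (its second operand equals BOTTOM), and 16
   qualifies through the INTEGER_CST rule (16 % 8 == 0), so the function
   returns 1.  */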

/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
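      /* Worked example (added note): if x and y are "unsigned char"
         values zero-extended to a 32-bit int, each term is at most 255,
         so the sum is at most 510 < 2^31 and the sign bit of the result
         cannot be set; the test below requires max (8, 8) + 1 = 9 < 32.  */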
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their widths is smaller than the
         precision of the result.  */
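      /* Worked example (added note): for "unsigned char" operands widened
         to a 32-bit int, the product is at most 255 * 255 = 65025 < 2^16,
         which can never reach the sign bit; the test below checks
         8 + 8 = 16 < 32.  */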
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
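
    /* Added note: the MOD cases above examine only the dividend because,
       for the truncating division used by C's % operator, the remainder
       carries the sign of its first operand.  */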

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TREE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TREE_UNSIGNED (inner_type);
          }
      }
      break;
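
    /* Worked example (added note): converting "unsigned short" (16 bits)
       to a 32-bit signed int zero-extends and is therefore non-negative;
       converting "unsigned int" to "int" (equal precision) can turn on
       the sign bit, so the integer-to-integer rule above rejects it.  */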

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
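
    /* Added note: MIN needs both arms non-negative, since the result may
       be either one, while MAX needs only one, since the result is at
       least as large as the non-negative arm.  */
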
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_CABS:
            case BUILT_IN_CABSL:
            case BUILT_IN_CABSF:
            case BUILT_IN_EXP:
            case BUILT_IN_EXPF:
            case BUILT_IN_EXPL:
            case BUILT_IN_EXP2:
            case BUILT_IN_EXP2F:
            case BUILT_IN_EXP2L:
            case BUILT_IN_EXP10:
            case BUILT_IN_EXP10F:
            case BUILT_IN_EXP10L:
            case BUILT_IN_FABS:
            case BUILT_IN_FABSF:
            case BUILT_IN_FABSL:
            case BUILT_IN_FFS:
            case BUILT_IN_FFSL:
            case BUILT_IN_FFSLL:
            case BUILT_IN_PARITY:
            case BUILT_IN_PARITYL:
            case BUILT_IN_PARITYLL:
            case BUILT_IN_POPCOUNT:
            case BUILT_IN_POPCOUNTL:
            case BUILT_IN_POPCOUNTLL:
            case BUILT_IN_POW10:
            case BUILT_IN_POW10F:
            case BUILT_IN_POW10L:
            case BUILT_IN_SQRT:
            case BUILT_IN_SQRTF:
            case BUILT_IN_SQRTL:
              return 1;
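
            /* Added note: everything above is non-negative by construction
               (absolute values, exponentials, square roots, bit counts and
               parities); the atan and rounding families below preserve the
               sign of their argument, so the argument itself is checked.  */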

            case BUILT_IN_ATAN:
            case BUILT_IN_ATANF:
            case BUILT_IN_ATANL:
            case BUILT_IN_CEIL:
            case BUILT_IN_CEILF:
            case BUILT_IN_CEILL:
            case BUILT_IN_FLOOR:
            case BUILT_IN_FLOORF:
            case BUILT_IN_FLOORL:
            case BUILT_IN_NEARBYINT:
            case BUILT_IN_NEARBYINTF:
            case BUILT_IN_NEARBYINTL:
            case BUILT_IN_ROUND:
            case BUILT_IN_ROUNDF:
            case BUILT_IN_ROUNDL:
            case BUILT_IN_TRUNC:
            case BUILT_IN_TRUNCF:
            case BUILT_IN_TRUNCL:
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            case BUILT_IN_POW:
            case BUILT_IN_POWF:
            case BUILT_IN_POWL:
              /* pow is non-negative whenever its base is (added note).  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;

      /* We don't know the sign of `t', so be conservative and return false.  */
      return 0;
    }
}

/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;
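
    /* Added note: a CONST_DOUBLE in VOIDmode holds a double-word integer
       whose sign bit lives in the high word, hence the CONST_DOUBLE_HIGH
       test; a floating-point CONST_DOUBLE is conservatively assumed to be
       possibly negative.  */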

    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }

        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

#include "gt-fold-const.h"