/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
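/* Worked example, taking 8-bit two's complement values for brevity:
   100 + 100 wraps to -56.  Here a ^ b == 0, so ~(a ^ b) has its sign
   bit set, and a ^ sum == 0x64 ^ 0xC8 == 0xAC also has its sign bit
   set, so the macro yields nonzero.  When a and b have opposite signs,
   ~(a ^ b) has a clear sign bit and no overflow is ever reported,
   which is exactly right: such an addition cannot overflow.  */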
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
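/* For example, with HOST_BITS_PER_WIDE_INT == 32, BASE is 0x10000 and
   x == 0x12345678 has LOWPART (x) == 0x5678 and HIGHPART (x) == 0x1234,
   so LOWPART (x) + HIGHPART (x) * BASE reassembles x.  */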
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
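/* Round-trip example, again assuming 32-bit HOST_WIDE_INT: encoding
   LOW == 0x89abcdef, HI == 0x01234567 gives
   words[] == { 0xcdef, 0x89ab, 0x4567, 0x0123 }, and decode rebuilds
   LOW and HI from those four half-width digits.  */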
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
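/* Example: a constant built from the bits 0xff forced into a signed
   8-bit type keeps only the low 8 bits and then sign-extends, since
   bit 7 is set; the words become all ones, i.e. -1.  Because the
   stored words changed, signed overflow is reported: 255 does not fit
   that type.  */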
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
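/* The carry out of the low word is recovered from the unsigned
   comparison l < l1: the sum wraps exactly when it ends up smaller
   than an operand.  E.g. with 32-bit words, 0xffffffff + 2 gives
   l == 1, and 1 < 0xffffffff supplies the carry into the high word.  */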
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
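/* Why the correction above works: prod[] is the full product of the
   operands read as unsigned.  A negative operand with high word h < 0
   was read as its value plus 2**(2*HOST_BITS_PER_WIDE_INT), which
   inflates the top half of the product by exactly the other operand,
   so that operand is subtracted back out.  Overflow is then signalled
   unless the corrected top half is a pure sign extension of *hv.  */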
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
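/* The odd-looking split shift l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1
   above avoids undefined behavior: when count == 0 a single shift by
   the full word width would be undefined in C, so the shift is done in
   two steps that are each strictly less than the word size.  */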
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
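/* Both rotates rely on the identity rotl (x, n) == (x << n) | (x >> (prec - n))
   for the PREC-bit value, built from two logical double-word shifts.
   Normalizing COUNT into [0, prec) first also makes a right rotate a
   left rotate by prec - count, and vice versa.  */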
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
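/* Rounding examples: for -7 / 2 the truncating quotient is -3 with
   remainder -1.  FLOOR rounds toward negative infinity, adjusting to
   -4; CEIL keeps -3 (already the ceiling); ROUND sees 2 * |rem| == 2
   >= |den| == 2 and moves the quotient away from zero, to -4.  For
   7 / 2 the same rules give 3 for TRUNC and FLOOR and 4 for CEIL and
   ROUND.  */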
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
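/* All of the functions listed above are odd: sin (-x) == -sin (x),
   tan (-x) == -tan (x), and likewise for their inverses, so a negation
   can be moved across the call.  An even function such as cos, where
   cos (-x) == cos (x), must not appear here.  */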
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (t),
                                   TREE_INT_CST_HIGH (t),
                                   &low, &high);
        tem = build_int_2 (low, high);
        TREE_TYPE (tem) = type;
        TREE_OVERFLOW (tem)
          = (TREE_OVERFLOW (t)
             | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
        TREE_CONSTANT_OVERFLOW (tem)
          = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);

        if (! TREE_OVERFLOW (tem)
            || TREE_UNSIGNED (type)
            || ! flag_trapv)
          return tem;
        break;
      }
    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
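/* Example: splitting IN = (a + b) + 5 with CODE == PLUS_EXPR stores
   the literal 5 in *LITP and returns the variable part a + b.
   Splitting IN = a - 5 stores 5 in *MINUS_LITP instead, since it was
   subtracted, and returns a.  */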
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
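/* The complex cases above follow the usual formulas: the product is
   (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i, and the quotient multiplies
   numerator and denominator by the conjugate of the divisor, dividing
   (r1*r2 + i1*i2) + (i1*r2 - r1*i2)i by |z2|^2 = r2*r2 + i2*i2.  */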
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}

/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
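/* The effect of the cache is that repeated requests such as
   size_int_type_wide (4, sizetype) return the identical INTEGER_CST
   node.  NEW_CONST is a scratch node: it only moves into the table
   (and is replaced by a fresh scratch node) when a lookup misses.  */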
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code ATTRIBUTE_UNUSED, tree type,
                    tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
          /* If x is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (x))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
              if (! REAL_VALUES_LESS (l, x))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
                  if (! REAL_VALUES_LESS (x, u))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, x);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
1814 /* Convert expression ARG to type TYPE. Used by the middle-end for
1815 simple conversions in preference to calling the front-end's convert. */
1817 static tree
1818 fold_convert (tree type, tree arg)
1820 tree orig = TREE_TYPE (arg);
1821 tree tem;
1823 if (type == orig)
1824 return arg;
1826 if (TREE_CODE (arg) == ERROR_MARK
1827 || TREE_CODE (type) == ERROR_MARK
1828 || TREE_CODE (orig) == ERROR_MARK)
1829 return error_mark_node;
1831 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1832 return fold (build1 (NOP_EXPR, type, arg));
1834 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1835 || TREE_CODE (type) == OFFSET_TYPE)
1837 if (TREE_CODE (arg) == INTEGER_CST)
1839 tem = fold_convert_const (NOP_EXPR, type, arg);
1840 if (tem != NULL_TREE)
1841 return tem;
1843 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1844 || TREE_CODE (orig) == OFFSET_TYPE)
1845 return fold (build1 (NOP_EXPR, type, arg));
1846 if (TREE_CODE (orig) == COMPLEX_TYPE)
1848 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1849 return fold_convert (type, tem);
1851 if (TREE_CODE (orig) == VECTOR_TYPE
1852 && GET_MODE_SIZE (TYPE_MODE (type))
1853 == GET_MODE_SIZE (TYPE_MODE (orig)))
1854 return fold (build1 (NOP_EXPR, type, arg));
1856 else if (TREE_CODE (type) == REAL_TYPE)
1858 if (TREE_CODE (arg) == INTEGER_CST)
1860 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1861 if (tem != NULL_TREE)
1862 return tem;
1864 else if (TREE_CODE (arg) == REAL_CST)
1866 tem = fold_convert_const (NOP_EXPR, type, arg);
1867 if (tem != NULL_TREE)
1868 return tem;
1871 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1872 return fold (build1 (FLOAT_EXPR, type, arg));
1873 if (TREE_CODE (orig) == REAL_TYPE)
1874 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1875 type, arg));
1876 if (TREE_CODE (orig) == COMPLEX_TYPE)
1878 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1879 return fold_convert (type, tem);
1882 else if (TREE_CODE (type) == COMPLEX_TYPE)
1884 if (INTEGRAL_TYPE_P (orig)
1885 || POINTER_TYPE_P (orig)
1886 || TREE_CODE (orig) == REAL_TYPE)
1887 return build (COMPLEX_EXPR, type,
1888 fold_convert (TREE_TYPE (type), arg),
1889 fold_convert (TREE_TYPE (type), integer_zero_node));
1890 if (TREE_CODE (orig) == COMPLEX_TYPE)
1892 tree rpart, ipart;
1894 if (TREE_CODE (arg) == COMPLEX_EXPR)
1896 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1897 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1898 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1901 arg = save_expr (arg);
1902 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1903 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1904 rpart = fold_convert (TREE_TYPE (type), rpart);
1905 ipart = fold_convert (TREE_TYPE (type), ipart);
1906 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1909 else if (TREE_CODE (type) == VECTOR_TYPE)
1911 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1912 && GET_MODE_SIZE (TYPE_MODE (type))
1913 == GET_MODE_SIZE (TYPE_MODE (orig)))
1914 return fold (build1 (NOP_EXPR, type, arg));
1915 if (TREE_CODE (orig) == VECTOR_TYPE
1916 && GET_MODE_SIZE (TYPE_MODE (type))
1917 == GET_MODE_SIZE (TYPE_MODE (orig)))
1918 return fold (build1 (NOP_EXPR, type, arg));
1920 else if (VOID_TYPE_P (type))
1921 return fold (build1 (CONVERT_EXPR, type, arg));
1922 abort ();
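
/* Illustrative sketch, not part of the original file: a hypothetical
   caller widening a value to `long' with fold_convert above.  The
   helper name is invented; long_integer_type_node is the standard
   tree.h node for the C `long' type.  */

static tree
example_widen_to_long (tree val)
{
  /* For an INTEGER_CST this folds at once through fold_convert_const;
     otherwise fold_convert builds and folds a NOP_EXPR/FLOAT_EXPR.  */
  return fold_convert (long_integer_type_node, val);
}
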
1925 /* Return an expr equal to X but certainly not valid as an lvalue. */
1927 tree
1928 non_lvalue (tree x)
1930 tree result;
1932 /* These things are certainly not lvalues. */
1933 if (TREE_CODE (x) == NON_LVALUE_EXPR
1934 || TREE_CODE (x) == INTEGER_CST
1935 || TREE_CODE (x) == REAL_CST
1936 || TREE_CODE (x) == STRING_CST
1937 || TREE_CODE (x) == ADDR_EXPR)
1938 return x;
1940 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1941 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1942 return result;
1945 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1946 Zero means allow extended lvalues. */
1948 int pedantic_lvalues;
1950 /* When pedantic, return an expr equal to X but certainly not valid as a
1951 pedantic lvalue. Otherwise, return X. */
1953 tree
1954 pedantic_non_lvalue (tree x)
1956 if (pedantic_lvalues)
1957 return non_lvalue (x);
1958 else
1959 return x;
1962 /* Given a tree comparison code, return the code that is the logical inverse
1963 of the given code. It is not safe to do this for floating-point
1964 comparisons, except for NE_EXPR and EQ_EXPR. */
1966 static enum tree_code
1967 invert_tree_comparison (enum tree_code code)
1969 switch (code)
1971 case EQ_EXPR:
1972 return NE_EXPR;
1973 case NE_EXPR:
1974 return EQ_EXPR;
1975 case GT_EXPR:
1976 return LE_EXPR;
1977 case GE_EXPR:
1978 return LT_EXPR;
1979 case LT_EXPR:
1980 return GE_EXPR;
1981 case LE_EXPR:
1982 return GT_EXPR;
1983 default:
1984 abort ();
1988 /* Similar, but return the comparison that results if the operands are
1989 swapped. This is safe for floating-point. */
1991 static enum tree_code
1992 swap_tree_comparison (enum tree_code code)
1994 switch (code)
1996 case EQ_EXPR:
1997 case NE_EXPR:
1998 return code;
1999 case GT_EXPR:
2000 return LT_EXPR;
2001 case GE_EXPR:
2002 return LE_EXPR;
2003 case LT_EXPR:
2004 return GT_EXPR;
2005 case LE_EXPR:
2006 return GE_EXPR;
2007 default:
2008 abort ();
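
/* Illustrative sketch (hypothetical helper): rebuilding a comparison
   with its operands swapped, which the comment above notes is safe
   even for floating point.  CMP is assumed to be a '<'-class node.  */

static tree
example_swap_operands (tree type, tree cmp)
{
  /* "a < b" becomes "b > a"; contrast invert_tree_comparison, which
     is unsafe for ordered floating-point comparisons.  */
  return build (swap_tree_comparison (TREE_CODE (cmp)), type,
		TREE_OPERAND (cmp, 1), TREE_OPERAND (cmp, 0));
}
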
2013 /* Convert a comparison tree code from an enum tree_code representation
2014 into a compcode bit-based encoding. This function is the inverse of
2015 compcode_to_comparison. */
2017 static int
2018 comparison_to_compcode (enum tree_code code)
2020 switch (code)
2022 case LT_EXPR:
2023 return COMPCODE_LT;
2024 case EQ_EXPR:
2025 return COMPCODE_EQ;
2026 case LE_EXPR:
2027 return COMPCODE_LE;
2028 case GT_EXPR:
2029 return COMPCODE_GT;
2030 case NE_EXPR:
2031 return COMPCODE_NE;
2032 case GE_EXPR:
2033 return COMPCODE_GE;
2034 default:
2035 abort ();
2039 /* Convert a compcode bit-based encoding of a comparison operator back
2040 to GCC's enum tree_code representation. This function is the
2041 inverse of comparison_to_compcode. */
2043 static enum tree_code
2044 compcode_to_comparison (int code)
2046 switch (code)
2048 case COMPCODE_LT:
2049 return LT_EXPR;
2050 case COMPCODE_EQ:
2051 return EQ_EXPR;
2052 case COMPCODE_LE:
2053 return LE_EXPR;
2054 case COMPCODE_GT:
2055 return GT_EXPR;
2056 case COMPCODE_NE:
2057 return NE_EXPR;
2058 case COMPCODE_GE:
2059 return GE_EXPR;
2060 default:
2061 abort ();
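
/* Illustrative sketch (hypothetical helper, assuming the COMPCODE_*
   macros defined earlier in this file keep the LT, EQ and GT bits in
   separate positions): the point of the bit encoding is that two
   comparisons of the same operands can be combined with | or &.  */

static enum tree_code
example_or_comparisons (enum tree_code code1, enum tree_code code2)
{
  /* E.g. LT_EXPR combined with EQ_EXPR yields LE_EXPR.  Note that
     compcode_to_comparison aborts on combinations with no tree code,
     such as the always-true EQ|NE, so callers must screen those.  */
  return compcode_to_comparison (comparison_to_compcode (code1)
				 | comparison_to_compcode (code2));
}
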
2065 /* Return nonzero if CODE is a tree code that represents a truth value. */
2067 static int
2068 truth_value_p (enum tree_code code)
2070 return (TREE_CODE_CLASS (code) == '<'
2071 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2072 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2073 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2076 /* Return nonzero if two operands (typically of the same tree node)
2077 are necessarily equal. If either argument has side-effects this
2078 function returns zero.
2080 If ONLY_CONST is nonzero, only return nonzero for constants.
2081 This function tests whether the operands are indistinguishable;
2082 it does not test whether they are equal using C's == operation.
2083 The distinction is important for IEEE floating point, because
2084 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2085 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2087 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2088 even though it may hold multiple values during a function.
2089 This is because a GCC tree node guarantees that nothing else is
2090 executed between the evaluation of its "operands" (which may often
2091 be evaluated in arbitrary order). Hence if the operands themselves
2092 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2093 same value in each operand/subexpression. Hence a zero value for
2094 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2095 If comparing arbitrary expression trees, such as from different
2096 statements, ONLY_CONST must usually be nonzero. */
2098 int
2099 operand_equal_p (tree arg0, tree arg1, int only_const)
2101 tree fndecl;
2103 /* If both types don't have the same signedness, then we can't consider
2104 them equal. We must check this before the STRIP_NOPS calls
2105 because they may change the signedness of the arguments. */
2106 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2107 return 0;
2109 STRIP_NOPS (arg0);
2110 STRIP_NOPS (arg1);
2112 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2113 /* This is needed for conversions and for COMPONENT_REF.
2114 Might as well play it safe and always test this. */
2115 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2116 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2117 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2118 return 0;
2120 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2121 We don't care about side effects in that case because the SAVE_EXPR
2122 takes care of that for us. In all other cases, two expressions are
2123 equal if they have no side effects. If we have two identical
2124 expressions with side effects that should be treated the same due
2125 to the only side effects being identical SAVE_EXPR's, that will
2126 be detected in the recursive calls below. */
2127 if (arg0 == arg1 && ! only_const
2128 && (TREE_CODE (arg0) == SAVE_EXPR
2129 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2130 return 1;
2132 /* Next handle constant cases, those for which we can return 1 even
2133 if ONLY_CONST is set. */
2134 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2135 switch (TREE_CODE (arg0))
2137 case INTEGER_CST:
2138 return (! TREE_CONSTANT_OVERFLOW (arg0)
2139 && ! TREE_CONSTANT_OVERFLOW (arg1)
2140 && tree_int_cst_equal (arg0, arg1));
2142 case REAL_CST:
2143 return (! TREE_CONSTANT_OVERFLOW (arg0)
2144 && ! TREE_CONSTANT_OVERFLOW (arg1)
2145 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2146 TREE_REAL_CST (arg1)));
2148 case VECTOR_CST:
2150 tree v1, v2;
2152 if (TREE_CONSTANT_OVERFLOW (arg0)
2153 || TREE_CONSTANT_OVERFLOW (arg1))
2154 return 0;
2156 v1 = TREE_VECTOR_CST_ELTS (arg0);
2157 v2 = TREE_VECTOR_CST_ELTS (arg1);
2158 while (v1 && v2)
2160 if (!operand_equal_p (v1, v2, only_const))
2161 return 0;
2162 v1 = TREE_CHAIN (v1);
2163 v2 = TREE_CHAIN (v2);
2166 return 1;
2169 case COMPLEX_CST:
2170 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2171 only_const)
2172 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2173 only_const));
2175 case STRING_CST:
2176 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2177 && ! memcmp (TREE_STRING_POINTER (arg0),
2178 TREE_STRING_POINTER (arg1),
2179 TREE_STRING_LENGTH (arg0)));
2181 case ADDR_EXPR:
2182 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2184 default:
2185 break;
2188 if (only_const)
2189 return 0;
2191 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2193 case '1':
2194 /* Two conversions are equal only if signedness and modes match. */
2195 switch (TREE_CODE (arg0))
2197 case NOP_EXPR:
2198 case CONVERT_EXPR:
2199 case FIX_CEIL_EXPR:
2200 case FIX_TRUNC_EXPR:
2201 case FIX_FLOOR_EXPR:
2202 case FIX_ROUND_EXPR:
2203 if (TREE_UNSIGNED (TREE_TYPE (arg0))
2204 != TREE_UNSIGNED (TREE_TYPE (arg1)))
2205 return 0;
2206 break;
2207 default:
2208 break;
2211 return operand_equal_p (TREE_OPERAND (arg0, 0),
2212 TREE_OPERAND (arg1, 0), 0);
2214 case '<':
2215 case '2':
2216 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2217 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2219 return 1;
2221 /* For commutative ops, allow the other order. */
2222 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2223 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2224 || TREE_CODE (arg0) == BIT_IOR_EXPR
2225 || TREE_CODE (arg0) == BIT_XOR_EXPR
2226 || TREE_CODE (arg0) == BIT_AND_EXPR
2227 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2228 && operand_equal_p (TREE_OPERAND (arg0, 0),
2229 TREE_OPERAND (arg1, 1), 0)
2230 && operand_equal_p (TREE_OPERAND (arg0, 1),
2231 TREE_OPERAND (arg1, 0), 0));
2233 case 'r':
2234 /* If either of the pointer (or reference) expressions we are
2235 dereferencing contain a side effect, these cannot be equal. */
2236 if (TREE_SIDE_EFFECTS (arg0)
2237 || TREE_SIDE_EFFECTS (arg1))
2238 return 0;
2240 switch (TREE_CODE (arg0))
2242 case INDIRECT_REF:
2243 return operand_equal_p (TREE_OPERAND (arg0, 0),
2244 TREE_OPERAND (arg1, 0), 0);
2246 case COMPONENT_REF:
2247 case ARRAY_REF:
2248 case ARRAY_RANGE_REF:
2249 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2250 TREE_OPERAND (arg1, 0), 0)
2251 && operand_equal_p (TREE_OPERAND (arg0, 1),
2252 TREE_OPERAND (arg1, 1), 0));
2254 case BIT_FIELD_REF:
2255 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2256 TREE_OPERAND (arg1, 0), 0)
2257 && operand_equal_p (TREE_OPERAND (arg0, 1),
2258 TREE_OPERAND (arg1, 1), 0)
2259 && operand_equal_p (TREE_OPERAND (arg0, 2),
2260 TREE_OPERAND (arg1, 2), 0));
2261 default:
2262 return 0;
2265 case 'e':
2266 switch (TREE_CODE (arg0))
2268 case ADDR_EXPR:
2269 case TRUTH_NOT_EXPR:
2270 return operand_equal_p (TREE_OPERAND (arg0, 0),
2271 TREE_OPERAND (arg1, 0), 0);
2273 case RTL_EXPR:
2274 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2276 case CALL_EXPR:
2277 /* If the CALL_EXPRs call different functions, then they
2278 clearly cannot be equal. */
2279 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2280 TREE_OPERAND (arg1, 0), 0))
2281 return 0;
2283 /* Only consider const functions equivalent. */
2284 fndecl = get_callee_fndecl (arg0);
2285 if (fndecl == NULL_TREE
2286 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2287 return 0;
2289 /* Now see if all the arguments are the same. operand_equal_p
2290 does not handle TREE_LIST, so we walk the operands here
2291 feeding them to operand_equal_p. */
2292 arg0 = TREE_OPERAND (arg0, 1);
2293 arg1 = TREE_OPERAND (arg1, 1);
2294 while (arg0 && arg1)
2296 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2297 return 0;
2299 arg0 = TREE_CHAIN (arg0);
2300 arg1 = TREE_CHAIN (arg1);
2303 /* If we get here and both argument lists are exhausted
2304 then the CALL_EXPRs are equal. */
2305 return ! (arg0 || arg1);
2307 default:
2308 return 0;
2311 case 'd':
2312 /* Consider __builtin_sqrt equal to sqrt. */
2313 return TREE_CODE (arg0) == FUNCTION_DECL
2314 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2315 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2316 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2318 default:
2319 return 0;
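
/* Illustrative sketch (hypothetical folder): operand_equal_p is the
   gate for simplifications such as X - X ==> 0; with ONLY_CONST == 0
   it accepts identical side-effect-free expressions, not only
   constants.  Floating-point caveats (NaNs, signed zeros) are ignored
   here for brevity.  */

static tree
example_fold_self_minus (tree type, tree arg0, tree arg1)
{
  if (operand_equal_p (arg0, arg1, 0))
    return fold_convert (type, integer_zero_node);
  return NULL_TREE;
}
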
2323 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2324 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2326 When in doubt, return 0. */
2328 static int
2329 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2331 int unsignedp1, unsignedpo;
2332 tree primarg0, primarg1, primother;
2333 unsigned int correct_width;
2335 if (operand_equal_p (arg0, arg1, 0))
2336 return 1;
2338 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2339 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2340 return 0;
2342 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2343 and see if the inner values are the same. This removes any
2344 signedness comparison, which doesn't matter here. */
2345 primarg0 = arg0, primarg1 = arg1;
2346 STRIP_NOPS (primarg0);
2347 STRIP_NOPS (primarg1);
2348 if (operand_equal_p (primarg0, primarg1, 0))
2349 return 1;
2351 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2352 actual comparison operand, ARG0.
2354 First throw away any conversions to wider types
2355 already present in the operands. */
2357 primarg1 = get_narrower (arg1, &unsignedp1);
2358 primother = get_narrower (other, &unsignedpo);
2360 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2361 if (unsignedp1 == unsignedpo
2362 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2363 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2365 tree type = TREE_TYPE (arg0);
2367 /* Make sure shorter operand is extended the right way
2368 to match the longer operand. */
2369 primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2370 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2372 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2373 return 1;
2376 return 0;
2379 /* See if ARG is an expression that is either a comparison or is performing
2380 arithmetic on comparisons. The comparisons must only be comparing
2381 two different values, which will be stored in *CVAL1 and *CVAL2; if
2382 they are nonzero it means that some operands have already been found.
2383 No variables may be used anywhere else in the expression except in the
2384 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2385 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2387 If this is true, return 1. Otherwise, return zero. */
2389 static int
2390 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2392 enum tree_code code = TREE_CODE (arg);
2393 char class = TREE_CODE_CLASS (code);
2395 /* We can handle some of the 'e' cases here. */
2396 if (class == 'e' && code == TRUTH_NOT_EXPR)
2397 class = '1';
2398 else if (class == 'e'
2399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2400 || code == COMPOUND_EXPR))
2401 class = '2';
2403 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2404 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2406 /* If we've already found a CVAL1 or CVAL2, this expression is
2407 too complex to handle. */
2408 if (*cval1 || *cval2)
2409 return 0;
2411 class = '1';
2412 *save_p = 1;
2415 switch (class)
2417 case '1':
2418 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2420 case '2':
2421 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2422 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2423 cval1, cval2, save_p));
2425 case 'c':
2426 return 1;
2428 case 'e':
2429 if (code == COND_EXPR)
2430 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2431 cval1, cval2, save_p)
2432 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2433 cval1, cval2, save_p)
2434 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2435 cval1, cval2, save_p));
2436 return 0;
2438 case '<':
2439 /* First see if we can handle the first operand, then the second. For
2440 the second operand, we know *CVAL1 can't be zero. It must be that
2441 one side of the comparison is each of the values; test for the
2442 case where this isn't true by failing if the two operands
2443 are the same. */
2445 if (operand_equal_p (TREE_OPERAND (arg, 0),
2446 TREE_OPERAND (arg, 1), 0))
2447 return 0;
2449 if (*cval1 == 0)
2450 *cval1 = TREE_OPERAND (arg, 0);
2451 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2453 else if (*cval2 == 0)
2454 *cval2 = TREE_OPERAND (arg, 0);
2455 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2457 else
2458 return 0;
2460 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2462 else if (*cval2 == 0)
2463 *cval2 = TREE_OPERAND (arg, 1);
2464 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2466 else
2467 return 0;
2469 return 1;
2471 default:
2472 return 0;
2476 /* ARG is a tree that is known to contain just arithmetic operations and
2477 comparisons. Evaluate the operations in the tree substituting NEW0 for
2478 any occurrence of OLD0 as an operand of a comparison and likewise for
2479 NEW1 and OLD1. */
2481 static tree
2482 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2484 tree type = TREE_TYPE (arg);
2485 enum tree_code code = TREE_CODE (arg);
2486 char class = TREE_CODE_CLASS (code);
2488 /* We can handle some of the 'e' cases here. */
2489 if (class == 'e' && code == TRUTH_NOT_EXPR)
2490 class = '1';
2491 else if (class == 'e'
2492 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2493 class = '2';
2495 switch (class)
2497 case '1':
2498 return fold (build1 (code, type,
2499 eval_subst (TREE_OPERAND (arg, 0),
2500 old0, new0, old1, new1)));
2502 case '2':
2503 return fold (build (code, type,
2504 eval_subst (TREE_OPERAND (arg, 0),
2505 old0, new0, old1, new1),
2506 eval_subst (TREE_OPERAND (arg, 1),
2507 old0, new0, old1, new1)));
2509 case 'e':
2510 switch (code)
2512 case SAVE_EXPR:
2513 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2515 case COMPOUND_EXPR:
2516 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2518 case COND_EXPR:
2519 return fold (build (code, type,
2520 eval_subst (TREE_OPERAND (arg, 0),
2521 old0, new0, old1, new1),
2522 eval_subst (TREE_OPERAND (arg, 1),
2523 old0, new0, old1, new1),
2524 eval_subst (TREE_OPERAND (arg, 2),
2525 old0, new0, old1, new1)));
2526 default:
2527 break;
2529 /* Fall through - ??? */
2531 case '<':
2533 tree arg0 = TREE_OPERAND (arg, 0);
2534 tree arg1 = TREE_OPERAND (arg, 1);
2536 /* We need to check both for exact equality and tree equality. The
2537 former will be true if the operand has a side-effect. In that
2538 case, we know the operand occurred exactly once. */
2540 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2541 arg0 = new0;
2542 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2543 arg0 = new1;
2545 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2546 arg1 = new0;
2547 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2548 arg1 = new1;
2550 return fold (build (code, type, arg0, arg1));
2553 default:
2554 return arg;
2558 /* Return a tree for the case when the result of an expression is RESULT
2559 converted to TYPE and OMITTED was previously an operand of the expression
2560 but is now not needed (e.g., we folded OMITTED * 0).
2562 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2563 the conversion of RESULT to TYPE. */
2565 tree
2566 omit_one_operand (tree type, tree result, tree omitted)
2568 tree t = fold_convert (type, result);
2570 if (TREE_SIDE_EFFECTS (omitted))
2571 return build (COMPOUND_EXPR, type, omitted, t);
2573 return non_lvalue (t);
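
/* Illustrative sketch (hypothetical folder): folding X * 0 must still
   evaluate X if it has side effects, which is exactly the service
   omit_one_operand provides.  */

static tree
example_fold_mult_zero (tree type, tree x)
{
  /* Yields plain 0, or the COMPOUND_EXPR "(x, 0)" when X carries side
     effects such as a call.  */
  return omit_one_operand (type, integer_zero_node, x);
}
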
2576 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2578 static tree
2579 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2581 tree t = fold_convert (type, result);
2583 if (TREE_SIDE_EFFECTS (omitted))
2584 return build (COMPOUND_EXPR, type, omitted, t);
2586 return pedantic_non_lvalue (t);
2589 /* Return a simplified tree node for the truth-negation of ARG. This
2590 never alters ARG itself. We assume that ARG is an operation that
2591 returns a truth value (0 or 1). */
2593 tree
2594 invert_truthvalue (tree arg)
2596 tree type = TREE_TYPE (arg);
2597 enum tree_code code = TREE_CODE (arg);
2599 if (code == ERROR_MARK)
2600 return arg;
2602 /* If this is a comparison, we can simply invert it, except for
2603 floating-point non-equality comparisons, in which case we just
2604 enclose a TRUTH_NOT_EXPR around what we have. */
2606 if (TREE_CODE_CLASS (code) == '<')
2608 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2609 && !flag_unsafe_math_optimizations
2610 && code != NE_EXPR
2611 && code != EQ_EXPR)
2612 return build1 (TRUTH_NOT_EXPR, type, arg);
2613 else
2614 return build (invert_tree_comparison (code), type,
2615 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2618 switch (code)
2620 case INTEGER_CST:
2621 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2623 case TRUTH_AND_EXPR:
2624 return build (TRUTH_OR_EXPR, type,
2625 invert_truthvalue (TREE_OPERAND (arg, 0)),
2626 invert_truthvalue (TREE_OPERAND (arg, 1)));
2628 case TRUTH_OR_EXPR:
2629 return build (TRUTH_AND_EXPR, type,
2630 invert_truthvalue (TREE_OPERAND (arg, 0)),
2631 invert_truthvalue (TREE_OPERAND (arg, 1)));
2633 case TRUTH_XOR_EXPR:
2634 /* Here we can invert either operand. We invert the first operand
2635 unless the second operand is a TRUTH_NOT_EXPR in which case our
2636 result is the XOR of the first operand with the inside of the
2637 negation of the second operand. */
2639 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2640 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2641 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2642 else
2643 return build (TRUTH_XOR_EXPR, type,
2644 invert_truthvalue (TREE_OPERAND (arg, 0)),
2645 TREE_OPERAND (arg, 1));
2647 case TRUTH_ANDIF_EXPR:
2648 return build (TRUTH_ORIF_EXPR, type,
2649 invert_truthvalue (TREE_OPERAND (arg, 0)),
2650 invert_truthvalue (TREE_OPERAND (arg, 1)));
2652 case TRUTH_ORIF_EXPR:
2653 return build (TRUTH_ANDIF_EXPR, type,
2654 invert_truthvalue (TREE_OPERAND (arg, 0)),
2655 invert_truthvalue (TREE_OPERAND (arg, 1)));
2657 case TRUTH_NOT_EXPR:
2658 return TREE_OPERAND (arg, 0);
2660 case COND_EXPR:
2661 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2662 invert_truthvalue (TREE_OPERAND (arg, 1)),
2663 invert_truthvalue (TREE_OPERAND (arg, 2)));
2665 case COMPOUND_EXPR:
2666 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2667 invert_truthvalue (TREE_OPERAND (arg, 1)));
2669 case WITH_RECORD_EXPR:
2670 return build (WITH_RECORD_EXPR, type,
2671 invert_truthvalue (TREE_OPERAND (arg, 0)),
2672 TREE_OPERAND (arg, 1));
2674 case NON_LVALUE_EXPR:
2675 return invert_truthvalue (TREE_OPERAND (arg, 0));
2677 case NOP_EXPR:
2678 case CONVERT_EXPR:
2679 case FLOAT_EXPR:
2680 return build1 (TREE_CODE (arg), type,
2681 invert_truthvalue (TREE_OPERAND (arg, 0)));
2683 case BIT_AND_EXPR:
2684 if (!integer_onep (TREE_OPERAND (arg, 1)))
2685 break;
2686 return build (EQ_EXPR, type, arg,
2687 fold_convert (type, integer_zero_node));
2689 case SAVE_EXPR:
2690 return build1 (TRUTH_NOT_EXPR, type, arg);
2692 case CLEANUP_POINT_EXPR:
2693 return build1 (CLEANUP_POINT_EXPR, type,
2694 invert_truthvalue (TREE_OPERAND (arg, 0)));
2696 default:
2697 break;
2699 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2700 abort ();
2701 return build1 (TRUTH_NOT_EXPR, type, arg);
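
/* Illustrative sketch (hypothetical helper): invert_truthvalue pushes
   the negation inward, so De Morgan's law falls out of the
   TRUTH_ANDIF_EXPR case above.  */

static tree
example_demorgan (tree type, tree a, tree b)
{
  /* !(a && b) becomes !a || !b, with each operand inverted
     recursively.  */
  return invert_truthvalue (build (TRUTH_ANDIF_EXPR, type, a, b));
}
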
2704 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2705 operands are another bit-wise operation with a common input. If so,
2706 distribute the bit operations to save an operation and possibly two if
2707 constants are involved. For example, convert
2708 (A | B) & (A | C) into A | (B & C)
2709 Further simplification will occur if B and C are constants.
2711 If this optimization cannot be done, 0 will be returned. */
2713 static tree
2714 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2716 tree common;
2717 tree left, right;
2719 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2720 || TREE_CODE (arg0) == code
2721 || (TREE_CODE (arg0) != BIT_AND_EXPR
2722 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2723 return 0;
2725 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2727 common = TREE_OPERAND (arg0, 0);
2728 left = TREE_OPERAND (arg0, 1);
2729 right = TREE_OPERAND (arg1, 1);
2731 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2733 common = TREE_OPERAND (arg0, 0);
2734 left = TREE_OPERAND (arg0, 1);
2735 right = TREE_OPERAND (arg1, 0);
2737 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2739 common = TREE_OPERAND (arg0, 1);
2740 left = TREE_OPERAND (arg0, 0);
2741 right = TREE_OPERAND (arg1, 1);
2743 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2745 common = TREE_OPERAND (arg0, 1);
2746 left = TREE_OPERAND (arg0, 0);
2747 right = TREE_OPERAND (arg1, 0);
2749 else
2750 return 0;
2752 return fold (build (TREE_CODE (arg0), type, common,
2753 fold (build (code, type, left, right))));
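
/* Illustrative sketch (hypothetical caller): since
   distribute_bit_expr returns 0 when the operands do not share a
   common input, callers keep the original form as a fallback.  */

static tree
example_distribute_and (tree type, tree arg0, tree arg1)
{
  /* (A | B) & (A | C) becomes A | (B & C); anything else is rebuilt
     unchanged.  */
  tree tem = distribute_bit_expr (BIT_AND_EXPR, type, arg0, arg1);
  return tem != 0 ? tem : build (BIT_AND_EXPR, type, arg0, arg1);
}
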
2756 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2757 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2759 static tree
2760 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2761 int unsignedp)
2763 tree result = build (BIT_FIELD_REF, type, inner,
2764 size_int (bitsize), bitsize_int (bitpos));
2766 TREE_UNSIGNED (result) = unsignedp;
2768 return result;
2771 /* Optimize a bit-field compare.
2773 There are two cases: First is a compare against a constant and the
2774 second is a comparison of two items where the fields are at the same
2775 bit position relative to the start of a chunk (byte, halfword, word)
2776 large enough to contain it. In these cases we can avoid the shift
2777 implicit in bitfield extractions.
2779 For constants, we emit a compare of the shifted constant with the
2780 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2781 compared. For two fields at the same position, we do the ANDs with the
2782 similar mask and compare the result of the ANDs.
2784 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2785 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2786 are the left and right operands of the comparison, respectively.
2788 If the optimization described above can be done, we return the resulting
2789 tree. Otherwise we return zero. */
2791 static tree
2792 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2793 tree lhs, tree rhs)
2795 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2796 tree type = TREE_TYPE (lhs);
2797 tree signed_type, unsigned_type;
2798 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2799 enum machine_mode lmode, rmode, nmode;
2800 int lunsignedp, runsignedp;
2801 int lvolatilep = 0, rvolatilep = 0;
2802 tree linner, rinner = NULL_TREE;
2803 tree mask;
2804 tree offset;
2806 /* Get all the information about the extractions being done. If the bit size
2807 if the same as the size of the underlying object, we aren't doing an
2808 extraction at all and so can do nothing. We also don't want to
2809 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2810 then will no longer be able to replace it. */
2811 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2812 &lunsignedp, &lvolatilep);
2813 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2814 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2815 return 0;
2817 if (!const_p)
2819 /* If this is not a constant, we can only do something if bit positions,
2820 sizes, and signedness are the same. */
2821 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2822 &runsignedp, &rvolatilep);
2824 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2825 || lunsignedp != runsignedp || offset != 0
2826 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2827 return 0;
2830 /* See if we can find a mode to refer to this field. We should be able to,
2831 but fail if we can't. */
2832 nmode = get_best_mode (lbitsize, lbitpos,
2833 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2834 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2835 TYPE_ALIGN (TREE_TYPE (rinner))),
2836 word_mode, lvolatilep || rvolatilep);
2837 if (nmode == VOIDmode)
2838 return 0;
2840 /* Set signed and unsigned types of the precision of this mode for the
2841 shifts below. */
2842 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2843 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2845 /* Compute the bit position and size for the new reference and our offset
2846 within it. If the new reference is the same size as the original, we
2847 won't optimize anything, so return zero. */
2848 nbitsize = GET_MODE_BITSIZE (nmode);
2849 nbitpos = lbitpos & ~ (nbitsize - 1);
2850 lbitpos -= nbitpos;
2851 if (nbitsize == lbitsize)
2852 return 0;
2854 if (BYTES_BIG_ENDIAN)
2855 lbitpos = nbitsize - lbitsize - lbitpos;
2857 /* Make the mask to be used against the extracted field. */
2858 mask = build_int_2 (~0, ~0);
2859 TREE_TYPE (mask) = unsigned_type;
2860 force_fit_type (mask, 0);
2861 mask = fold_convert (unsigned_type, mask);
2862 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2863 mask = const_binop (RSHIFT_EXPR, mask,
2864 size_int (nbitsize - lbitsize - lbitpos), 0);
2866 if (! const_p)
2867 /* If not comparing with constant, just rework the comparison
2868 and return. */
2869 return build (code, compare_type,
2870 build (BIT_AND_EXPR, unsigned_type,
2871 make_bit_field_ref (linner, unsigned_type,
2872 nbitsize, nbitpos, 1),
2873 mask),
2874 build (BIT_AND_EXPR, unsigned_type,
2875 make_bit_field_ref (rinner, unsigned_type,
2876 nbitsize, nbitpos, 1),
2877 mask));
2879 /* Otherwise, we are handling the constant case. See if the constant is too
2880 big for the field. Warn and return a tree for 0 (false) if so. We do
2881 this not only for its own sake, but to avoid having to test for this
2882 error case below. If we didn't, we might generate wrong code.
2884 For unsigned fields, the constant shifted right by the field length should
2885 be all zero. For signed fields, the high-order bits should agree with
2886 the sign bit. */
2888 if (lunsignedp)
2890 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2891 fold_convert (unsigned_type, rhs),
2892 size_int (lbitsize), 0)))
2894 warning ("comparison is always %d due to width of bit-field",
2895 code == NE_EXPR);
2896 return fold_convert (compare_type,
2897 (code == NE_EXPR
2898 ? integer_one_node : integer_zero_node));
2901 else
2903 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2904 size_int (lbitsize - 1), 0);
2905 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2907 warning ("comparison is always %d due to width of bit-field",
2908 code == NE_EXPR);
2909 return fold_convert (compare_type,
2910 (code == NE_EXPR
2911 ? integer_one_node : integer_zero_node));
2915 /* Single-bit compares should always be against zero. */
2916 if (lbitsize == 1 && ! integer_zerop (rhs))
2918 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2919 rhs = fold_convert (type, integer_zero_node);
2922 /* Make a new bitfield reference, shift the constant over the
2923 appropriate number of bits and mask it with the computed mask
2924 (in case this was a signed field). If we changed it, make a new one. */
2925 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2926 if (lvolatilep)
2928 TREE_SIDE_EFFECTS (lhs) = 1;
2929 TREE_THIS_VOLATILE (lhs) = 1;
2932 rhs = fold (const_binop (BIT_AND_EXPR,
2933 const_binop (LSHIFT_EXPR,
2934 fold_convert (unsigned_type, rhs),
2935 size_int (lbitpos), 0),
2936 mask, 0));
2938 return build (code, compare_type,
2939 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2940 rhs);
2943 /* Subroutine for fold_truthop: decode a field reference.
2945 If EXP is a comparison reference, we return the innermost reference.
2947 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2948 set to the starting bit number.
2950 If the innermost field can be completely contained in a mode-sized
2951 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2953 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2954 otherwise it is not changed.
2956 *PUNSIGNEDP is set to the signedness of the field.
2958 *PMASK is set to the mask used. This is either contained in a
2959 BIT_AND_EXPR or derived from the width of the field.
2961 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2963 Return 0 if this is not a component reference or is one that we can't
2964 do anything with. */
2966 static tree
2967 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2968 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2969 int *punsignedp, int *pvolatilep,
2970 tree *pmask, tree *pand_mask)
2972 tree outer_type = 0;
2973 tree and_mask = 0;
2974 tree mask, inner, offset;
2975 tree unsigned_type;
2976 unsigned int precision;
2978 /* All the optimizations using this function assume integer fields.
2979 There are problems with FP fields since the type_for_size call
2980 below can fail for, e.g., XFmode. */
2981 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2982 return 0;
2984 /* We are interested in the bare arrangement of bits, so strip everything
2985 that doesn't affect the machine mode. However, record the type of the
2986 outermost expression if it may matter below. */
2987 if (TREE_CODE (exp) == NOP_EXPR
2988 || TREE_CODE (exp) == CONVERT_EXPR
2989 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2990 outer_type = TREE_TYPE (exp);
2991 STRIP_NOPS (exp);
2993 if (TREE_CODE (exp) == BIT_AND_EXPR)
2995 and_mask = TREE_OPERAND (exp, 1);
2996 exp = TREE_OPERAND (exp, 0);
2997 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2998 if (TREE_CODE (and_mask) != INTEGER_CST)
2999 return 0;
3002 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3003 punsignedp, pvolatilep);
3004 if ((inner == exp && and_mask == 0)
3005 || *pbitsize < 0 || offset != 0
3006 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3007 return 0;
3009 /* If the number of bits in the reference is the same as the bitsize of
3010 the outer type, then the outer type gives the signedness. Otherwise
3011 (in case of a small bitfield) the signedness is unchanged. */
3012 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3013 *punsignedp = TREE_UNSIGNED (outer_type);
3015 /* Compute the mask to access the bitfield. */
3016 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3017 precision = TYPE_PRECISION (unsigned_type);
3019 mask = build_int_2 (~0, ~0);
3020 TREE_TYPE (mask) = unsigned_type;
3021 force_fit_type (mask, 0);
3022 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3023 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3025 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3026 if (and_mask != 0)
3027 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3028 fold_convert (unsigned_type, and_mask), mask));
3030 *pmask = mask;
3031 *pand_mask = and_mask;
3032 return inner;
3035 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3036 bit positions. */
3038 static int
3039 all_ones_mask_p (tree mask, int size)
3041 tree type = TREE_TYPE (mask);
3042 unsigned int precision = TYPE_PRECISION (type);
3043 tree tmask;
3045 tmask = build_int_2 (~0, ~0);
3046 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3047 force_fit_type (tmask, 0);
3048 return
3049 tree_int_cst_equal (mask,
3050 const_binop (RSHIFT_EXPR,
3051 const_binop (LSHIFT_EXPR, tmask,
3052 size_int (precision - size),
3054 size_int (precision - size), 0));
3057 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3058 represents the sign bit of EXP's type. If EXP represents a sign
3059 or zero extension, also test VAL against the unextended type.
3060 The return value is the (sub)expression whose sign bit is VAL,
3061 or NULL_TREE otherwise. */
3063 static tree
3064 sign_bit_p (tree exp, tree val)
3066 unsigned HOST_WIDE_INT mask_lo, lo;
3067 HOST_WIDE_INT mask_hi, hi;
3068 int width;
3069 tree t;
3071 /* Tree EXP must have an integral type. */
3072 t = TREE_TYPE (exp);
3073 if (! INTEGRAL_TYPE_P (t))
3074 return NULL_TREE;
3076 /* Tree VAL must be an integer constant. */
3077 if (TREE_CODE (val) != INTEGER_CST
3078 || TREE_CONSTANT_OVERFLOW (val))
3079 return NULL_TREE;
3081 width = TYPE_PRECISION (t);
3082 if (width > HOST_BITS_PER_WIDE_INT)
3084 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3085 lo = 0;
3087 mask_hi = ((unsigned HOST_WIDE_INT) -1
3088 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3089 mask_lo = -1;
3091 else
3093 hi = 0;
3094 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3096 mask_hi = 0;
3097 mask_lo = ((unsigned HOST_WIDE_INT) -1
3098 >> (HOST_BITS_PER_WIDE_INT - width));
3101 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3102 treat VAL as if it were unsigned. */
3103 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3104 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3105 return exp;
3107 /* Handle extension from a narrower type. */
3108 if (TREE_CODE (exp) == NOP_EXPR
3109 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3110 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3112 return NULL_TREE;
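
/* Illustrative sketch (hypothetical helper): with 32-bit int, a mask
   of 0x80000000 satisfies sign_bit_p, letting a later transformation
   rewrite "(x & 0x80000000) != 0" as the cheaper "x < 0".  */

static int
example_tests_sign_bit (tree expr, tree mask)
{
  /* Nonzero when MASK is exactly the sign bit of EXPR's type, also
     looking through a widening NOP_EXPR.  */
  return sign_bit_p (expr, mask) != NULL_TREE;
}
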
3115 /* Subroutine for fold_truthop: determine if an operand is simple enough
3116 to be evaluated unconditionally. */
3118 static int
3119 simple_operand_p (tree exp)
3121 /* Strip any conversions that don't change the machine mode. */
3122 while ((TREE_CODE (exp) == NOP_EXPR
3123 || TREE_CODE (exp) == CONVERT_EXPR)
3124 && (TYPE_MODE (TREE_TYPE (exp))
3125 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3126 exp = TREE_OPERAND (exp, 0);
3128 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3129 || (DECL_P (exp)
3130 && ! TREE_ADDRESSABLE (exp)
3131 && ! TREE_THIS_VOLATILE (exp)
3132 && ! DECL_NONLOCAL (exp)
3133 /* Don't regard global variables as simple. They may be
3134 allocated in ways unknown to the compiler (shared memory,
3135 #pragma weak, etc). */
3136 && ! TREE_PUBLIC (exp)
3137 && ! DECL_EXTERNAL (exp)
3138 /* Loading a static variable is unduly expensive, but global
3139 registers aren't expensive. */
3140 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3143 /* The following functions are subroutines to fold_range_test and allow it to
3144 try to change a logical combination of comparisons into a range test.
3146 For example, both
3147 X == 2 || X == 3 || X == 4 || X == 5
3148 and
3149 X >= 2 && X <= 5
3150 are converted to
3151 (unsigned) (X - 2) <= 3
3153 We describe each set of comparisons as being either inside or outside
3154 a range, using a variable named like IN_P, and then describe the
3155 range with a lower and upper bound. If one of the bounds is omitted,
3156 it represents either the highest or lowest value of the type.
3158 In the comments below, we represent a range by two numbers in brackets
3159 preceded by a "+" to designate being inside that range, or a "-" to
3160 designate being outside that range, so the condition can be inverted by
3161 flipping the prefix. An omitted bound is represented by a "-". For
3162 example, "- [-, 10]" means being outside the range starting at the lowest
3163 possible value and ending at 10, in other words, being greater than 10.
3164 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3165 always false.
3167 We set up things so that the missing bounds are handled in a consistent
3168 manner so neither a missing bound nor "true" and "false" need to be
3169 handled using a special case. */
3171 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3172 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3173 and UPPER1_P are nonzero if the respective argument is an upper bound
3174 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3175 must be specified for a comparison. ARG1 will be converted to ARG0's
3176 type if both are specified. */
3178 static tree
3179 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3180 tree arg1, int upper1_p)
3182 tree tem;
3183 int result;
3184 int sgn0, sgn1;
3186 /* If neither arg represents infinity, do the normal operation.
3187 Else, if not a comparison, return infinity. Else handle the special
3188 comparison rules. Note that most of the cases below won't occur, but
3189 are handled for consistency. */
3191 if (arg0 != 0 && arg1 != 0)
3193 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3194 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3195 STRIP_NOPS (tem);
3196 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3199 if (TREE_CODE_CLASS (code) != '<')
3200 return 0;
3202 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3203 for neither. In real maths, we cannot assume open ended ranges are
3204 the same. But, this is computer arithmetic, where numbers are finite.
3205 We can therefore make the transformation of any unbounded range with
3206 the value Z, Z being greater than any representable number. This permits
3207 us to treat unbounded ranges as equal. */
3208 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3209 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3210 switch (code)
3212 case EQ_EXPR:
3213 result = sgn0 == sgn1;
3214 break;
3215 case NE_EXPR:
3216 result = sgn0 != sgn1;
3217 break;
3218 case LT_EXPR:
3219 result = sgn0 < sgn1;
3220 break;
3221 case LE_EXPR:
3222 result = sgn0 <= sgn1;
3223 break;
3224 case GT_EXPR:
3225 result = sgn0 > sgn1;
3226 break;
3227 case GE_EXPR:
3228 result = sgn0 >= sgn1;
3229 break;
3230 default:
3231 abort ();
3234 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3237 /* Given EXP, a logical expression, set the range it is testing into
3238 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3239 actually being tested. *PLOW and *PHIGH will be made of the same type
3240 as the returned expression. If EXP is not a comparison, we will most
3241 likely not be returning a useful value and range. */
3243 static tree
3244 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3246 enum tree_code code;
3247 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3248 tree orig_type = NULL_TREE;
3249 int in_p, n_in_p;
3250 tree low, high, n_low, n_high;
3252 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3253 and see if we can refine the range. Some of the cases below may not
3254 happen, but it doesn't seem worth worrying about this. We "continue"
3255 the outer loop when we've changed something; otherwise we "break"
3256 the switch, which will "break" the while. */
3258 in_p = 0;
3259 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3261 while (1)
3263 code = TREE_CODE (exp);
3265 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3267 if (first_rtl_op (code) > 0)
3268 arg0 = TREE_OPERAND (exp, 0);
3269 if (TREE_CODE_CLASS (code) == '<'
3270 || TREE_CODE_CLASS (code) == '1'
3271 || TREE_CODE_CLASS (code) == '2')
3272 type = TREE_TYPE (arg0);
3273 if (TREE_CODE_CLASS (code) == '2'
3274 || TREE_CODE_CLASS (code) == '<'
3275 || (TREE_CODE_CLASS (code) == 'e'
3276 && TREE_CODE_LENGTH (code) > 1))
3277 arg1 = TREE_OPERAND (exp, 1);
3280 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3281 lose a cast by accident. */
3282 if (type != NULL_TREE && orig_type == NULL_TREE)
3283 orig_type = type;
3285 switch (code)
3287 case TRUTH_NOT_EXPR:
3288 in_p = ! in_p, exp = arg0;
3289 continue;
3291 case EQ_EXPR: case NE_EXPR:
3292 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3293 /* We can only do something if the range is testing for zero
3294 and if the second operand is an integer constant. Note that
3295 saying something is "in" the range we make is done by
3296 complementing IN_P since it will be set in the initial case of
3297 being not equal to zero; "out" is leaving it alone. */
3298 if (low == 0 || high == 0
3299 || ! integer_zerop (low) || ! integer_zerop (high)
3300 || TREE_CODE (arg1) != INTEGER_CST)
3301 break;
3303 switch (code)
3305 case NE_EXPR: /* - [c, c] */
3306 low = high = arg1;
3307 break;
3308 case EQ_EXPR: /* + [c, c] */
3309 in_p = ! in_p, low = high = arg1;
3310 break;
3311 case GT_EXPR: /* - [-, c] */
3312 low = 0, high = arg1;
3313 break;
3314 case GE_EXPR: /* + [c, -] */
3315 in_p = ! in_p, low = arg1, high = 0;
3316 break;
3317 case LT_EXPR: /* - [c, -] */
3318 low = arg1, high = 0;
3319 break;
3320 case LE_EXPR: /* + [-, c] */
3321 in_p = ! in_p, low = 0, high = arg1;
3322 break;
3323 default:
3324 abort ();
3327 exp = arg0;
3329 /* If this is an unsigned comparison, we also know that EXP is
3330 greater than or equal to zero. We base the range tests we make
3331 on that fact, so we record it here so we can parse existing
3332 range tests. */
3333 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3335 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3336 1, fold_convert (type, integer_zero_node),
3337 NULL_TREE))
3338 break;
3340 in_p = n_in_p, low = n_low, high = n_high;
3342 /* If the high bound is missing, but we have a nonzero low
3343 bound, reverse the range so it goes from zero to the low bound
3344 minus 1. */
3345 if (high == 0 && low && ! integer_zerop (low))
3347 in_p = ! in_p;
3348 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3349 integer_one_node, 0);
3350 low = fold_convert (type, integer_zero_node);
3353 continue;
3355 case NEGATE_EXPR:
3356 /* (-x) IN [a,b] -> x in [-b, -a] */
3357 n_low = range_binop (MINUS_EXPR, type,
3358 fold_convert (type, integer_zero_node),
3359 0, high, 1);
3360 n_high = range_binop (MINUS_EXPR, type,
3361 fold_convert (type, integer_zero_node),
3362 0, low, 0);
3363 low = n_low, high = n_high;
3364 exp = arg0;
3365 continue;
3367 case BIT_NOT_EXPR:
3368 /* ~ X -> -X - 1 */
3369 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3370 fold_convert (type, integer_one_node));
3371 continue;
3373 case PLUS_EXPR: case MINUS_EXPR:
3374 if (TREE_CODE (arg1) != INTEGER_CST)
3375 break;
3377 /* If EXP is signed, any overflow in the computation is undefined,
3378 so we don't worry about it so long as our computations on
3379 the bounds don't overflow. For unsigned, overflow is defined
3380 and this is exactly the right thing. */
3381 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3382 type, low, 0, arg1, 0);
3383 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3384 type, high, 1, arg1, 0);
3385 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3386 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3387 break;
3389 /* Check for an unsigned range which has wrapped around the maximum
3390 value thus making n_high < n_low, and normalize it. */
3391 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3393 low = range_binop (PLUS_EXPR, type, n_high, 0,
3394 integer_one_node, 0);
3395 high = range_binop (MINUS_EXPR, type, n_low, 0,
3396 integer_one_node, 0);
3398 /* If the range is of the form +/- [ x+1, x ], we won't
3399 be able to normalize it. But then, it represents the
3400 whole range or the empty set, so make it
3401 +/- [ -, - ]. */
3402 if (tree_int_cst_equal (n_low, low)
3403 && tree_int_cst_equal (n_high, high))
3404 low = high = 0;
3405 else
3406 in_p = ! in_p;
3408 else
3409 low = n_low, high = n_high;
3411 exp = arg0;
3412 continue;
3414 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3415 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3416 break;
3418 if (! INTEGRAL_TYPE_P (type)
3419 || (low != 0 && ! int_fits_type_p (low, type))
3420 || (high != 0 && ! int_fits_type_p (high, type)))
3421 break;
3423 n_low = low, n_high = high;
3425 if (n_low != 0)
3426 n_low = fold_convert (type, n_low);
3428 if (n_high != 0)
3429 n_high = fold_convert (type, n_high);
3431 /* If we're converting from an unsigned to a signed type,
3432 we will be doing the comparison as unsigned. The tests above
3433 have already verified that LOW and HIGH are both positive.
3435 So we have to make sure that the original unsigned value will
3436 be interpreted as positive. */
3437 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3439 tree equiv_type = (*lang_hooks.types.type_for_mode)
3440 (TYPE_MODE (type), 1);
3441 tree high_positive;
3443 /* A range without an upper bound is, naturally, unbounded.
3444 Since convert would have cropped a very large value, use
3445 the max value for the destination type. */
3446 high_positive
3447 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3448 : TYPE_MAX_VALUE (type);
3450 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3451 high_positive = fold (build (RSHIFT_EXPR, type,
3452 fold_convert (type,
3453 high_positive),
3454 fold_convert (type,
3455 integer_one_node)));
3457 /* If the low bound is specified, "and" the range with the
3458 range for which the original unsigned value will be
3459 positive. */
3460 if (low != 0)
3462 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3463 1, n_low, n_high, 1,
3464 fold_convert (type, integer_zero_node),
3465 high_positive))
3466 break;
3468 in_p = (n_in_p == in_p);
3470 else
3472 /* Otherwise, "or" the range with the range of the input
3473 that will be interpreted as negative. */
3474 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3475 0, n_low, n_high, 1,
3476 fold_convert (type, integer_zero_node),
3477 high_positive))
3478 break;
3480 in_p = (in_p != n_in_p);
3484 exp = arg0;
3485 low = n_low, high = n_high;
3486 continue;
3488 default:
3489 break;
3492 break;
3495 /* If EXP is a constant, we can evaluate whether this is true or false. */
3496 if (TREE_CODE (exp) == INTEGER_CST)
3498 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3499 exp, 0, low, 0))
3500 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3501 exp, 1, high, 1)));
3502 low = high = 0;
3503 exp = 0;
3506 *pin_p = in_p, *plow = low, *phigh = high;
3507 return exp;
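
/* Illustrative sketch (hypothetical helper): decoding "x > 10" with
   make_range returns X with IN_P == 0, no lower bound and an upper
   bound of 10, i.e. "x outside [min, 10]".  */

static int
example_is_excluded_prefix (tree cond)
{
  int in_p;
  tree low, high, exp;

  exp = make_range (cond, &in_p, &low, &high);
  /* True for conditions of the shape "x > c" on signed operands.  */
  return exp != 0 && ! in_p && low == 0 && high != 0;
}
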
3510 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3511 type, TYPE, return an expression to test if EXP is in (or out of, depending
3512 on IN_P) the range. Return 0 if the test couldn't be created. */
3514 static tree
3515 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3517 tree etype = TREE_TYPE (exp);
3518 tree value;
3520 if (! in_p)
3522 value = build_range_check (type, exp, 1, low, high);
3523 if (value != 0)
3524 return invert_truthvalue (value);
3526 return 0;
3529 if (low == 0 && high == 0)
3530 return fold_convert (type, integer_one_node);
3532 if (low == 0)
3533 return fold (build (LE_EXPR, type, exp, high));
3535 if (high == 0)
3536 return fold (build (GE_EXPR, type, exp, low));
3538 if (operand_equal_p (low, high, 0))
3539 return fold (build (EQ_EXPR, type, exp, low));
3541 if (integer_zerop (low))
3543 if (! TREE_UNSIGNED (etype))
3545 etype = (*lang_hooks.types.unsigned_type) (etype);
3546 high = fold_convert (etype, high);
3547 exp = fold_convert (etype, exp);
3549 return build_range_check (type, exp, 1, 0, high);
3552 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3553 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3555 unsigned HOST_WIDE_INT lo;
3556 HOST_WIDE_INT hi;
3557 int prec;
3559 /* For enums the comparison will be done in the underlying type,
3560 so using enum's precision is wrong here.
3561 Consider e.g. enum { A, B, C, D, E }, low == B and high == D. */
3562 if (TREE_CODE (etype) == ENUMERAL_TYPE)
3563 prec = GET_MODE_BITSIZE (TYPE_MODE (etype));
3564 else
3565 prec = TYPE_PRECISION (etype);
3566 if (prec <= HOST_BITS_PER_WIDE_INT)
3568 hi = 0;
3569 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3571 else
3573 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3574 lo = (unsigned HOST_WIDE_INT) -1;
3577 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3579 if (TREE_UNSIGNED (etype))
3581 etype = (*lang_hooks.types.signed_type) (etype);
3582 exp = fold_convert (etype, exp);
3584 return fold (build (GT_EXPR, type, exp,
3585 fold_convert (etype, integer_zero_node)));
3589 value = const_binop (MINUS_EXPR, high, low, 0);
3590 if (value != 0 && TREE_OVERFLOW (value) && ! TREE_UNSIGNED (etype))
3592 tree utype, minv, maxv;
3594 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3595 for the type in question, as we rely on this here. */
3596 switch (TREE_CODE (etype))
3598 case INTEGER_TYPE:
3599 case ENUMERAL_TYPE:
3600 case CHAR_TYPE:
3601 utype = lang_hooks.types.unsigned_type (etype);
3602 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3603 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3604 integer_one_node, 1);
3605 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3606 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3607 minv, 1, maxv, 1)))
3609 etype = utype;
3610 high = fold_convert (etype, high);
3611 low = fold_convert (etype, low);
3612 exp = fold_convert (etype, exp);
3613 value = const_binop (MINUS_EXPR, high, low, 0);
3615 break;
3616 default:
3617 break;
3621 if (value != 0 && ! TREE_OVERFLOW (value))
3622 return build_range_check (type,
3623 fold (build (MINUS_EXPR, etype, exp, low)),
3624 1, fold_convert (etype, integer_zero_node),
3625 value);
3627 return 0;
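
/* Illustrative sketch (hypothetical caller): a test for X in [2, 5]
   comes out as the single unsigned comparison promised at the top of
   this group of functions.  */

static tree
example_range_test (tree type, tree x)
{
  /* For signed X this builds (unsigned) (x - 2) <= 3 through the
     MINUS_EXPR path above.  */
  tree low = build_int_2 (2, 0);
  tree high = build_int_2 (5, 0);
  TREE_TYPE (low) = TREE_TYPE (x);
  TREE_TYPE (high) = TREE_TYPE (x);
  return build_range_check (type, x, 1, low, high);
}
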
3630 /* Given two ranges, see if we can merge them into one. Return 1 if we
3631 can, 0 if we can't. Set the output range into the specified parameters. */
3633 static int
3634 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3635 tree high0, int in1_p, tree low1, tree high1)
3637 int no_overlap;
3638 int subset;
3639 int temp;
3640 tree tem;
3641 int in_p;
3642 tree low, high;
3643 int lowequal = ((low0 == 0 && low1 == 0)
3644 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3645 low0, 0, low1, 0)));
3646 int highequal = ((high0 == 0 && high1 == 0)
3647 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3648 high0, 1, high1, 1)));
3650 /* Make range 0 be the range that starts first, or ends last if they
3651 start at the same value. Swap them if it isn't. */
3652 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3653 low0, 0, low1, 0))
3654 || (lowequal
3655 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3656 high1, 1, high0, 1))))
3658 temp = in0_p, in0_p = in1_p, in1_p = temp;
3659 tem = low0, low0 = low1, low1 = tem;
3660 tem = high0, high0 = high1, high1 = tem;
3663 /* Now flag two cases, whether the ranges are disjoint or whether the
3664 second range is totally subsumed in the first. Note that the tests
3665 below are simplified by the ones above. */
3666 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3667 high0, 1, low1, 0));
3668 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3669 high1, 1, high0, 1));
3671 /* We now have four cases, depending on whether we are including or
3672 excluding the two ranges. */
3673 if (in0_p && in1_p)
3675 /* If they don't overlap, the result is false. If the second range
3676 is a subset it is the result. Otherwise, the range is from the start
3677 of the second to the end of the first. */
3678 if (no_overlap)
3679 in_p = 0, low = high = 0;
3680 else if (subset)
3681 in_p = 1, low = low1, high = high1;
3682 else
3683 in_p = 1, low = low1, high = high0;
3686 else if (in0_p && ! in1_p)
3688 /* If they don't overlap, the result is the first range. If they are
3689 equal, the result is false. If the second range is a subset of the
3690 first, and the ranges begin at the same place, we go from just after
3691 the end of the first range to the end of the second. If the second
3692 range is not a subset of the first, or if it is a subset and both
3693 ranges end at the same place, the range starts at the start of the
3694 first range and ends just before the second range.
3695 Otherwise, we can't describe this as a single range. */
3696 if (no_overlap)
3697 in_p = 1, low = low0, high = high0;
3698 else if (lowequal && highequal)
3699 in_p = 0, low = high = 0;
3700 else if (subset && lowequal)
3702 in_p = 1, high = high0;
3703 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3704 integer_one_node, 0);
3706 else if (! subset || highequal)
3708 in_p = 1, low = low0;
3709 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3710 integer_one_node, 0);
3712 else
3713 return 0;
3716 else if (! in0_p && in1_p)
3718 /* If they don't overlap, the result is the second range. If the second
3719 is a subset of the first, the result is false. Otherwise,
3720 the range starts just after the first range and ends at the
3721 end of the second. */
3722 if (no_overlap)
3723 in_p = 1, low = low1, high = high1;
3724 else if (subset || highequal)
3725 in_p = 0, low = high = 0;
3726 else
3728 in_p = 1, high = high1;
3729 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3730 integer_one_node, 0);
3734 else
3736 /* The case where we are excluding both ranges. Here the complex case
3737 is if they don't overlap. In that case, the only time we have a
3738 range is if they are adjacent. If the second is a subset of the
3739 first, the result is the first. Otherwise, the range to exclude
3740 starts at the beginning of the first range and ends at the end of the
3741 second. */
3742 if (no_overlap)
3744 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3745 range_binop (PLUS_EXPR, NULL_TREE,
3746 high0, 1,
3747 integer_one_node, 1),
3748 1, low1, 0)))
3749 in_p = 0, low = low0, high = high1;
3750 else
3752 /* Canonicalize - [min, x] into - [-, x]. */
3753 if (low0 && TREE_CODE (low0) == INTEGER_CST)
3754 switch (TREE_CODE (TREE_TYPE (low0)))
3756 case ENUMERAL_TYPE:
3757 if (TYPE_PRECISION (TREE_TYPE (low0))
3758 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
3759 break;
3760 /* FALLTHROUGH */
3761 case INTEGER_TYPE:
3762 case CHAR_TYPE:
3763 if (tree_int_cst_equal (low0,
3764 TYPE_MIN_VALUE (TREE_TYPE (low0))))
3765 low0 = 0;
3766 break;
3767 case POINTER_TYPE:
3768 if (TREE_UNSIGNED (TREE_TYPE (low0))
3769 && integer_zerop (low0))
3770 low0 = 0;
3771 break;
3772 default:
3773 break;
3776 /* Canonicalize - [x, max] into - [x, -]. */
3777 if (high1 && TREE_CODE (high1) == INTEGER_CST)
3778 switch (TREE_CODE (TREE_TYPE (high1)))
3780 case ENUMERAL_TYPE:
3781 if (TYPE_PRECISION (TREE_TYPE (high1))
3782 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
3783 break;
3784 /* FALLTHROUGH */
3785 case INTEGER_TYPE:
3786 case CHAR_TYPE:
3787 if (tree_int_cst_equal (high1,
3788 TYPE_MAX_VALUE (TREE_TYPE (high1))))
3789 high1 = 0;
3790 break;
3791 case POINTER_TYPE:
3792 if (TREE_UNSIGNED (TREE_TYPE (high1))
3793 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
3794 high1, 1,
3795 integer_one_node, 1)))
3796 high1 = 0;
3797 break;
3798 default:
3799 break;
3802 /* The ranges might also be adjacent between the maximum and
3803 minimum values of the given type. For
3804 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
3805 return + [x + 1, y - 1]. */
3806 if (low0 == 0 && high1 == 0)
3808 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3809 integer_one_node, 1);
3810 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3811 integer_one_node, 0);
3812 if (low == 0 || high == 0)
3813 return 0;
3815 in_p = 1;
3817 else
3818 return 0;
3821 else if (subset)
3822 in_p = 0, low = low0, high = high0;
3823 else
3824 in_p = 0, low = low0, high = high1;
3827 *pin_p = in_p, *plow = low, *phigh = high;
3828 return 1;
3831 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3832 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3833 #endif
3835 /* EXP is some logical combination of boolean tests. See if we can
3836 merge it into some range test. Return the new tree if so. */
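/* As a concrete illustration (not from the original sources): for
   "c >= '0' && c <= '9'" both operands describe ranges of C, the two
   ranges merge to + ['0', '9'], and build_range_check can emit a
   single unsigned comparison, roughly

     (unsigned char) (c - '0') <= 9

   replacing two branches with one. The exact form of the check
   depends on the types involved. */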
3838 static tree
3839 fold_range_test (tree exp)
3841 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3842 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3843 int in0_p, in1_p, in_p;
3844 tree low0, low1, low, high0, high1, high;
3845 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3846 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3847 tree tem;
3849 /* If this is an OR operation, invert both sides; we will invert
3850 again at the end. */
3851 if (or_op)
3852 in0_p = ! in0_p, in1_p = ! in1_p;
3854 /* If both expressions are the same, if we can merge the ranges, and we
3855 can build the range test, return it or its inversion. If one of the
3856 ranges is always true or always false, consider it to be the same
3857 expression as the other. */
3858 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3859 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3860 in1_p, low1, high1)
3861 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3862 lhs != 0 ? lhs
3863 : rhs != 0 ? rhs : integer_zero_node,
3864 in_p, low, high))))
3865 return or_op ? invert_truthvalue (tem) : tem;
3867 /* On machines where the branch cost is expensive, if this is a
3868 short-circuited branch and the underlying object on both sides
3869 is the same, make a non-short-circuit operation. */
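/* For instance (illustrative), "i < 0 || i > 10" has the same
   underlying object on both sides, so on such machines it can be
   rewritten as the non-short-circuit TRUTH_OR_EXPR, evaluating both
   comparisons unconditionally and combining the results instead of
   branching. */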
3870 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3871 && lhs != 0 && rhs != 0
3872 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3873 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3874 && operand_equal_p (lhs, rhs, 0))
3876 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3877 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3878 which cases we can't do this. */
3879 if (simple_operand_p (lhs))
3880 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3881 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3882 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3883 TREE_OPERAND (exp, 1));
3885 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3886 && ! CONTAINS_PLACEHOLDER_P (lhs))
3888 tree common = save_expr (lhs);
3890 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3891 or_op ? ! in0_p : in0_p,
3892 low0, high0))
3893 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3894 or_op ? ! in1_p : in1_p,
3895 low1, high1))))
3896 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3897 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3898 TREE_TYPE (exp), lhs, rhs);
3902 return 0;
3905 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3906 bit value. Arrange things so the extra bits will be set to zero if and
3907 only if C is sign-extended to its full width. If MASK is nonzero,
3908 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3910 static tree
3911 unextend (tree c, int p, int unsignedp, tree mask)
3913 tree type = TREE_TYPE (c);
3914 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3915 tree temp;
3917 if (p == modesize || unsignedp)
3918 return c;
3920 /* We work by getting just the sign bit into the low-order bit, then
3921 into the high-order bit, then sign-extend. We then XOR that value
3922 with C. */
3923 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3924 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
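/* A worked example (illustrative values): with MODESIZE == 8 and
   P == 4, C == 0x0a has its 4-bit sign bit set, so TEMP is now 1; the
   shifts below turn it into 0x80 and then (arithmetically) 0xf0, and
   C ^ TEMP == 0xfa has nonzero extra bits. Had C arrived already
   sign-extended as 0xfa, the same TEMP would give 0xfa ^ 0xf0 == 0x0a,
   with the extra bits zero, as the comment above requires. */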
3926 /* We must use a signed type in order to get an arithmetic right shift.
3927 However, we must also avoid introducing accidental overflows, so that
3928 a subsequent call to integer_zerop will work. Hence we must
3929 do the type conversion here. At this point, the constant is either
3930 zero or one, and the conversion to a signed type can never overflow.
3931 We could get an overflow if this conversion is done anywhere else. */
3932 if (TREE_UNSIGNED (type))
3933 temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3935 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3936 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3937 if (mask != 0)
3938 temp = const_binop (BIT_AND_EXPR, temp,
3939 fold_convert (TREE_TYPE (c), mask), 0);
3940 /* If necessary, convert the type back to match the type of C. */
3941 if (TREE_UNSIGNED (type))
3942 temp = fold_convert (type, temp);
3944 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3947 /* Find ways of folding logical expressions of LHS and RHS:
3948 Try to merge two comparisons to the same innermost item.
3949 Look for range tests like "ch >= '0' && ch <= '9'".
3950 Look for combinations of simple terms on machines with expensive branches
3951 and evaluate the RHS unconditionally.
3953 For example, if we have p->a == 2 && p->b == 4 and we can make an
3954 object large enough to span both A and B, we can do this with a comparison
3955 against the object ANDed with a mask.
3957 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3958 operations to do this with one comparison.
3960 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3961 function and the one above.
3963 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3964 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3966 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3967 two operands.
3969 We return the simplified tree or 0 if no optimization is possible. */
3971 static tree
3972 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3974 /* If this is the "or" of two comparisons, we can do something if
3975 the comparisons are NE_EXPR. If this is the "and", we can do something
3976 if the comparisons are EQ_EXPR. I.e.,
3977 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3979 WANTED_CODE is this operation code. For single bit fields, we can
3980 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3981 comparison for one-bit fields. */
3983 enum tree_code wanted_code;
3984 enum tree_code lcode, rcode;
3985 tree ll_arg, lr_arg, rl_arg, rr_arg;
3986 tree ll_inner, lr_inner, rl_inner, rr_inner;
3987 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3988 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3989 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3990 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3991 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3992 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3993 enum machine_mode lnmode, rnmode;
3994 tree ll_mask, lr_mask, rl_mask, rr_mask;
3995 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3996 tree l_const, r_const;
3997 tree lntype, rntype, result;
3998 int first_bit, end_bit;
3999 int volatilep;
4001 /* Start by getting the comparison codes. Fail if anything is volatile.
4002 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4003 it were surrounded with a NE_EXPR. */
4005 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4006 return 0;
4008 lcode = TREE_CODE (lhs);
4009 rcode = TREE_CODE (rhs);
4011 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4012 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
4014 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4015 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
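/* For instance (illustrative), given "(x & 1) && (y == 4)" the lhs is
   rewritten here as "(x & 1) != 0", so that both operands are
   comparisons and the merging logic below applies uniformly. */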
4017 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4018 return 0;
4020 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4021 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4023 ll_arg = TREE_OPERAND (lhs, 0);
4024 lr_arg = TREE_OPERAND (lhs, 1);
4025 rl_arg = TREE_OPERAND (rhs, 0);
4026 rr_arg = TREE_OPERAND (rhs, 1);
4028 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4029 if (simple_operand_p (ll_arg)
4030 && simple_operand_p (lr_arg)
4031 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
4033 int compcode;
4035 if (operand_equal_p (ll_arg, rl_arg, 0)
4036 && operand_equal_p (lr_arg, rr_arg, 0))
4038 int lcompcode, rcompcode;
4040 lcompcode = comparison_to_compcode (lcode);
4041 rcompcode = comparison_to_compcode (rcode);
4042 compcode = (code == TRUTH_AND_EXPR)
4043 ? lcompcode & rcompcode
4044 : lcompcode | rcompcode;
4046 else if (operand_equal_p (ll_arg, rr_arg, 0)
4047 && operand_equal_p (lr_arg, rl_arg, 0))
4049 int lcompcode, rcompcode;
4051 rcode = swap_tree_comparison (rcode);
4052 lcompcode = comparison_to_compcode (lcode);
4053 rcompcode = comparison_to_compcode (rcode);
4054 compcode = (code == TRUTH_AND_EXPR)
4055 ? lcompcode & rcompcode
4056 : lcompcode | rcompcode;
4058 else
4059 compcode = -1;
4061 if (compcode == COMPCODE_TRUE)
4062 return fold_convert (truth_type, integer_one_node);
4063 else if (compcode == COMPCODE_FALSE)
4064 return fold_convert (truth_type, integer_zero_node);
4065 else if (compcode != -1)
4066 return build (compcode_to_comparison (compcode),
4067 truth_type, ll_arg, lr_arg);
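/* An illustration, assuming the bit encoding used by
   comparison_to_compcode (LT, EQ and GT as separate bits): for
   "(x < y) || (x == y)" the OR of the two compcodes is the compcode
   of LE, so the pair folds to "x <= y"; for "(x < y) && (x > y)" the
   AND is COMPCODE_FALSE and the whole expression folds to 0. */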
4070 /* If the RHS can be evaluated unconditionally and its operands are
4071 simple, it wins to evaluate the RHS unconditionally on machines
4072 with expensive branches. In this case, this isn't a comparison
4073 that can be merged. Avoid doing this if the RHS is a floating-point
4074 comparison since those can trap. */
4076 if (BRANCH_COST >= 2
4077 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4078 && simple_operand_p (rl_arg)
4079 && simple_operand_p (rr_arg))
4081 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4082 if (code == TRUTH_OR_EXPR
4083 && lcode == NE_EXPR && integer_zerop (lr_arg)
4084 && rcode == NE_EXPR && integer_zerop (rr_arg)
4085 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4086 return build (NE_EXPR, truth_type,
4087 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4088 ll_arg, rl_arg),
4089 integer_zero_node);
4091 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4092 if (code == TRUTH_AND_EXPR
4093 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4094 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4095 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4096 return build (EQ_EXPR, truth_type,
4097 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4098 ll_arg, rl_arg),
4099 integer_zero_node);
4101 return build (code, truth_type, lhs, rhs);
4104 /* See if the comparisons can be merged. Then get all the parameters for
4105 each side. */
4107 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4108 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4109 return 0;
4111 volatilep = 0;
4112 ll_inner = decode_field_reference (ll_arg,
4113 &ll_bitsize, &ll_bitpos, &ll_mode,
4114 &ll_unsignedp, &volatilep, &ll_mask,
4115 &ll_and_mask);
4116 lr_inner = decode_field_reference (lr_arg,
4117 &lr_bitsize, &lr_bitpos, &lr_mode,
4118 &lr_unsignedp, &volatilep, &lr_mask,
4119 &lr_and_mask);
4120 rl_inner = decode_field_reference (rl_arg,
4121 &rl_bitsize, &rl_bitpos, &rl_mode,
4122 &rl_unsignedp, &volatilep, &rl_mask,
4123 &rl_and_mask);
4124 rr_inner = decode_field_reference (rr_arg,
4125 &rr_bitsize, &rr_bitpos, &rr_mode,
4126 &rr_unsignedp, &volatilep, &rr_mask,
4127 &rr_and_mask);
4129 /* The inner operation on the lhs of each comparison must be the
4130 same if we are to be able to do anything.
4131 Then see if we have constants. If not, the same must be true for
4132 the rhs's. */
4133 if (volatilep || ll_inner == 0 || rl_inner == 0
4134 || ! operand_equal_p (ll_inner, rl_inner, 0))
4135 return 0;
4137 if (TREE_CODE (lr_arg) == INTEGER_CST
4138 && TREE_CODE (rr_arg) == INTEGER_CST)
4139 l_const = lr_arg, r_const = rr_arg;
4140 else if (lr_inner == 0 || rr_inner == 0
4141 || ! operand_equal_p (lr_inner, rr_inner, 0))
4142 return 0;
4143 else
4144 l_const = r_const = 0;
4146 /* If either comparison code is not correct for our logical operation,
4147 fail. However, we can convert a one-bit comparison against zero into
4148 the opposite comparison against that bit being set in the field. */
4150 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4151 if (lcode != wanted_code)
4153 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4155 /* Make the left operand unsigned, since we are only interested
4156 in the value of one bit. Otherwise we are doing the wrong
4157 thing below. */
4158 ll_unsignedp = 1;
4159 l_const = ll_mask;
4161 else
4162 return 0;
4165 /* This is analogous to the code for l_const above. */
4166 if (rcode != wanted_code)
4168 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4170 rl_unsignedp = 1;
4171 r_const = rl_mask;
4173 else
4174 return 0;
4177 /* After this point all optimizations will generate bit-field
4178 references, which we might not want. */
4179 if (! (*lang_hooks.can_use_bit_fields_p) ())
4180 return 0;
4182 /* See if we can find a mode that contains both fields being compared on
4183 the left. If we can't, fail. Otherwise, update all constants and masks
4184 to be relative to a field of that size. */
4185 first_bit = MIN (ll_bitpos, rl_bitpos);
4186 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4187 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4188 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4189 volatilep);
4190 if (lnmode == VOIDmode)
4191 return 0;
4193 lnbitsize = GET_MODE_BITSIZE (lnmode);
4194 lnbitpos = first_bit & ~ (lnbitsize - 1);
4195 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4196 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4198 if (BYTES_BIG_ENDIAN)
4200 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4201 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4204 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4205 size_int (xll_bitpos), 0);
4206 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4207 size_int (xrl_bitpos), 0);
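/* A concrete sketch (hypothetical field layout): if A occupies bits
   0-3 and B bits 4-7 of the same byte, then for
   "p->a == 2 && p->b == 4" the shifted masks combine to 0xff and the
   shifted constants (handled below) to 0x42, so the pair of tests
   becomes a single byte-wide comparison against 0x42. */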
4209 if (l_const)
4211 l_const = fold_convert (lntype, l_const);
4212 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4213 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4214 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4215 fold (build1 (BIT_NOT_EXPR,
4216 lntype, ll_mask)),
4217 0)))
4219 warning ("comparison is always %d", wanted_code == NE_EXPR);
4221 return fold_convert (truth_type,
4222 wanted_code == NE_EXPR
4223 ? integer_one_node : integer_zero_node);
4226 if (r_const)
4228 r_const = fold_convert (lntype, r_const);
4229 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4230 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4231 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4232 fold (build1 (BIT_NOT_EXPR,
4233 lntype, rl_mask)),
4234 0)))
4236 warning ("comparison is always %d", wanted_code == NE_EXPR);
4238 return fold_convert (truth_type,
4239 wanted_code == NE_EXPR
4240 ? integer_one_node : integer_zero_node);
4244 /* If the right sides are not constant, do the same for them. Also,
4245 disallow this optimization if a size or signedness mismatch occurs
4246 between the left and right sides. */
4247 if (l_const == 0)
4249 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4250 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4251 /* Make sure the two fields on the right
4252 correspond to the left without being swapped. */
4253 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4254 return 0;
4256 first_bit = MIN (lr_bitpos, rr_bitpos);
4257 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4258 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4259 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4260 volatilep);
4261 if (rnmode == VOIDmode)
4262 return 0;
4264 rnbitsize = GET_MODE_BITSIZE (rnmode);
4265 rnbitpos = first_bit & ~ (rnbitsize - 1);
4266 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4267 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4269 if (BYTES_BIG_ENDIAN)
4271 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4272 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4275 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4276 size_int (xlr_bitpos), 0);
4277 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4278 size_int (xrr_bitpos), 0);
4280 /* Make a mask that corresponds to both fields being compared.
4281 Do this for both items being compared. If the operands are the
4282 same size and the bits being compared are in the same position
4283 then we can do this by masking both and comparing the masked
4284 results. */
4285 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4286 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4287 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4289 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4290 ll_unsignedp || rl_unsignedp);
4291 if (! all_ones_mask_p (ll_mask, lnbitsize))
4292 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4294 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4295 lr_unsignedp || rr_unsignedp);
4296 if (! all_ones_mask_p (lr_mask, rnbitsize))
4297 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4299 return build (wanted_code, truth_type, lhs, rhs);
4302 /* There is still another way we can do something: If both pairs of
4303 fields being compared are adjacent, we may be able to make a wider
4304 field containing them both.
4306 Note that we still must mask the lhs/rhs expressions. Furthermore,
4307 the mask must be shifted to account for the shift done by
4308 make_bit_field_ref. */
4309 if ((ll_bitsize + ll_bitpos == rl_bitpos
4310 && lr_bitsize + lr_bitpos == rr_bitpos)
4311 || (ll_bitpos == rl_bitpos + rl_bitsize
4312 && lr_bitpos == rr_bitpos + rr_bitsize))
4314 tree type;
4316 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4317 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4318 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4319 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4321 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4322 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4323 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4324 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4326 /* Convert to the smaller type before masking out unwanted bits. */
4327 type = lntype;
4328 if (lntype != rntype)
4330 if (lnbitsize > rnbitsize)
4332 lhs = fold_convert (rntype, lhs);
4333 ll_mask = fold_convert (rntype, ll_mask);
4334 type = rntype;
4336 else if (lnbitsize < rnbitsize)
4338 rhs = fold_convert (lntype, rhs);
4339 lr_mask = fold_convert (lntype, lr_mask);
4340 type = lntype;
4344 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4345 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4347 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4348 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4350 return build (wanted_code, truth_type, lhs, rhs);
4353 return 0;
4356 /* Handle the case of comparisons with constants. If there is something in
4357 common between the masks, those bits of the constants must be the same.
4358 If not, the condition is always false. Test for this to avoid generating
4359 incorrect code below. */
4360 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4361 if (! integer_zerop (result)
4362 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4363 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4365 if (wanted_code == NE_EXPR)
4367 warning ("`or' of unmatched not-equal tests is always 1");
4368 return fold_convert (truth_type, integer_one_node);
4370 else
4372 warning ("`and' of mutually exclusive equal-tests is always 0");
4373 return fold_convert (truth_type, integer_zero_node);
4377 /* Construct the expression we will return. First get the component
4378 reference we will make. Unless the mask is all ones the width of
4379 that field, perform the mask operation. Then compare with the
4380 merged constant. */
4381 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4382 ll_unsignedp || rl_unsignedp);
4384 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4385 if (! all_ones_mask_p (ll_mask, lnbitsize))
4386 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4388 return build (wanted_code, truth_type, result,
4389 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4392 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4393 constant. */
4395 static tree
4396 optimize_minmax_comparison (tree t)
4398 tree type = TREE_TYPE (t);
4399 tree arg0 = TREE_OPERAND (t, 0);
4400 enum tree_code op_code;
4401 tree comp_const = TREE_OPERAND (t, 1);
4402 tree minmax_const;
4403 int consts_equal, consts_lt;
4404 tree inner;
4406 STRIP_SIGN_NOPS (arg0);
4408 op_code = TREE_CODE (arg0);
4409 minmax_const = TREE_OPERAND (arg0, 1);
4410 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4411 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4412 inner = TREE_OPERAND (arg0, 0);
4414 /* If something does not permit us to optimize, return the original tree. */
4415 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4416 || TREE_CODE (comp_const) != INTEGER_CST
4417 || TREE_CONSTANT_OVERFLOW (comp_const)
4418 || TREE_CODE (minmax_const) != INTEGER_CST
4419 || TREE_CONSTANT_OVERFLOW (minmax_const))
4420 return t;
4422 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4423 and GT_EXPR, doing the rest with recursive calls using logical
4424 simplifications. */
4425 switch (TREE_CODE (t))
4427 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4428 return
4429 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4431 case GE_EXPR:
4432 return
4433 fold (build (TRUTH_ORIF_EXPR, type,
4434 optimize_minmax_comparison
4435 (build (EQ_EXPR, type, arg0, comp_const)),
4436 optimize_minmax_comparison
4437 (build (GT_EXPR, type, arg0, comp_const))));
4439 case EQ_EXPR:
4440 if (op_code == MAX_EXPR && consts_equal)
4441 /* MAX (X, 0) == 0 -> X <= 0 */
4442 return fold (build (LE_EXPR, type, inner, comp_const));
4444 else if (op_code == MAX_EXPR && consts_lt)
4445 /* MAX (X, 0) == 5 -> X == 5 */
4446 return fold (build (EQ_EXPR, type, inner, comp_const));
4448 else if (op_code == MAX_EXPR)
4449 /* MAX (X, 0) == -1 -> false */
4450 return omit_one_operand (type, integer_zero_node, inner);
4452 else if (consts_equal)
4453 /* MIN (X, 0) == 0 -> X >= 0 */
4454 return fold (build (GE_EXPR, type, inner, comp_const));
4456 else if (consts_lt)
4457 /* MIN (X, 0) == 5 -> false */
4458 return omit_one_operand (type, integer_zero_node, inner);
4460 else
4461 /* MIN (X, 0) == -1 -> X == -1 */
4462 return fold (build (EQ_EXPR, type, inner, comp_const));
4464 case GT_EXPR:
4465 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4466 /* MAX (X, 0) > 0 -> X > 0
4467 MAX (X, 0) > 5 -> X > 5 */
4468 return fold (build (GT_EXPR, type, inner, comp_const));
4470 else if (op_code == MAX_EXPR)
4471 /* MAX (X, 0) > -1 -> true */
4472 return omit_one_operand (type, integer_one_node, inner);
4474 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4475 /* MIN (X, 0) > 0 -> false
4476 MIN (X, 0) > 5 -> false */
4477 return omit_one_operand (type, integer_zero_node, inner);
4479 else
4480 /* MIN (X, 0) > -1 -> X > -1 */
4481 return fold (build (GT_EXPR, type, inner, comp_const));
4483 default:
4484 return t;
4488 /* T is an integer expression that is being multiplied, divided, or taken a
4489 modulus (CODE says which and what kind of divide or modulus) by a
4490 constant C. See if we can eliminate that operation by folding it with
4491 other operations already in T. WIDE_TYPE, if non-null, is a type that
4492 should be used for the computation if wider than our type.
4494 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4495 (X * 2) + (Y * 4). We must, however, be assured that either the original
4496 expression would not overflow or that overflow is undefined for the type
4497 in the language in question.
4499 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4500 the machine has a multiply-accumulate insn or that this is part of an
4501 addressing calculation.
4503 If we return a non-null expression, it is an equivalent form of the
4504 original computation, but need not be in the original type. */
4506 static tree
4507 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4509 /* To avoid exponential search depth, refuse to allow recursion past
4510 three levels. Beyond that (1) it's highly unlikely that we'll find
4511 something interesting and (2) we've probably processed it before
4512 when we built the inner expression. */
4514 static int depth;
4515 tree ret;
4517 if (depth > 3)
4518 return NULL;
4520 depth++;
4521 ret = extract_muldiv_1 (t, c, code, wide_type);
4522 depth--;
4524 return ret;
4527 static tree
4528 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4530 tree type = TREE_TYPE (t);
4531 enum tree_code tcode = TREE_CODE (t);
4532 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4533 > GET_MODE_SIZE (TYPE_MODE (type)))
4534 ? wide_type : type);
4535 tree t1, t2;
4536 int same_p = tcode == code;
4537 tree op0 = NULL_TREE, op1 = NULL_TREE;
4539 /* Don't deal with constants of zero here; they confuse the code below. */
4540 if (integer_zerop (c))
4541 return NULL_TREE;
4543 if (TREE_CODE_CLASS (tcode) == '1')
4544 op0 = TREE_OPERAND (t, 0);
4546 if (TREE_CODE_CLASS (tcode) == '2')
4547 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4549 /* Note that we need not handle conditional operations here since fold
4550 already handles those cases. So just do arithmetic here. */
4551 switch (tcode)
4553 case INTEGER_CST:
4554 /* For a constant, we can always simplify if we are a multiply
4555 or (for divide and modulus) if it is a multiple of our constant. */
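/* For example (illustrative values): when T is the constant 12, C is
   4 and CODE is TRUNC_DIV_EXPR, 12 % 4 == 0 and this folds directly to
   the constant 3 in CTYPE; with C == 5 the modulus is nonzero and we
   fall through, returning no simplification. */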
4556 if (code == MULT_EXPR
4557 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4558 return const_binop (code, fold_convert (ctype, t),
4559 fold_convert (ctype, c), 0);
4560 break;
4562 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4563 /* If op0 is an expression ... */
4564 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4565 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4566 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4567 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4568 /* ... and is unsigned, and its type is smaller than ctype,
4569 then we cannot pass through as widening. */
4570 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4571 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4572 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4573 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4574 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4575 /* ... or this is a truncation (t is narrower than op0),
4576 then we cannot pass through this narrowing. */
4577 || (GET_MODE_SIZE (TYPE_MODE (type))
4578 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4579 /* ... or signedness changes for division or modulus,
4580 then we cannot pass through this conversion. */
4581 || (code != MULT_EXPR
4582 && (TREE_UNSIGNED (ctype)
4583 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4584 break;
4586 /* Pass the constant down and see if we can make a simplification. If
4587 we can, replace this expression with the inner simplification for
4588 possible later conversion to our or some other type. */
4589 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4590 && TREE_CODE (t2) == INTEGER_CST
4591 && ! TREE_CONSTANT_OVERFLOW (t2)
4592 && (0 != (t1 = extract_muldiv (op0, t2, code,
4593 code == MULT_EXPR
4594 ? ctype : NULL_TREE))))
4595 return t1;
4596 break;
4598 case ABS_EXPR:
4599 /* If widening the type changes it from signed to unsigned, then we
4600 must avoid building ABS_EXPR itself as unsigned. */
4601 if (TREE_UNSIGNED (ctype) && !TREE_UNSIGNED (type))
4603 tree cstype = (*lang_hooks.types.signed_type) (ctype);
4604 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
4606 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
4607 return fold_convert (ctype, t1);
4609 break;
4611 /* FALLTHROUGH */
4612 case NEGATE_EXPR:
4613 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4614 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4615 break;
4617 case MIN_EXPR: case MAX_EXPR:
4618 /* If widening the type changes the signedness, then we can't perform
4619 this optimization as that changes the result. */
4620 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4621 break;
4623 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4624 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4625 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4627 if (tree_int_cst_sgn (c) < 0)
4628 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4630 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4631 fold_convert (ctype, t2)));
4633 break;
4635 case WITH_RECORD_EXPR:
4636 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4637 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4638 TREE_OPERAND (t, 1));
4639 break;
4641 case LSHIFT_EXPR: case RSHIFT_EXPR:
4642 /* If the second operand is constant, this is a multiplication
4643 or floor division by a power of two, so we can treat it that
4644 way unless the multiplier or divisor overflows. */
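/* Illustrative example: for "(x << 3) / 4" we reach here with
   OP1 == 3; the shift is rewritten as the multiplication "x * 8", and
   the recursive call can then cancel that against the division,
   yielding "x * 2" under the overflow provisos described above. */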
4645 if (TREE_CODE (op1) == INTEGER_CST
4646 /* const_binop may not detect overflow correctly,
4647 so check for it explicitly here. */
4648 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4649 && TREE_INT_CST_HIGH (op1) == 0
4650 && 0 != (t1 = fold_convert (ctype,
4651 const_binop (LSHIFT_EXPR,
4652 size_one_node,
4653 op1, 0)))
4654 && ! TREE_OVERFLOW (t1))
4655 return extract_muldiv (build (tcode == LSHIFT_EXPR
4656 ? MULT_EXPR : FLOOR_DIV_EXPR,
4657 ctype, fold_convert (ctype, op0), t1),
4658 c, code, wide_type);
4659 break;
4661 case PLUS_EXPR: case MINUS_EXPR:
4662 /* See if we can eliminate the operation on both sides. If we can, we
4663 can return a new PLUS or MINUS. If we can't, the only remaining
4664 cases where we can do anything are if the second operand is a
4665 constant. */
4666 t1 = extract_muldiv (op0, c, code, wide_type);
4667 t2 = extract_muldiv (op1, c, code, wide_type);
4668 if (t1 != 0 && t2 != 0
4669 && (code == MULT_EXPR
4670 /* If not multiplication, we can only do this if both operands
4671 are divisible by c. */
4672 || (multiple_of_p (ctype, op0, c)
4673 && multiple_of_p (ctype, op1, c))))
4674 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4675 fold_convert (ctype, t2)));
4677 /* If this was a subtraction, negate OP1 and set it to be an addition.
4678 This simplifies the logic below. */
4679 if (tcode == MINUS_EXPR)
4680 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4682 if (TREE_CODE (op1) != INTEGER_CST)
4683 break;
4685 /* If either OP1 or C is negative, this optimization is not safe for
4686 some of the division and remainder types while for others we need
4687 to change the code. */
4688 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4690 if (code == CEIL_DIV_EXPR)
4691 code = FLOOR_DIV_EXPR;
4692 else if (code == FLOOR_DIV_EXPR)
4693 code = CEIL_DIV_EXPR;
4694 else if (code != MULT_EXPR
4695 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4696 break;
4699 /* If it's a multiply or a division/modulus operation of a multiple
4700 of our constant, do the operation and verify it doesn't overflow. */
4701 if (code == MULT_EXPR
4702 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4704 op1 = const_binop (code, fold_convert (ctype, op1),
4705 fold_convert (ctype, c), 0);
4706 /* We allow the constant to overflow with wrapping semantics. */
4707 if (op1 == 0
4708 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4709 break;
4711 else
4712 break;
4714 /* If we have an unsigned type that is not a sizetype, we cannot widen
4715 the operation since it will change the result if the original
4716 computation overflowed. */
4717 if (TREE_UNSIGNED (ctype)
4718 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4719 && ctype != type)
4720 break;
4722 /* If we were able to eliminate our operation from the first side,
4723 apply our operation to the second side and reform the PLUS. */
4724 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4725 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4727 /* The last case is if we are a multiply. In that case, we can
4728 apply the distributive law to commute the multiply and addition
4729 if the multiplication of the constants doesn't overflow. */
4730 if (code == MULT_EXPR)
4731 return fold (build (tcode, ctype,
4732 fold (build (code, ctype,
4733 fold_convert (ctype, op0),
4734 fold_convert (ctype, c))),
4735 op1));
4737 break;
4739 case MULT_EXPR:
4740 /* We have a special case here if we are doing something like
4741 (C * 8) % 4 since we know that's zero. */
4742 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4743 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4744 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4745 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4746 return omit_one_operand (type, integer_zero_node, op0);
4748 /* ... fall through ... */
4750 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4751 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4752 /* If we can extract our operation from the LHS, do so and return a
4753 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4754 do something only if the second operand is a constant. */
4755 if (same_p
4756 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4757 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4758 fold_convert (ctype, op1)));
4759 else if (tcode == MULT_EXPR && code == MULT_EXPR
4760 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4761 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4762 fold_convert (ctype, t1)));
4763 else if (TREE_CODE (op1) != INTEGER_CST)
4764 return 0;
4766 /* If these are the same operation types, we can associate them
4767 assuming no overflow. */
4768 if (tcode == code
4769 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4770 fold_convert (ctype, c), 0))
4771 && ! TREE_OVERFLOW (t1))
4772 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4774 /* If these operations "cancel" each other, we have the main
4775 optimizations of this pass, which occur when either constant is a
4776 multiple of the other, in which case we replace this with either an
4777 operation of CODE or TCODE.
4779 If we have an unsigned type that is not a sizetype, we cannot do
4780 this since it will change the result if the original computation
4781 overflowed. */
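/* Two illustrative instances: for T == "x * 12", C == 4 and
   CODE == TRUNC_DIV_EXPR the first branch below applies (12 % 4 == 0)
   and gives "x * 3"; for T == "x * 4" and C == 12 the second branch
   applies (12 % 4 == 0) and gives "x / 3". */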
4782 if ((! TREE_UNSIGNED (ctype)
4783 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4784 && ! flag_wrapv
4785 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4786 || (tcode == MULT_EXPR
4787 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4788 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4790 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4791 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4792 fold_convert (ctype,
4793 const_binop (TRUNC_DIV_EXPR,
4794 op1, c, 0))));
4795 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4796 return fold (build (code, ctype, fold_convert (ctype, op0),
4797 fold_convert (ctype,
4798 const_binop (TRUNC_DIV_EXPR,
4799 c, op1, 0))));
4801 break;
4803 default:
4804 break;
4807 return 0;
4810 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4811 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4812 that we may sometimes modify the tree. */
4814 static tree
4815 strip_compound_expr (tree t, tree s)
4817 enum tree_code code = TREE_CODE (t);
4819 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4820 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4821 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4822 return TREE_OPERAND (t, 1);
4824 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4825 don't bother handling any other types. */
4826 else if (code == COND_EXPR)
4828 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4829 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4830 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4832 else if (TREE_CODE_CLASS (code) == '1')
4833 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4834 else if (TREE_CODE_CLASS (code) == '<'
4835 || TREE_CODE_CLASS (code) == '2')
4837 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4838 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4841 return t;
4844 /* Return a node which has the indicated constant VALUE (either 0 or
4845 1), and is of the indicated TYPE. */
4847 static tree
4848 constant_boolean_node (int value, tree type)
4850 if (type == integer_type_node)
4851 return value ? integer_one_node : integer_zero_node;
4852 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4853 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4854 integer_zero_node);
4855 else
4857 tree t = build_int_2 (value, 0);
4859 TREE_TYPE (t) = type;
4860 return t;
4864 /* Utility function for the following routine, to see how complex a nesting of
4865 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4866 we don't care (to avoid spending too much time on complex expressions). */
4868 static int
4869 count_cond (tree expr, int lim)
4871 int ctrue, cfalse;
4873 if (TREE_CODE (expr) != COND_EXPR)
4874 return 0;
4875 else if (lim <= 0)
4876 return 0;
4878 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4879 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4880 return MIN (lim, 1 + ctrue + cfalse);
4883 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4884 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4885 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4886 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4887 COND is the first argument to CODE; otherwise (as in the example
4888 given here), it is the second argument. TYPE is the type of the
4889 original expression. */
4891 static tree
4892 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4893 tree cond, tree arg, int cond_first_p)
4895 tree test, true_value, false_value;
4896 tree lhs = NULL_TREE;
4897 tree rhs = NULL_TREE;
4898 /* In the end, we'll produce a COND_EXPR. Both arms of the
4899 conditional expression will be binary operations. The left-hand
4900 side of the expression to be executed if the condition is true
4901 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4902 of the expression to be executed if the condition is true will be
4903 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4904 but apply to the expression to be executed if the conditional is
4905 false. */
4906 tree *true_lhs;
4907 tree *true_rhs;
4908 tree *false_lhs;
4909 tree *false_rhs;
4910 /* These are the codes to use for the left-hand side and right-hand
4911 side of the COND_EXPR. Normally, they are the same as CODE. */
4912 enum tree_code lhs_code = code;
4913 enum tree_code rhs_code = code;
4914 /* And these are the types of the expressions. */
4915 tree lhs_type = type;
4916 tree rhs_type = type;
4917 int save = 0;
4919 if (cond_first_p)
4921 true_rhs = false_rhs = &arg;
4922 true_lhs = &true_value;
4923 false_lhs = &false_value;
4925 else
4927 true_lhs = false_lhs = &arg;
4928 true_rhs = &true_value;
4929 false_rhs = &false_value;
4932 if (TREE_CODE (cond) == COND_EXPR)
4934 test = TREE_OPERAND (cond, 0);
4935 true_value = TREE_OPERAND (cond, 1);
4936 false_value = TREE_OPERAND (cond, 2);
4937 /* If this operand throws an exception, then it does not make
4938 sense to try to perform a logical or arithmetic operation
4939 involving it. Instead of building `a + throw 3' for example,
4940 we simply build `a, throw 3'. */
4941 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4943 if (! cond_first_p)
4945 lhs_code = COMPOUND_EXPR;
4946 lhs_type = void_type_node;
4948 else
4949 lhs = true_value;
4951 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4953 if (! cond_first_p)
4955 rhs_code = COMPOUND_EXPR;
4956 rhs_type = void_type_node;
4958 else
4959 rhs = false_value;
4962 else
4964 tree testtype = TREE_TYPE (cond);
4965 test = cond;
4966 true_value = fold_convert (testtype, integer_one_node);
4967 false_value = fold_convert (testtype, integer_zero_node);
4970 /* If ARG is complex we want to make sure we only evaluate it once. Though
4971 this is only required if it is volatile, it might be more efficient even
4972 if it is not. However, if we succeed in folding one part to a constant,
4973 we do not need to make this SAVE_EXPR. Since we do this optimization
4974 primarily to see if we do end up with a constant and this SAVE_EXPR
4975 interferes with later optimizations, suppressing it when we can is
4976 important.
4978 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4979 do so. Don't try to see if the result is a constant if an arm is a
4980 COND_EXPR since we get exponential behavior in that case. */
4982 if (saved_expr_p (arg))
4983 save = 1;
4984 else if (lhs == 0 && rhs == 0
4985 && !TREE_CONSTANT (arg)
4986 && (*lang_hooks.decls.global_bindings_p) () == 0
4987 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4988 || TREE_SIDE_EFFECTS (arg)))
4990 if (TREE_CODE (true_value) != COND_EXPR)
4991 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4993 if (TREE_CODE (false_value) != COND_EXPR)
4994 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4996 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4997 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4999 arg = save_expr (arg);
5000 lhs = rhs = 0;
5001 save = saved_expr_p (arg);
5005 if (lhs == 0)
5006 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
5007 if (rhs == 0)
5008 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
5010 test = fold (build (COND_EXPR, type, test, lhs, rhs));
5012 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
5013 ahead of the COND_EXPR we made. Otherwise we would have it only
5014 evaluated in one branch, with the other branch using the result
5015 but missing the evaluation code. Beware that the save_expr call
5016 above might not return a SAVE_EXPR, so testing the TREE_CODE
5017 of ARG is not enough to decide here. */
5018 if (save)
5019 return build (COMPOUND_EXPR, type,
5020 fold_convert (void_type_node, arg),
5021 strip_compound_expr (test, arg));
5022 else
5023 return fold_convert (type, test);
5027 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5029 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5030 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5031 ADDEND is the same as X.
5033 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5034 and finite. The problematic cases are when X is zero, and its mode
5035 has signed zeros. In the case of rounding towards -infinity,
5036 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5037 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5039 static bool
5040 fold_real_zero_addition_p (tree type, tree addend, int negate)
5042 if (!real_zerop (addend))
5043 return false;
5045 /* Don't allow the fold with -fsignaling-nans. */
5046 if (HONOR_SNANS (TYPE_MODE (type)))
5047 return false;
5049 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5050 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5051 return true;
5053 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5054 if (TREE_CODE (addend) == REAL_CST
5055 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5056 negate = !negate;
5058 /* The mode has signed zeros, and we have to honor their sign.
5059 In this situation, there is only one case we can return true for.
5060 X - 0 is the same as X unless rounding towards -infinity is
5061 supported. */
5062 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5065 /* Subroutine of fold() that checks comparisons of built-in math
5066 functions against real constants.
5068 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5069 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5070 is the type of the result and ARG0 and ARG1 are the operands of the
5071 comparison. ARG1 must be a TREE_REAL_CST.
5073 The function returns the constant folded tree if a simplification
5074 can be made, and NULL_TREE otherwise. */
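/* For example (illustrative): with CODE == GT_EXPR and ARG1 == 2.0,
   "sqrt (x) > 2.0" is folded below into "x > 4.0", since squaring
   both sides is valid once the negative and infinite values of ARG1
   have been handled separately. */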
5076 static tree
5077 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5078 tree type, tree arg0, tree arg1)
5080 REAL_VALUE_TYPE c;
5082 if (fcode == BUILT_IN_SQRT
5083 || fcode == BUILT_IN_SQRTF
5084 || fcode == BUILT_IN_SQRTL)
5086 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5087 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5089 c = TREE_REAL_CST (arg1);
5090 if (REAL_VALUE_NEGATIVE (c))
5092 /* sqrt(x) < y is always false, if y is negative. */
5093 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5094 return omit_one_operand (type,
5095 fold_convert (type, integer_zero_node),
5096 arg);
5098 /* sqrt(x) > y is always true, if y is negative and we
5099 don't care about NaNs, i.e. negative values of x. */
5100 if (code == NE_EXPR || !HONOR_NANS (mode))
5101 return omit_one_operand (type,
5102 fold_convert (type, integer_one_node),
5103 arg);
5105 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5106 return fold (build (GE_EXPR, type, arg,
5107 build_real (TREE_TYPE (arg), dconst0)));
5109 else if (code == GT_EXPR || code == GE_EXPR)
5111 REAL_VALUE_TYPE c2;
5113 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5114 real_convert (&c2, mode, &c2);
5116 if (REAL_VALUE_ISINF (c2))
5118 /* sqrt(x) > y is x == +Inf, when y is very large. */
5119 if (HONOR_INFINITIES (mode))
5120 return fold (build (EQ_EXPR, type, arg,
5121 build_real (TREE_TYPE (arg), c2)));
5123 /* sqrt(x) > y is always false, when y is very large
5124 and we don't care about infinities. */
5125 return omit_one_operand (type,
5126 fold_convert (type, integer_zero_node),
5127 arg);
5130 /* sqrt(x) > c is the same as x > c*c. */
5131 return fold (build (code, type, arg,
5132 build_real (TREE_TYPE (arg), c2)));
5134 else if (code == LT_EXPR || code == LE_EXPR)
5136 REAL_VALUE_TYPE c2;
5138 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5139 real_convert (&c2, mode, &c2);
5141 if (REAL_VALUE_ISINF (c2))
5143 /* sqrt(x) < y is always true, when y is a very large
5144 value and we don't care about NaNs or Infinities. */
5145 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5146 return omit_one_operand (type,
5147 fold_convert (type, integer_one_node),
5148 arg);
5150 /* sqrt(x) < y is x != +Inf when y is very large and we
5151 don't care about NaNs. */
5152 if (! HONOR_NANS (mode))
5153 return fold (build (NE_EXPR, type, arg,
5154 build_real (TREE_TYPE (arg), c2)));
5156 /* sqrt(x) < y is x >= 0 when y is very large and we
5157 don't care about Infinities. */
5158 if (! HONOR_INFINITIES (mode))
5159 return fold (build (GE_EXPR, type, arg,
5160 build_real (TREE_TYPE (arg), dconst0)));
5162 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5163 if ((*lang_hooks.decls.global_bindings_p) () != 0
5164 || CONTAINS_PLACEHOLDER_P (arg))
5165 return NULL_TREE;
5167 arg = save_expr (arg);
5168 return fold (build (TRUTH_ANDIF_EXPR, type,
5169 fold (build (GE_EXPR, type, arg,
5170 build_real (TREE_TYPE (arg),
5171 dconst0))),
5172 fold (build (NE_EXPR, type, arg,
5173 build_real (TREE_TYPE (arg),
5174 c2)))));
5177 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5178 if (! HONOR_NANS (mode))
5179 return fold (build (code, type, arg,
5180 build_real (TREE_TYPE (arg), c2)));
5182 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5183 if ((*lang_hooks.decls.global_bindings_p) () == 0
5184 && ! CONTAINS_PLACEHOLDER_P (arg))
5186 arg = save_expr (arg);
5187 return fold (build (TRUTH_ANDIF_EXPR, type,
5188 fold (build (GE_EXPR, type, arg,
5189 build_real (TREE_TYPE (arg),
5190 dconst0))),
5191 fold (build (code, type, arg,
5192 build_real (TREE_TYPE (arg),
5193 c2)))));
5198 return NULL_TREE;
5201 /* Subroutine of fold() that optimizes comparisons against Infinities,
5202 either +Inf or -Inf.
5204 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5205 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5206 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5208 The function returns the constant folded tree if a simplification
5209 can be made, and NULL_TREE otherwise. */
5211 static tree
5212 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5214 enum machine_mode mode;
5215 REAL_VALUE_TYPE max;
5216 tree temp;
5217 bool neg;
5219 mode = TYPE_MODE (TREE_TYPE (arg0));
5221 /* For negative infinity swap the sense of the comparison. */
5222 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5223 if (neg)
5224 code = swap_tree_comparison (code);
5226 switch (code)
5228 case GT_EXPR:
5229 /* x > +Inf is always false, if we ignore sNaNs. */
5230 if (HONOR_SNANS (mode))
5231 return NULL_TREE;
5232 return omit_one_operand (type,
5233 fold_convert (type, integer_zero_node),
5234 arg0);
5236 case LE_EXPR:
5237 /* x <= +Inf is always true, if we don't care about NaNs. */
5238 if (! HONOR_NANS (mode))
5239 return omit_one_operand (type,
5240 fold_convert (type, integer_one_node),
5241 arg0);
5243 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5244 if ((*lang_hooks.decls.global_bindings_p) () == 0
5245 && ! CONTAINS_PLACEHOLDER_P (arg0))
5247 arg0 = save_expr (arg0);
5248 return fold (build (EQ_EXPR, type, arg0, arg0));
5250 break;
5252 case EQ_EXPR:
5253 case GE_EXPR:
5254 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5255 real_maxval (&max, neg, mode);
5256 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5257 arg0, build_real (TREE_TYPE (arg0), max)));
5259 case LT_EXPR:
5260 /* x < +Inf is always equal to x <= DBL_MAX. */
5261 real_maxval (&max, neg, mode);
5262 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5263 arg0, build_real (TREE_TYPE (arg0), max)));
5265 case NE_EXPR:
5266 /* x != +Inf is always equal to !(x > DBL_MAX). */
5267 real_maxval (&max, neg, mode);
5268 if (! HONOR_NANS (mode))
5269 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5270 arg0, build_real (TREE_TYPE (arg0), max)));
5271 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5272 arg0, build_real (TREE_TYPE (arg0), max)));
5273 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5275 default:
5276 break;
5279 return NULL_TREE;
5282 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5283 equality/inequality test, then return a simplified form of
5284 the test using shifts and logical operations. Otherwise return
5285 NULL. TYPE is the desired result type. */
5287 tree
5288 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5289 tree result_type)
5291 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5292 operand 0. */
5293 if (code == TRUTH_NOT_EXPR)
5295 code = TREE_CODE (arg0);
5296 if (code != NE_EXPR && code != EQ_EXPR)
5297 return NULL_TREE;
5299 /* Extract the arguments of the EQ/NE. */
5300 arg1 = TREE_OPERAND (arg0, 1);
5301 arg0 = TREE_OPERAND (arg0, 0);
5303 /* This requires us to invert the code. */
5304 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5307 /* If this is testing a single bit, we can optimize the test. */
5308 if ((code == NE_EXPR || code == EQ_EXPR)
5309 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5310 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5312 tree inner = TREE_OPERAND (arg0, 0);
5313 tree type = TREE_TYPE (arg0);
5314 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5315 enum machine_mode operand_mode = TYPE_MODE (type);
5316 int ops_unsigned;
5317 tree signed_type, unsigned_type, intermediate_type;
5318 tree arg00;
5320 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5321 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5322 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5323 if (arg00 != NULL_TREE
5324 /* This is only a win if casting to a signed type is cheap,
5325 i.e. when arg00's type is not a partial mode. */
5326 && TYPE_PRECISION (TREE_TYPE (arg00))
5327 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5329 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5330 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5331 fold_convert (stype, arg00),
5332 fold_convert (stype, integer_zero_node)));
5335 /* Otherwise we have (A & C) != 0 where C is a single bit,
5336 convert that into ((A >> C2) & 1), where C2 = log2(C).
5337 Similarly for (A & C) == 0. */
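/* Illustrative example: "(flags & 8) != 0" has C == 8, hence C2 == 3,
   and becomes "(flags >> 3) & 1" (converted to the result type); the
   == 0 form gets an extra XOR with 1 below to invert the extracted
   bit. */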
5339 /* If INNER is a right shift of a constant and it plus BITNUM does
5340 not overflow, adjust BITNUM and INNER. */
5341 if (TREE_CODE (inner) == RSHIFT_EXPR
5342 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5343 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5344 && bitnum < TYPE_PRECISION (type)
5345 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5346 bitnum - TYPE_PRECISION (type)))
5348 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5349 inner = TREE_OPERAND (inner, 0);
5352 /* If we are going to be able to omit the AND below, we must do our
5353 operations as unsigned. If we must use the AND, we have a choice.
5354 Normally unsigned is faster, but for some machines signed is. */
5355 #ifdef LOAD_EXTEND_OP
5356 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5357 && !flag_syntax_only) ? 0 : 1;
5358 #else
5359 ops_unsigned = 1;
5360 #endif
5362 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5363 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5364 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5365 inner = fold_convert (intermediate_type, inner);
5367 if (bitnum != 0)
5368 inner = build (RSHIFT_EXPR, intermediate_type,
5369 inner, size_int (bitnum));
5371 if (code == EQ_EXPR)
5372 inner = build (BIT_XOR_EXPR, intermediate_type,
5373 inner, integer_one_node);
5375 /* Put the AND last so it can combine with more things. */
5376 inner = build (BIT_AND_EXPR, intermediate_type,
5377 inner, integer_one_node);
5379 /* Make sure to return the proper type. */
5380 inner = fold_convert (result_type, inner);
5382 return inner;
5384 return NULL_TREE;
5387 /* Check whether we are allowed to reorder operands arg0 and arg1,
5388 such that the evaluation of arg1 occurs before arg0. */
5390 static bool
5391 reorder_operands_p (tree arg0, tree arg1)
5393 if (! flag_evaluation_order)
5394 return true;
5395 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5396 return true;
5397 return ! TREE_SIDE_EFFECTS (arg0)
5398 && ! TREE_SIDE_EFFECTS (arg1);
5401 /* Test whether it is preferable to swap two operands, ARG0 and
5402 ARG1, for example because ARG0 is an integer constant and ARG1
5403 isn't. If REORDER is true, only recommend swapping if we can
5404 evaluate the operands in reverse order. */
5406 static bool
5407 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5409 STRIP_SIGN_NOPS (arg0);
5410 STRIP_SIGN_NOPS (arg1);
5412 if (TREE_CODE (arg1) == INTEGER_CST)
5413 return 0;
5414 if (TREE_CODE (arg0) == INTEGER_CST)
5415 return 1;
5417 if (TREE_CODE (arg1) == REAL_CST)
5418 return 0;
5419 if (TREE_CODE (arg0) == REAL_CST)
5420 return 1;
5422 if (TREE_CODE (arg1) == COMPLEX_CST)
5423 return 0;
5424 if (TREE_CODE (arg0) == COMPLEX_CST)
5425 return 1;
5427 if (TREE_CONSTANT (arg1))
5428 return 0;
5429 if (TREE_CONSTANT (arg0))
5430 return 1;
5432 if (optimize_size)
5433 return 0;
5435 if (reorder && flag_evaluation_order
5436 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5437 return 0;
5439 if (DECL_P (arg1))
5440 return 0;
5441 if (DECL_P (arg0))
5442 return 1;
5444 return 0;
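/* Editorial note (illustrative, not from the original file): the net
   effect of the predicate above is a canonical operand order for
   commutative operators -- constants and other literals end up as the
   second operand.  A hypothetical call from within this file,
     fold (build (PLUS_EXPR, type, cst, x)),
   therefore yields the same tree as
     fold (build (PLUS_EXPR, type, x, cst)),
   so the patterns below only ever need to test arg1 for a constant.  */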
5447 /* Perform constant folding and related simplification of EXPR.
5448 The related simplifications include x*1 => x, x*0 => 0, etc.,
5449 and application of the associative law.
5450 NOP_EXPR conversions may be removed freely (as long as we
5451 are careful not to change the C type of the overall expression).
5452 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5453 but we can constant-fold them if they have constant operands. */
5455 #ifdef ENABLE_FOLD_CHECKING
5456 # define fold(x) fold_1 (x)
5457 static tree fold_1 (tree);
5458 static
5459 #endif
5460 tree
5461 fold (tree expr)
5463 tree t = expr, orig_t;
5464 tree t1 = NULL_TREE;
5465 tree tem;
5466 tree type = TREE_TYPE (expr);
5467 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5468 enum tree_code code = TREE_CODE (t);
5469 int kind = TREE_CODE_CLASS (code);
5470 int invert;
5471 /* WINS will be nonzero when the switch is done
5472 if all operands are constant. */
5473 int wins = 1;
5475 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5476 Likewise for a SAVE_EXPR that's already been evaluated. */
5477 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5478 return t;
5480 /* Return right away if a constant. */
5481 if (kind == 'c')
5482 return t;
5484 orig_t = t;
5486 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5488 tree subop;
5490 /* Special case for conversion ops that can have fixed point args. */
5491 arg0 = TREE_OPERAND (t, 0);
5493 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5494 if (arg0 != 0)
5495 STRIP_SIGN_NOPS (arg0);
5497 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5498 subop = TREE_REALPART (arg0);
5499 else
5500 subop = arg0;
5502 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5503 && TREE_CODE (subop) != REAL_CST)
5504 /* Note that TREE_CONSTANT isn't enough:
5505 static var addresses are constant but we can't
5506 do arithmetic on them. */
5507 wins = 0;
5509 else if (IS_EXPR_CODE_CLASS (kind))
5511 int len = first_rtl_op (code);
5512 int i;
5513 for (i = 0; i < len; i++)
5515 tree op = TREE_OPERAND (t, i);
5516 tree subop;
5518 if (op == 0)
5519 continue; /* Valid for CALL_EXPR, at least. */
5521 if (kind == '<' || code == RSHIFT_EXPR)
5523 /* Signedness matters here. Perhaps we can refine this
5524 later. */
5525 STRIP_SIGN_NOPS (op);
5527 else
5528 /* Strip any conversions that don't change the mode. */
5529 STRIP_NOPS (op);
5531 if (TREE_CODE (op) == COMPLEX_CST)
5532 subop = TREE_REALPART (op);
5533 else
5534 subop = op;
5536 if (TREE_CODE (subop) != INTEGER_CST
5537 && TREE_CODE (subop) != REAL_CST)
5538 /* Note that TREE_CONSTANT isn't enough:
5539 static var addresses are constant but we can't
5540 do arithmetic on them. */
5541 wins = 0;
5543 if (i == 0)
5544 arg0 = op;
5545 else if (i == 1)
5546 arg1 = op;
5550 /* If this is a commutative operation, and ARG0 is a constant, move it
5551 to ARG1 to reduce the number of tests below. */
5552 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5553 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5554 || code == BIT_AND_EXPR)
5555 && tree_swap_operands_p (arg0, arg1, true))
5556 return fold (build (code, type, TREE_OPERAND (t, 1),
5557 TREE_OPERAND (t, 0)));
5559 /* Now WINS is set as described above,
5560 ARG0 is the first operand of EXPR,
5561 and ARG1 is the second operand (if it has more than one operand).
5563 First check for cases where an arithmetic operation is applied to a
5564 compound, conditional, or comparison operation. Push the arithmetic
5565 operation inside the compound or conditional to see if any folding
5566 can then be done. Convert comparison to conditional for this purpose.
5567 This also optimizes non-constant cases that used to be done in
5568 expand_expr.
5570 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
5571 where one of the operands is a comparison and the other is a comparison,
5572 a BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5573 code below would make the expression more complex. Change it to a
5574 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5575 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5577 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5578 || code == EQ_EXPR || code == NE_EXPR)
5579 && ((truth_value_p (TREE_CODE (arg0))
5580 && (truth_value_p (TREE_CODE (arg1))
5581 || (TREE_CODE (arg1) == BIT_AND_EXPR
5582 && integer_onep (TREE_OPERAND (arg1, 1)))))
5583 || (truth_value_p (TREE_CODE (arg1))
5584 && (truth_value_p (TREE_CODE (arg0))
5585 || (TREE_CODE (arg0) == BIT_AND_EXPR
5586 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5588 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5589 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5590 : TRUTH_XOR_EXPR,
5591 type, arg0, arg1));
5593 if (code == EQ_EXPR)
5594 t = invert_truthvalue (t);
5596 return t;
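/* Editorial sketch: the BIT_AND/BIT_IOR -> TRUTH_AND/TRUTH_OR
   conversion above is safe because comparisons yield only 0 or 1, so
   the bitwise and logical forms agree on such operands:  */
#include <assert.h>

int main (void)
{
  int a = 3, b = 5, c = 7, d = 2;
  assert (((a < b) & (c < d)) == ((a < b) && (c < d)));
  return 0;
}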
5599 if (TREE_CODE_CLASS (code) == '1')
5601 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5602 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5603 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5604 else if (TREE_CODE (arg0) == COND_EXPR)
5606 tree arg01 = TREE_OPERAND (arg0, 1);
5607 tree arg02 = TREE_OPERAND (arg0, 2);
5608 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5609 arg01 = fold (build1 (code, type, arg01));
5610 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5611 arg02 = fold (build1 (code, type, arg02));
5612 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5613 arg01, arg02));
5615 /* If this was a conversion, and all we did was move it
5616 inside the COND_EXPR, bring it back out. But leave it if
5617 it is a conversion from integer to integer and the
5618 result precision is no wider than a word since such a
5619 conversion is cheap and may be optimized away by combine,
5620 while it couldn't if it were outside the COND_EXPR. Then return
5621 so we don't get into an infinite recursion loop taking the
5622 conversion out and then back in. */
5624 if ((code == NOP_EXPR || code == CONVERT_EXPR
5625 || code == NON_LVALUE_EXPR)
5626 && TREE_CODE (t) == COND_EXPR
5627 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5628 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5629 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5630 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5631 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5632 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5633 && (! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5634 && (INTEGRAL_TYPE_P
5635 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5636 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD)
5637 || flag_syntax_only))
5638 t = build1 (code, type,
5639 build (COND_EXPR,
5640 TREE_TYPE (TREE_OPERAND
5641 (TREE_OPERAND (t, 1), 0)),
5642 TREE_OPERAND (t, 0),
5643 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5644 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5645 return t;
5647 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5648 return fold (build (COND_EXPR, type, arg0,
5649 fold (build1 (code, type, integer_one_node)),
5650 fold (build1 (code, type, integer_zero_node))));
5652 else if (TREE_CODE_CLASS (code) == '<'
5653 && TREE_CODE (arg0) == COMPOUND_EXPR)
5654 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5655 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5656 else if (TREE_CODE_CLASS (code) == '<'
5657 && TREE_CODE (arg1) == COMPOUND_EXPR)
5658 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5659 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5660 else if (TREE_CODE_CLASS (code) == '2'
5661 || TREE_CODE_CLASS (code) == '<')
5663 if (TREE_CODE (arg1) == COMPOUND_EXPR
5664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5665 && ! TREE_SIDE_EFFECTS (arg0))
5666 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5667 fold (build (code, type,
5668 arg0, TREE_OPERAND (arg1, 1))));
5669 else if ((TREE_CODE (arg1) == COND_EXPR
5670 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5671 && TREE_CODE_CLASS (code) != '<'))
5672 && (TREE_CODE (arg0) != COND_EXPR
5673 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5674 && (! TREE_SIDE_EFFECTS (arg0)
5675 || ((*lang_hooks.decls.global_bindings_p) () == 0
5676 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5677 return
5678 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5679 /*cond_first_p=*/0);
5680 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5681 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5682 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5683 else if ((TREE_CODE (arg0) == COND_EXPR
5684 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5685 && TREE_CODE_CLASS (code) != '<'))
5686 && (TREE_CODE (arg1) != COND_EXPR
5687 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5688 && (! TREE_SIDE_EFFECTS (arg1)
5689 || ((*lang_hooks.decls.global_bindings_p) () == 0
5690 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5691 return
5692 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5693 /*cond_first_p=*/1);
5696 switch (code)
5698 case INTEGER_CST:
5699 case REAL_CST:
5700 case VECTOR_CST:
5701 case STRING_CST:
5702 case COMPLEX_CST:
5703 case CONSTRUCTOR:
5704 return t;
5706 case CONST_DECL:
5707 return fold (DECL_INITIAL (t));
5709 case NOP_EXPR:
5710 case FLOAT_EXPR:
5711 case CONVERT_EXPR:
5712 case FIX_TRUNC_EXPR:
5713 /* Other kinds of FIX are not handled properly by fold_convert. */
5715 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5716 return TREE_OPERAND (t, 0);
5718 /* Handle cases of two conversions in a row. */
5719 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5720 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5722 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5723 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5724 tree final_type = TREE_TYPE (t);
5725 int inside_int = INTEGRAL_TYPE_P (inside_type);
5726 int inside_ptr = POINTER_TYPE_P (inside_type);
5727 int inside_float = FLOAT_TYPE_P (inside_type);
5728 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5729 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5730 int inter_int = INTEGRAL_TYPE_P (inter_type);
5731 int inter_ptr = POINTER_TYPE_P (inter_type);
5732 int inter_float = FLOAT_TYPE_P (inter_type);
5733 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5734 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5735 int final_int = INTEGRAL_TYPE_P (final_type);
5736 int final_ptr = POINTER_TYPE_P (final_type);
5737 int final_float = FLOAT_TYPE_P (final_type);
5738 unsigned int final_prec = TYPE_PRECISION (final_type);
5739 int final_unsignedp = TREE_UNSIGNED (final_type);
5741 /* In addition to the cases of two conversions in a row
5742 handled below, if we are converting something to its own
5743 type via an object of identical or wider precision, neither
5744 conversion is needed. */
5745 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5746 && ((inter_int && final_int) || (inter_float && final_float))
5747 && inter_prec >= final_prec)
5748 return fold (build1 (code, final_type,
5749 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5751 /* Likewise, if the intermediate and final types are either both
5752 float or both integer, we don't need the middle conversion if
5753 it is wider than the final type and doesn't change the signedness
5754 (for integers). Avoid this if the final type is a pointer
5755 since then we sometimes need the inner conversion. Likewise if
5756 the outer has a precision not equal to the size of its mode. */
5757 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5758 || (inter_float && inside_float))
5759 && inter_prec >= inside_prec
5760 && (inter_float || inter_unsignedp == inside_unsignedp)
5761 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5762 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5763 && ! final_ptr)
5764 return fold (build1 (code, final_type,
5765 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5767 /* If we have a sign-extension of a zero-extended value, we can
5768 replace that by a single zero-extension. */
5769 if (inside_int && inter_int && final_int
5770 && inside_prec < inter_prec && inter_prec < final_prec
5771 && inside_unsignedp && !inter_unsignedp)
5772 return fold (build1 (code, final_type,
5773 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5775 /* Two conversions in a row are not needed unless:
5776 - some conversion is floating-point (overstrict for now), or
5777 - the intermediate type is narrower than both initial and
5778 final, or
5779 - the intermediate type and innermost type differ in signedness,
5780 and the outermost type is wider than the intermediate, or
5781 - the initial type is a pointer type and the precisions of the
5782 intermediate and final types differ, or
5783 - the final type is a pointer type and the precisions of the
5784 initial and intermediate types differ. */
5785 if (! inside_float && ! inter_float && ! final_float
5786 && (inter_prec > inside_prec || inter_prec > final_prec)
5787 && ! (inside_int && inter_int
5788 && inter_unsignedp != inside_unsignedp
5789 && inter_prec < final_prec)
5790 && ((inter_unsignedp && inter_prec > inside_prec)
5791 == (final_unsignedp && final_prec > inter_prec))
5792 && ! (inside_ptr && inter_prec != final_prec)
5793 && ! (final_ptr && inside_prec != inter_prec)
5794 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5795 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5796 && ! final_ptr)
5797 return fold (build1 (code, final_type,
5798 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
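/* Editorial sketch (assumes an LP64-style target; names are
   illustrative): the "two conversions in a row" rules above.  Since
   long is an integral type wider than short with the same signedness
   as signed char, the widening step in (short)(long)c is droppable
   and the whole expression folds to (short)c.  */
#include <assert.h>

static short via_long (signed char c)
{
  return (short) (long) c;   /* intermediate widening is dropped */
}

int main (void)
{
  assert (via_long (-7) == (short) -7);
  assert (via_long (100) == (short) 100);
  return 0;
}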
5801 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5802 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5803 /* Detect assigning a bitfield. */
5804 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5805 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5807 /* Don't leave an assignment inside a conversion
5808 unless assigning a bitfield. */
5809 tree prev = TREE_OPERAND (t, 0);
5810 if (t == orig_t)
5811 t = copy_node (t);
5812 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5813 /* First do the assignment, then return converted constant. */
5814 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5815 TREE_NO_UNUSED_WARNING (t) = 1;
5816 TREE_USED (t) = 1;
5817 return t;
5820 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5821 constant (if x has signed type, the sign bit cannot be set
5822 in c). This folds extension into the BIT_AND_EXPR. */
5823 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5824 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5825 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5826 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5828 tree and = TREE_OPERAND (t, 0);
5829 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5830 int change = 0;
5832 if (TREE_UNSIGNED (TREE_TYPE (and))
5833 || (TYPE_PRECISION (TREE_TYPE (t))
5834 <= TYPE_PRECISION (TREE_TYPE (and))))
5835 change = 1;
5836 else if (TYPE_PRECISION (TREE_TYPE (and1))
5837 <= HOST_BITS_PER_WIDE_INT
5838 && host_integerp (and1, 1))
5840 unsigned HOST_WIDE_INT cst;
5842 cst = tree_low_cst (and1, 1);
5843 cst &= (HOST_WIDE_INT) -1
5844 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5845 change = (cst == 0);
5846 #ifdef LOAD_EXTEND_OP
5847 if (change
5848 && !flag_syntax_only
5849 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5850 == ZERO_EXTEND))
5852 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5853 and0 = fold_convert (uns, and0);
5854 and1 = fold_convert (uns, and1);
5856 #endif
5858 if (change)
5859 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5860 fold_convert (TREE_TYPE (t), and0),
5861 fold_convert (TREE_TYPE (t), and1)));
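/* Editorial sketch of the fold above, shown for the unsigned,
   widening case where it is unconditionally valid: zero-extension
   commutes with masking, so (T)(x & c) == ((T)x & (T)c).  */
#include <assert.h>

int main (void)
{
  unsigned int x = 0xDEADBEEFu;
  assert ((unsigned long) (x & 0xFF00u)
          == (((unsigned long) x) & 0xFF00ul));
  return 0;
}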
5864 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5865 return tem ? tem : t;
5867 case VIEW_CONVERT_EXPR:
5868 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5869 return build1 (VIEW_CONVERT_EXPR, type,
5870 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5871 return t;
5873 case COMPONENT_REF:
5874 if (TREE_CODE (arg0) == CONSTRUCTOR
5875 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5877 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5878 if (m)
5879 t = TREE_VALUE (m);
5881 return t;
5883 case RANGE_EXPR:
5884 if (TREE_CONSTANT (t) != wins)
5886 if (t == orig_t)
5887 t = copy_node (t);
5888 TREE_CONSTANT (t) = wins;
5890 return t;
5892 case NEGATE_EXPR:
5893 if (negate_expr_p (arg0))
5894 return fold_convert (type, negate_expr (arg0));
5895 return t;
5897 case ABS_EXPR:
5898 if (wins)
5900 if (TREE_CODE (arg0) == INTEGER_CST)
5902 /* If the value is unsigned, then the absolute value is
5903 the same as the ordinary value. */
5904 if (TREE_UNSIGNED (type))
5905 return arg0;
5906 /* Similarly, if the value is non-negative. */
5907 else if (INT_CST_LT (integer_minus_one_node, arg0))
5908 return arg0;
5909 /* If the value is negative, then the absolute value is
5910 its negation. */
5911 else
5913 unsigned HOST_WIDE_INT low;
5914 HOST_WIDE_INT high;
5915 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5916 TREE_INT_CST_HIGH (arg0),
5917 &low, &high);
5918 t = build_int_2 (low, high);
5919 TREE_TYPE (t) = type;
5920 TREE_OVERFLOW (t)
5921 = (TREE_OVERFLOW (arg0)
5922 | force_fit_type (t, overflow));
5923 TREE_CONSTANT_OVERFLOW (t)
5924 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5927 else if (TREE_CODE (arg0) == REAL_CST)
5929 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5930 t = build_real (type,
5931 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5934 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5935 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5936 /* Convert fabs((double)float) into (double)fabsf(float). */
5937 else if (TREE_CODE (arg0) == NOP_EXPR
5938 && TREE_CODE (type) == REAL_TYPE)
5940 tree targ0 = strip_float_extensions (arg0);
5941 if (targ0 != arg0)
5942 return fold_convert (type, fold (build1 (ABS_EXPR,
5943 TREE_TYPE (targ0),
5944 targ0)));
5946 else if (tree_expr_nonnegative_p (arg0))
5947 return arg0;
5948 return t;
5950 case CONJ_EXPR:
5951 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5952 return fold_convert (type, arg0);
5953 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5954 return build (COMPLEX_EXPR, type,
5955 TREE_OPERAND (arg0, 0),
5956 negate_expr (TREE_OPERAND (arg0, 1)));
5957 else if (TREE_CODE (arg0) == COMPLEX_CST)
5958 return build_complex (type, TREE_REALPART (arg0),
5959 negate_expr (TREE_IMAGPART (arg0)));
5960 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5961 return fold (build (TREE_CODE (arg0), type,
5962 fold (build1 (CONJ_EXPR, type,
5963 TREE_OPERAND (arg0, 0))),
5964 fold (build1 (CONJ_EXPR,
5965 type, TREE_OPERAND (arg0, 1)))));
5966 else if (TREE_CODE (arg0) == CONJ_EXPR)
5967 return TREE_OPERAND (arg0, 0);
5968 return t;
5970 case BIT_NOT_EXPR:
5971 if (wins)
5973 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5974 ~ TREE_INT_CST_HIGH (arg0));
5975 TREE_TYPE (t) = type;
5976 force_fit_type (t, 0);
5977 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5978 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5980 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5981 return TREE_OPERAND (arg0, 0);
5982 return t;
5984 case PLUS_EXPR:
5985 /* A + (-B) -> A - B */
5986 if (TREE_CODE (arg1) == NEGATE_EXPR)
5987 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5988 /* (-A) + B -> B - A */
5989 if (TREE_CODE (arg0) == NEGATE_EXPR)
5990 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5991 else if (! FLOAT_TYPE_P (type))
5993 if (integer_zerop (arg1))
5994 return non_lvalue (fold_convert (type, arg0));
5996 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5997 with a constant, and the two constants have no bits in common,
5998 we should treat this as a BIT_IOR_EXPR since this may produce more
5999 simplifications. */
6000 if (TREE_CODE (arg0) == BIT_AND_EXPR
6001 && TREE_CODE (arg1) == BIT_AND_EXPR
6002 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6003 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6004 && integer_zerop (const_binop (BIT_AND_EXPR,
6005 TREE_OPERAND (arg0, 1),
6006 TREE_OPERAND (arg1, 1), 0)))
6008 code = BIT_IOR_EXPR;
6009 goto bit_ior;
6012 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6013 (plus (plus (mult) (mult)) (foo)) so that we can
6014 take advantage of the factoring cases below. */
6015 if ((TREE_CODE (arg0) == PLUS_EXPR
6016 && TREE_CODE (arg1) == MULT_EXPR)
6017 || (TREE_CODE (arg1) == PLUS_EXPR
6018 && TREE_CODE (arg0) == MULT_EXPR))
6020 tree parg0, parg1, parg, marg;
6022 if (TREE_CODE (arg0) == PLUS_EXPR)
6023 parg = arg0, marg = arg1;
6024 else
6025 parg = arg1, marg = arg0;
6026 parg0 = TREE_OPERAND (parg, 0);
6027 parg1 = TREE_OPERAND (parg, 1);
6028 STRIP_NOPS (parg0);
6029 STRIP_NOPS (parg1);
6031 if (TREE_CODE (parg0) == MULT_EXPR
6032 && TREE_CODE (parg1) != MULT_EXPR)
6033 return fold (build (PLUS_EXPR, type,
6034 fold (build (PLUS_EXPR, type,
6035 fold_convert (type, parg0),
6036 fold_convert (type, marg))),
6037 fold_convert (type, parg1)));
6038 if (TREE_CODE (parg0) != MULT_EXPR
6039 && TREE_CODE (parg1) == MULT_EXPR)
6040 return fold (build (PLUS_EXPR, type,
6041 fold (build (PLUS_EXPR, type,
6042 fold_convert (type, parg1),
6043 fold_convert (type, marg))),
6044 fold_convert (type, parg0)));
6047 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6049 tree arg00, arg01, arg10, arg11;
6050 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6052 /* (A * C) + (B * C) -> (A+B) * C.
6053 We are most concerned about the case where C is a constant,
6054 but other combinations show up during loop reduction. Since
6055 it is not difficult, try all four possibilities. */
6057 arg00 = TREE_OPERAND (arg0, 0);
6058 arg01 = TREE_OPERAND (arg0, 1);
6059 arg10 = TREE_OPERAND (arg1, 0);
6060 arg11 = TREE_OPERAND (arg1, 1);
6061 same = NULL_TREE;
6063 if (operand_equal_p (arg01, arg11, 0))
6064 same = arg01, alt0 = arg00, alt1 = arg10;
6065 else if (operand_equal_p (arg00, arg10, 0))
6066 same = arg00, alt0 = arg01, alt1 = arg11;
6067 else if (operand_equal_p (arg00, arg11, 0))
6068 same = arg00, alt0 = arg01, alt1 = arg10;
6069 else if (operand_equal_p (arg01, arg10, 0))
6070 same = arg01, alt0 = arg00, alt1 = arg11;
6072 /* No identical multiplicands; see if we can find a common
6073 power-of-two factor in non-power-of-two multiplies. This
6074 can help in multi-dimensional array access. */
6075 else if (TREE_CODE (arg01) == INTEGER_CST
6076 && TREE_CODE (arg11) == INTEGER_CST
6077 && TREE_INT_CST_HIGH (arg01) == 0
6078 && TREE_INT_CST_HIGH (arg11) == 0)
6080 HOST_WIDE_INT int01, int11, tmp;
6081 int01 = TREE_INT_CST_LOW (arg01);
6082 int11 = TREE_INT_CST_LOW (arg11);
6084 /* Move min of absolute values to int11. */
6085 if ((int01 >= 0 ? int01 : -int01)
6086 < (int11 >= 0 ? int11 : -int11))
6088 tmp = int01, int01 = int11, int11 = tmp;
6089 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6090 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6093 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6095 alt0 = fold (build (MULT_EXPR, type, arg00,
6096 build_int_2 (int01 / int11, 0)));
6097 alt1 = arg10;
6098 same = arg11;
6102 if (same)
6103 return fold (build (MULT_EXPR, type,
6104 fold (build (PLUS_EXPR, type, alt0, alt1)),
6105 same));
6108 else
6110 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6111 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6112 return non_lvalue (fold_convert (type, arg0));
6114 /* Likewise if the operands are reversed. */
6115 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6116 return non_lvalue (fold_convert (type, arg1));
6118 /* Convert x+x into x*2.0. */
6119 if (operand_equal_p (arg0, arg1, 0)
6120 && SCALAR_FLOAT_TYPE_P (type))
6121 return fold (build (MULT_EXPR, type, arg0,
6122 build_real (type, dconst2)));
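/* Editorial sketch: unlike the flag-guarded folds below, x+x -> x*2.0
   needs no -funsafe-math-optimizations, because multiplying by 2.0 in
   binary floating point only adjusts the exponent; on typical IEEE
   targets the two forms produce bit-identical results for every
   non-NaN x, overflow included.  */
#include <assert.h>

int main (void)
{
  double xs[] = { 0.1, -3.5, 1e300, -0.0 };
  int i;
  for (i = 0; i < 4; i++)
    assert (xs[i] + xs[i] == xs[i] * 2.0);
  return 0;
}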
6124 /* Convert x*c+x into x*(c+1). */
6125 if (flag_unsafe_math_optimizations
6126 && TREE_CODE (arg0) == MULT_EXPR
6127 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6128 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6129 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6131 REAL_VALUE_TYPE c;
6133 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6134 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6135 return fold (build (MULT_EXPR, type, arg1,
6136 build_real (type, c)));
6139 /* Convert x+x*c into x*(c+1). */
6140 if (flag_unsafe_math_optimizations
6141 && TREE_CODE (arg1) == MULT_EXPR
6142 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6143 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6144 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6146 REAL_VALUE_TYPE c;
6148 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6149 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6150 return fold (build (MULT_EXPR, type, arg0,
6151 build_real (type, c)));
6154 /* Convert x*c1+x*c2 into x*(c1+c2). */
6155 if (flag_unsafe_math_optimizations
6156 && TREE_CODE (arg0) == MULT_EXPR
6157 && TREE_CODE (arg1) == MULT_EXPR
6158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6159 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6160 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6161 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6162 && operand_equal_p (TREE_OPERAND (arg0, 0),
6163 TREE_OPERAND (arg1, 0), 0))
6165 REAL_VALUE_TYPE c1, c2;
6167 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6168 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6169 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6170 return fold (build (MULT_EXPR, type,
6171 TREE_OPERAND (arg0, 0),
6172 build_real (type, c1)));
6176 bit_rotate:
6177 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6178 is a rotate of A by C1 bits. */
6179 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6180 is a rotate of A by B bits. */
6182 enum tree_code code0, code1;
6183 code0 = TREE_CODE (arg0);
6184 code1 = TREE_CODE (arg1);
6185 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6186 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6187 && operand_equal_p (TREE_OPERAND (arg0, 0),
6188 TREE_OPERAND (arg1, 0), 0)
6189 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6191 tree tree01, tree11;
6192 enum tree_code code01, code11;
6194 tree01 = TREE_OPERAND (arg0, 1);
6195 tree11 = TREE_OPERAND (arg1, 1);
6196 STRIP_NOPS (tree01);
6197 STRIP_NOPS (tree11);
6198 code01 = TREE_CODE (tree01);
6199 code11 = TREE_CODE (tree11);
6200 if (code01 == INTEGER_CST
6201 && code11 == INTEGER_CST
6202 && TREE_INT_CST_HIGH (tree01) == 0
6203 && TREE_INT_CST_HIGH (tree11) == 0
6204 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6205 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6206 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6207 code0 == LSHIFT_EXPR ? tree01 : tree11);
6208 else if (code11 == MINUS_EXPR)
6210 tree tree110, tree111;
6211 tree110 = TREE_OPERAND (tree11, 0);
6212 tree111 = TREE_OPERAND (tree11, 1);
6213 STRIP_NOPS (tree110);
6214 STRIP_NOPS (tree111);
6215 if (TREE_CODE (tree110) == INTEGER_CST
6216 && 0 == compare_tree_int (tree110,
6217 TYPE_PRECISION
6218 (TREE_TYPE (TREE_OPERAND
6219 (arg0, 0))))
6220 && operand_equal_p (tree01, tree111, 0))
6221 return build ((code0 == LSHIFT_EXPR
6222 ? LROTATE_EXPR
6223 : RROTATE_EXPR),
6224 type, TREE_OPERAND (arg0, 0), tree01);
6226 else if (code01 == MINUS_EXPR)
6228 tree tree010, tree011;
6229 tree010 = TREE_OPERAND (tree01, 0);
6230 tree011 = TREE_OPERAND (tree01, 1);
6231 STRIP_NOPS (tree010);
6232 STRIP_NOPS (tree011);
6233 if (TREE_CODE (tree010) == INTEGER_CST
6234 && 0 == compare_tree_int (tree010,
6235 TYPE_PRECISION
6236 (TREE_TYPE (TREE_OPERAND
6237 (arg0, 0))))
6238 && operand_equal_p (tree11, tree011, 0))
6239 return build ((code0 != LSHIFT_EXPR
6240 ? LROTATE_EXPR
6241 : RROTATE_EXPR),
6242 type, TREE_OPERAND (arg0, 0), tree11);
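/* Editorial sketch: the rotate recognition above at the source level,
   assuming an unsigned 32-bit operand and 0 < B < 32 so neither shift
   is by the full width.  The same identity holds with | or ^ in place
   of +, which is why BIT_IOR_EXPR and BIT_XOR_EXPR also jump to
   bit_rotate.  */
#include <assert.h>
#include <stdint.h>

static uint32_t rotl32 (uint32_t a, unsigned b)
{
  return (a << b) + (a >> (32 - b));   /* recognized as a left rotate */
}

int main (void)
{
  assert (rotl32 (0x80000001u, 1) == 0x00000003u);
  assert (rotl32 (0x12345678u, 8) == 0x34567812u);
  return 0;
}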
6247 associate:
6248 /* In most languages, we can't associate operations on floats through
6249 parentheses. Rather than remember where the parentheses were, we
6250 don't associate floats at all, unless the user has specified
6251 -funsafe-math-optimizations. */
6253 if (! wins
6254 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6256 tree var0, con0, lit0, minus_lit0;
6257 tree var1, con1, lit1, minus_lit1;
6259 /* Split both trees into variables, constants, and literals. Then
6260 associate each group together, the constants with literals,
6261 then the result with variables. This increases the chances of
6262 literals being recombined later and of generating relocatable
6263 expressions for the sum of a constant and literal. */
6264 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6265 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6266 code == MINUS_EXPR);
6268 /* Only do something if we found more than two objects. Otherwise,
6269 nothing has changed and we risk infinite recursion. */
6270 if (2 < ((var0 != 0) + (var1 != 0)
6271 + (con0 != 0) + (con1 != 0)
6272 + (lit0 != 0) + (lit1 != 0)
6273 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6275 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6276 if (code == MINUS_EXPR)
6277 code = PLUS_EXPR;
6279 var0 = associate_trees (var0, var1, code, type);
6280 con0 = associate_trees (con0, con1, code, type);
6281 lit0 = associate_trees (lit0, lit1, code, type);
6282 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6284 /* Preserve the MINUS_EXPR if the negative part of the literal is
6285 greater than the positive part. Otherwise, the multiplicative
6286 folding code (i.e. extract_muldiv) may be fooled when
6287 unsigned constants are subtracted, like in the following
6288 example: ((X*2 + 4) - 8U)/2. */
6289 if (minus_lit0 && lit0)
6291 if (TREE_CODE (lit0) == INTEGER_CST
6292 && TREE_CODE (minus_lit0) == INTEGER_CST
6293 && tree_int_cst_lt (lit0, minus_lit0))
6295 minus_lit0 = associate_trees (minus_lit0, lit0,
6296 MINUS_EXPR, type);
6297 lit0 = 0;
6299 else
6301 lit0 = associate_trees (lit0, minus_lit0,
6302 MINUS_EXPR, type);
6303 minus_lit0 = 0;
6306 if (minus_lit0)
6308 if (con0 == 0)
6309 return fold_convert (type,
6310 associate_trees (var0, minus_lit0,
6311 MINUS_EXPR, type));
6312 else
6314 con0 = associate_trees (con0, minus_lit0,
6315 MINUS_EXPR, type);
6316 return fold_convert (type,
6317 associate_trees (var0, con0,
6318 PLUS_EXPR, type));
6322 con0 = associate_trees (con0, lit0, code, type);
6323 return fold_convert (type, associate_trees (var0, con0,
6324 code, type));
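/* Editorial sketch: the effect of the split/associate machinery
   above.  Variables are grouped with variables and literals with
   literals, so a tree like (x + 1) + (y + 2) becomes (x + y) + 3;
   the identity is exact for wrapping unsigned arithmetic.  */
#include <assert.h>

int main (void)
{
  unsigned x = 41, y = 58;
  assert ((x + 1) + (y + 2) == (x + y) + 3);
  return 0;
}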
6328 binary:
6329 if (wins)
6330 t1 = const_binop (code, arg0, arg1, 0);
6331 if (t1 != NULL_TREE)
6333 /* The return value should always have
6334 the same type as the original expression. */
6335 if (TREE_TYPE (t1) != TREE_TYPE (t))
6336 t1 = fold_convert (TREE_TYPE (t), t1);
6338 return t1;
6340 return t;
6342 case MINUS_EXPR:
6343 /* A - (-B) -> A + B */
6344 if (TREE_CODE (arg1) == NEGATE_EXPR)
6345 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6346 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6347 if (TREE_CODE (arg0) == NEGATE_EXPR
6348 && (FLOAT_TYPE_P (type)
6349 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6350 && negate_expr_p (arg1)
6351 && reorder_operands_p (arg0, arg1))
6352 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6353 TREE_OPERAND (arg0, 0)));
6355 if (! FLOAT_TYPE_P (type))
6357 if (! wins && integer_zerop (arg0))
6358 return negate_expr (fold_convert (type, arg1));
6359 if (integer_zerop (arg1))
6360 return non_lvalue (fold_convert (type, arg0));
6362 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
6363 about the case where C is a constant, just try one of the
6364 four possibilities. */
6366 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
6367 && operand_equal_p (TREE_OPERAND (arg0, 1),
6368 TREE_OPERAND (arg1, 1), 0))
6369 return fold (build (MULT_EXPR, type,
6370 fold (build (MINUS_EXPR, type,
6371 TREE_OPERAND (arg0, 0),
6372 TREE_OPERAND (arg1, 0))),
6373 TREE_OPERAND (arg0, 1)));
6375 /* Fold A - (A & B) into ~B & A. */
6376 if (!TREE_SIDE_EFFECTS (arg0)
6377 && TREE_CODE (arg1) == BIT_AND_EXPR)
6379 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6380 return fold (build (BIT_AND_EXPR, type,
6381 fold (build1 (BIT_NOT_EXPR, type,
6382 TREE_OPERAND (arg1, 0))),
6383 arg0));
6384 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6385 return fold (build (BIT_AND_EXPR, type,
6386 fold (build1 (BIT_NOT_EXPR, type,
6387 TREE_OPERAND (arg1, 1))),
6388 arg0));
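/* Editorial sketch: the fold above works because A & B can only have
   bits set where A does, so the subtraction never borrows and is
   really a bitwise clear: A - (A & B) == (~B & A).  */
#include <assert.h>

int main (void)
{
  unsigned a = 0xF0F0u, b = 0x3FF0u;
  assert (a - (a & b) == (~b & a));
  return 0;
}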
6391 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6392 any power of 2 minus 1. */
6393 if (TREE_CODE (arg0) == BIT_AND_EXPR
6394 && TREE_CODE (arg1) == BIT_AND_EXPR
6395 && operand_equal_p (TREE_OPERAND (arg0, 0),
6396 TREE_OPERAND (arg1, 0), 0))
6398 tree mask0 = TREE_OPERAND (arg0, 1);
6399 tree mask1 = TREE_OPERAND (arg1, 1);
6400 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6402 if (operand_equal_p (tem, mask1, 0))
6404 tem = fold (build (BIT_XOR_EXPR, type,
6405 TREE_OPERAND (arg0, 0), mask1));
6406 return fold (build (MINUS_EXPR, type, tem, mask1));
6411 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6412 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6413 return non_lvalue (fold_convert (type, arg0));
6415 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6416 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6417 (-ARG1 + ARG0) reduces to -ARG1. */
6418 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6419 return negate_expr (fold_convert (type, arg1));
6421 /* Fold &x - &x. This can happen from &x.foo - &x.
6422 This is unsafe for certain floats even in non-IEEE formats.
6423 In IEEE, it is unsafe because it gives the wrong result for NaNs.
6424 Also note that operand_equal_p is always false if an operand
6425 is volatile. */
6427 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6428 && operand_equal_p (arg0, arg1, 0))
6429 return fold_convert (type, integer_zero_node);
6431 goto associate;
6433 case MULT_EXPR:
6434 /* (-A) * (-B) -> A * B */
6435 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6436 return fold (build (MULT_EXPR, type,
6437 TREE_OPERAND (arg0, 0),
6438 negate_expr (arg1)));
6439 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6440 return fold (build (MULT_EXPR, type,
6441 negate_expr (arg0),
6442 TREE_OPERAND (arg1, 0)));
6444 if (! FLOAT_TYPE_P (type))
6446 if (integer_zerop (arg1))
6447 return omit_one_operand (type, arg1, arg0);
6448 if (integer_onep (arg1))
6449 return non_lvalue (fold_convert (type, arg0));
6451 /* (a * (1 << b)) is (a << b) */
6452 if (TREE_CODE (arg1) == LSHIFT_EXPR
6453 && integer_onep (TREE_OPERAND (arg1, 0)))
6454 return fold (build (LSHIFT_EXPR, type, arg0,
6455 TREE_OPERAND (arg1, 1)));
6456 if (TREE_CODE (arg0) == LSHIFT_EXPR
6457 && integer_onep (TREE_OPERAND (arg0, 0)))
6458 return fold (build (LSHIFT_EXPR, type, arg1,
6459 TREE_OPERAND (arg0, 1)));
6461 if (TREE_CODE (arg1) == INTEGER_CST
6462 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6463 fold_convert (type, arg1),
6464 code, NULL_TREE)))
6465 return fold_convert (type, tem);
6468 else
6470 /* Maybe fold x * 0 to 0. The expressions aren't the same
6471 when x is NaN, since x * 0 is also NaN. Nor are they the
6472 same in modes with signed zeros, since multiplying a
6473 negative value by 0 gives -0, not +0. */
6474 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6475 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6476 && real_zerop (arg1))
6477 return omit_one_operand (type, arg1, arg0);
6478 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6479 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6480 && real_onep (arg1))
6481 return non_lvalue (fold_convert (type, arg0));
6483 /* Transform x * -1.0 into -x. */
6484 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6485 && real_minus_onep (arg1))
6486 return fold (build1 (NEGATE_EXPR, type, arg0));
6488 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6489 if (flag_unsafe_math_optimizations
6490 && TREE_CODE (arg0) == RDIV_EXPR
6491 && TREE_CODE (arg1) == REAL_CST
6492 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6494 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6495 arg1, 0);
6496 if (tem)
6497 return fold (build (RDIV_EXPR, type, tem,
6498 TREE_OPERAND (arg0, 1)));
6501 if (flag_unsafe_math_optimizations)
6503 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6504 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6506 /* Optimizations of sqrt(...)*sqrt(...). */
6507 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6508 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6509 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6511 tree sqrtfn, arg, arglist;
6512 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6513 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6515 /* Optimize sqrt(x)*sqrt(x) as x. */
6516 if (operand_equal_p (arg00, arg10, 0)
6517 && ! HONOR_SNANS (TYPE_MODE (type)))
6518 return arg00;
6520 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6521 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6522 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6523 arglist = build_tree_list (NULL_TREE, arg);
6524 return build_function_call_expr (sqrtfn, arglist);
6527 /* Optimize expN(x)*expN(y) as expN(x+y). */
6528 if (fcode0 == fcode1
6529 && (fcode0 == BUILT_IN_EXP
6530 || fcode0 == BUILT_IN_EXPF
6531 || fcode0 == BUILT_IN_EXPL
6532 || fcode0 == BUILT_IN_EXP2
6533 || fcode0 == BUILT_IN_EXP2F
6534 || fcode0 == BUILT_IN_EXP2L
6535 || fcode0 == BUILT_IN_EXP10
6536 || fcode0 == BUILT_IN_EXP10F
6537 || fcode0 == BUILT_IN_EXP10L
6538 || fcode0 == BUILT_IN_POW10
6539 || fcode0 == BUILT_IN_POW10F
6540 || fcode0 == BUILT_IN_POW10L))
6542 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6543 tree arg = build (PLUS_EXPR, type,
6544 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6545 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6546 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6547 return build_function_call_expr (expfn, arglist);
6550 /* Optimizations of pow(...)*pow(...). */
6551 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6552 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6553 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6555 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6556 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6557 1)));
6558 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6559 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6560 1)));
6562 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6563 if (operand_equal_p (arg01, arg11, 0))
6565 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6566 tree arg = build (MULT_EXPR, type, arg00, arg10);
6567 tree arglist = tree_cons (NULL_TREE, fold (arg),
6568 build_tree_list (NULL_TREE,
6569 arg01));
6570 return build_function_call_expr (powfn, arglist);
6573 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6574 if (operand_equal_p (arg00, arg10, 0))
6576 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6577 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6578 tree arglist = tree_cons (NULL_TREE, arg00,
6579 build_tree_list (NULL_TREE,
6580 arg));
6581 return build_function_call_expr (powfn, arglist);
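/* Editorial sketch: the product folds above are the usual exponent
   laws, expN(x)*expN(y) == expN(x+y) and pow(x,y)*pow(x,z) ==
   pow(x,y+z).  They are exact over the reals but only approximate in
   floating point, which is why the block sits under
   flag_unsafe_math_optimizations.  Demonstrated with a small
   tolerance:  */
#include <assert.h>
#include <math.h>

int main (void)
{
  double x = 1.7, y = 2.5, z = 0.75;
  assert (fabs (pow (x, y) * pow (x, z) - pow (x, y + z)) < 1e-12);
  assert (fabs (exp (y) * exp (z) - exp (y + z)) < 1e-12);
  return 0;
}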
6585 /* Optimize tan(x)*cos(x) as sin(x). */
6586 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6587 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6588 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6589 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6590 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6591 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6592 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6593 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6595 tree sinfn;
6597 switch (fcode0)
6599 case BUILT_IN_TAN:
6600 case BUILT_IN_COS:
6601 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6602 break;
6603 case BUILT_IN_TANF:
6604 case BUILT_IN_COSF:
6605 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6606 break;
6607 case BUILT_IN_TANL:
6608 case BUILT_IN_COSL:
6609 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6610 break;
6611 default:
6612 sinfn = NULL_TREE;
6615 if (sinfn != NULL_TREE)
6616 return build_function_call_expr (sinfn,
6617 TREE_OPERAND (arg0, 1));
6620 /* Optimize x*pow(x,c) as pow(x,c+1). */
6621 if (fcode1 == BUILT_IN_POW
6622 || fcode1 == BUILT_IN_POWF
6623 || fcode1 == BUILT_IN_POWL)
6625 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6626 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6627 1)));
6628 if (TREE_CODE (arg11) == REAL_CST
6629 && ! TREE_CONSTANT_OVERFLOW (arg11)
6630 && operand_equal_p (arg0, arg10, 0))
6632 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6633 REAL_VALUE_TYPE c;
6634 tree arg, arglist;
6636 c = TREE_REAL_CST (arg11);
6637 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6638 arg = build_real (type, c);
6639 arglist = build_tree_list (NULL_TREE, arg);
6640 arglist = tree_cons (NULL_TREE, arg0, arglist);
6641 return build_function_call_expr (powfn, arglist);
6645 /* Optimize pow(x,c)*x as pow(x,c+1). */
6646 if (fcode0 == BUILT_IN_POW
6647 || fcode0 == BUILT_IN_POWF
6648 || fcode0 == BUILT_IN_POWL)
6650 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6651 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6652 1)));
6653 if (TREE_CODE (arg01) == REAL_CST
6654 && ! TREE_CONSTANT_OVERFLOW (arg01)
6655 && operand_equal_p (arg1, arg00, 0))
6657 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6658 REAL_VALUE_TYPE c;
6659 tree arg, arglist;
6661 c = TREE_REAL_CST (arg01);
6662 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6663 arg = build_real (type, c);
6664 arglist = build_tree_list (NULL_TREE, arg);
6665 arglist = tree_cons (NULL_TREE, arg1, arglist);
6666 return build_function_call_expr (powfn, arglist);
6670 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6671 if (! optimize_size
6672 && operand_equal_p (arg0, arg1, 0))
6674 tree powfn;
6676 if (type == double_type_node)
6677 powfn = implicit_built_in_decls[BUILT_IN_POW];
6678 else if (type == float_type_node)
6679 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6680 else if (type == long_double_type_node)
6681 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6682 else
6683 powfn = NULL_TREE;
6685 if (powfn)
6687 tree arg = build_real (type, dconst2);
6688 tree arglist = build_tree_list (NULL_TREE, arg);
6689 arglist = tree_cons (NULL_TREE, arg0, arglist);
6690 return build_function_call_expr (powfn, arglist);
6695 goto associate;
6697 case BIT_IOR_EXPR:
6698 bit_ior:
6699 if (integer_all_onesp (arg1))
6700 return omit_one_operand (type, arg1, arg0);
6701 if (integer_zerop (arg1))
6702 return non_lvalue (fold_convert (type, arg0));
6703 t1 = distribute_bit_expr (code, type, arg0, arg1);
6704 if (t1 != NULL_TREE)
6705 return t1;
6707 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6709 This results in more efficient code for machines without a NAND
6710 instruction. Combine will canonicalize to the first form
6711 which will allow use of NAND instructions provided by the
6712 backend if they exist. */
6713 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6714 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6716 return fold (build1 (BIT_NOT_EXPR, type,
6717 build (BIT_AND_EXPR, type,
6718 TREE_OPERAND (arg0, 0),
6719 TREE_OPERAND (arg1, 0))));
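/* Editorial sketch: the rewrite above is De Morgan's law,
   (~a | ~b) == ~(a & b), letting combine later match a single
   NAND/AND-NOT instruction where the target has one.  */
#include <assert.h>

int main (void)
{
  unsigned a = 0xCAFEu, b = 0xBEEFu;
  assert ((~a | ~b) == ~(a & b));
  return 0;
}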
6722 /* See if this can be simplified into a rotate first. If that
6723 is unsuccessful, continue in the association code. */
6724 goto bit_rotate;
6726 case BIT_XOR_EXPR:
6727 if (integer_zerop (arg1))
6728 return non_lvalue (fold_convert (type, arg0));
6729 if (integer_all_onesp (arg1))
6730 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6732 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6733 with a constant, and the two constants have no bits in common,
6734 we should treat this as a BIT_IOR_EXPR since this may produce more
6735 simplifications. */
6736 if (TREE_CODE (arg0) == BIT_AND_EXPR
6737 && TREE_CODE (arg1) == BIT_AND_EXPR
6738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6739 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6740 && integer_zerop (const_binop (BIT_AND_EXPR,
6741 TREE_OPERAND (arg0, 1),
6742 TREE_OPERAND (arg1, 1), 0)))
6744 code = BIT_IOR_EXPR;
6745 goto bit_ior;
6748 /* See if this can be simplified into a rotate first. If that
6749 is unsuccessful, continue in the association code. */
6750 goto bit_rotate;
6752 case BIT_AND_EXPR:
6753 if (integer_all_onesp (arg1))
6754 return non_lvalue (fold_convert (type, arg0));
6755 if (integer_zerop (arg1))
6756 return omit_one_operand (type, arg1, arg0);
6757 t1 = distribute_bit_expr (code, type, arg0, arg1);
6758 if (t1 != NULL_TREE)
6759 return t1;
6760 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6761 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6762 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6764 unsigned int prec
6765 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6767 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6768 && (~TREE_INT_CST_LOW (arg1)
6769 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6770 return fold_convert (type, TREE_OPERAND (arg0, 0));
6773 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6775 This results in more efficient code for machines without a NOR
6776 instruction. Combine will canonicalize to the first form
6777 which will allow use of NOR instructions provided by the
6778 backend if they exist. */
6779 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6780 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6782 return fold (build1 (BIT_NOT_EXPR, type,
6783 build (BIT_IOR_EXPR, type,
6784 TREE_OPERAND (arg0, 0),
6785 TREE_OPERAND (arg1, 0))));
6788 goto associate;
6790 case RDIV_EXPR:
6791 /* Don't touch a floating-point divide by zero unless the mode
6792 of the constant can represent infinity. */
6793 if (TREE_CODE (arg1) == REAL_CST
6794 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6795 && real_zerop (arg1))
6796 return t;
6798 /* (-A) / (-B) -> A / B */
6799 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6800 return fold (build (RDIV_EXPR, type,
6801 TREE_OPERAND (arg0, 0),
6802 negate_expr (arg1)));
6803 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6804 return fold (build (RDIV_EXPR, type,
6805 negate_expr (arg0),
6806 TREE_OPERAND (arg1, 0)));
6808 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6809 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6810 && real_onep (arg1))
6811 return non_lvalue (fold_convert (type, arg0));
6813 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6814 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6815 && real_minus_onep (arg1))
6816 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6818 /* If ARG1 is a constant, we can convert this to a multiply by the
6819 reciprocal. This does not have the same rounding properties,
6820 so only do this if -funsafe-math-optimizations. We can actually
6821 always safely do it if ARG1 is a power of two, but it's hard to
6822 tell if it is or not in a portable manner. */
6823 if (TREE_CODE (arg1) == REAL_CST)
6825 if (flag_unsafe_math_optimizations
6826 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6827 arg1, 0)))
6828 return fold (build (MULT_EXPR, type, arg0, tem));
6829 /* Find the reciprocal if optimizing and the result is exact. */
6830 if (optimize)
6832 REAL_VALUE_TYPE r;
6833 r = TREE_REAL_CST (arg1);
6834 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6836 tem = build_real (type, r);
6837 return fold (build (MULT_EXPR, type, arg0, tem));
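/* Editorial sketch: x / c -> x * (1/c).  As the comments above note,
   this is always exact when 1/c is exactly representable -- e.g. when
   c is a power of two, the exact_real_inverse case -- and otherwise
   changes rounding, hence the -funsafe-math-optimizations guard.  */
#include <assert.h>

int main (void)
{
  double xs[] = { 1.0, 0.3, -7.25 };
  int i;
  for (i = 0; i < 3; i++)
    assert (xs[i] / 8.0 == xs[i] * 0.125);   /* 1/8 is exact */
  return 0;
}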
6841 /* Convert A/B/C to A/(B*C). */
6842 if (flag_unsafe_math_optimizations
6843 && TREE_CODE (arg0) == RDIV_EXPR)
6844 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6845 fold (build (MULT_EXPR, type,
6846 TREE_OPERAND (arg0, 1), arg1))));
6848 /* Convert A/(B/C) to (A/B)*C. */
6849 if (flag_unsafe_math_optimizations
6850 && TREE_CODE (arg1) == RDIV_EXPR)
6851 return fold (build (MULT_EXPR, type,
6852 fold (build (RDIV_EXPR, type, arg0,
6853 TREE_OPERAND (arg1, 0))),
6854 TREE_OPERAND (arg1, 1)));
6856 /* Convert C1/(X*C2) into (C1/C2)/X. */
6857 if (flag_unsafe_math_optimizations
6858 && TREE_CODE (arg1) == MULT_EXPR
6859 && TREE_CODE (arg0) == REAL_CST
6860 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6862 tree tem = const_binop (RDIV_EXPR, arg0,
6863 TREE_OPERAND (arg1, 1), 0);
6864 if (tem)
6865 return fold (build (RDIV_EXPR, type, tem,
6866 TREE_OPERAND (arg1, 0)));
6869 if (flag_unsafe_math_optimizations)
6871 enum built_in_function fcode = builtin_mathfn_code (arg1);
6872 /* Optimize x/expN(y) into x*expN(-y). */
6873 if (fcode == BUILT_IN_EXP
6874 || fcode == BUILT_IN_EXPF
6875 || fcode == BUILT_IN_EXPL
6876 || fcode == BUILT_IN_EXP2
6877 || fcode == BUILT_IN_EXP2F
6878 || fcode == BUILT_IN_EXP2L
6879 || fcode == BUILT_IN_EXP10
6880 || fcode == BUILT_IN_EXP10F
6881 || fcode == BUILT_IN_EXP10L
6882 || fcode == BUILT_IN_POW10
6883 || fcode == BUILT_IN_POW10F
6884 || fcode == BUILT_IN_POW10L)
6886 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6887 tree arg = build1 (NEGATE_EXPR, type,
6888 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6889 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6890 arg1 = build_function_call_expr (expfn, arglist);
6891 return fold (build (MULT_EXPR, type, arg0, arg1));
6894 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6895 if (fcode == BUILT_IN_POW
6896 || fcode == BUILT_IN_POWF
6897 || fcode == BUILT_IN_POWL)
6899 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6900 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6901 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6902 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6903 tree arglist = tree_cons (NULL_TREE, arg10,
6904 build_tree_list (NULL_TREE, neg11));
6905 arg1 = build_function_call_expr (powfn, arglist);
6906 return fold (build (MULT_EXPR, type, arg0, arg1));
6910 if (flag_unsafe_math_optimizations)
6912 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6913 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6915 /* Optimize sin(x)/cos(x) as tan(x). */
6916 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6917 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6918 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6919 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6920 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6922 tree tanfn;
6924 if (fcode0 == BUILT_IN_SIN)
6925 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6926 else if (fcode0 == BUILT_IN_SINF)
6927 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6928 else if (fcode0 == BUILT_IN_SINL)
6929 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6930 else
6931 tanfn = NULL_TREE;
6933 if (tanfn != NULL_TREE)
6934 return build_function_call_expr (tanfn,
6935 TREE_OPERAND (arg0, 1));
6938 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6939 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6940 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6941 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6942 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6943 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6945 tree tanfn;
6947 if (fcode0 == BUILT_IN_COS)
6948 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6949 else if (fcode0 == BUILT_IN_COSF)
6950 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6951 else if (fcode0 == BUILT_IN_COSL)
6952 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6953 else
6954 tanfn = NULL_TREE;
6956 if (tanfn != NULL_TREE)
6958 tree tmp = TREE_OPERAND (arg0, 1);
6959 tmp = build_function_call_expr (tanfn, tmp);
6960 return fold (build (RDIV_EXPR, type,
6961 build_real (type, dconst1),
6962 tmp));
6966 /* Optimize pow(x,c)/x as pow(x,c-1). */
6967 if (fcode0 == BUILT_IN_POW
6968 || fcode0 == BUILT_IN_POWF
6969 || fcode0 == BUILT_IN_POWL)
6971 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6972 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6973 if (TREE_CODE (arg01) == REAL_CST
6974 && ! TREE_CONSTANT_OVERFLOW (arg01)
6975 && operand_equal_p (arg1, arg00, 0))
6977 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6978 REAL_VALUE_TYPE c;
6979 tree arg, arglist;
6981 c = TREE_REAL_CST (arg01);
6982 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6983 arg = build_real (type, c);
6984 arglist = build_tree_list (NULL_TREE, arg);
6985 arglist = tree_cons (NULL_TREE, arg1, arglist);
6986 return build_function_call_expr (powfn, arglist);
6990 goto binary;
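/* Editorial sketch: sin(x)/cos(x) -> tan(x) and cos(x)/sin(x) ->
   1.0/tan(x) above are identities over the reals; in floating point
   the two sides may differ in the last bits, which is why both folds
   are under flag_unsafe_math_optimizations.  */
#include <assert.h>
#include <math.h>

int main (void)
{
  double x = 0.9;
  assert (fabs (sin (x) / cos (x) - tan (x)) < 1e-12);
  return 0;
}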
6992 case TRUNC_DIV_EXPR:
6993 case ROUND_DIV_EXPR:
6994 case FLOOR_DIV_EXPR:
6995 case CEIL_DIV_EXPR:
6996 case EXACT_DIV_EXPR:
6997 if (integer_onep (arg1))
6998 return non_lvalue (fold_convert (type, arg0));
6999 if (integer_zerop (arg1))
7000 return t;
7002 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7003 operation, EXACT_DIV_EXPR.
7005 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7006 At one time others generated faster code; it's not clear whether they
7007 still do after the last round of changes to the DIV code in expmed.c. */
7008 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7009 && multiple_of_p (type, arg0, arg1))
7010 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
7012 if (TREE_CODE (arg1) == INTEGER_CST
7013 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7014 code, NULL_TREE)))
7015 return fold_convert (type, tem);
7017 goto binary;
7019 case CEIL_MOD_EXPR:
7020 case FLOOR_MOD_EXPR:
7021 case ROUND_MOD_EXPR:
7022 case TRUNC_MOD_EXPR:
7023 if (integer_onep (arg1))
7024 return omit_one_operand (type, integer_zero_node, arg0);
7025 if (integer_zerop (arg1))
7026 return t;
7028 if (TREE_CODE (arg1) == INTEGER_CST
7029 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7030 code, NULL_TREE)))
7031 return fold_convert (type, tem);
7033 goto binary;
7035 case LROTATE_EXPR:
7036 case RROTATE_EXPR:
7037 if (integer_all_onesp (arg0))
7038 return omit_one_operand (type, arg0, arg1);
7039 goto shift;
7041 case RSHIFT_EXPR:
7042 /* Optimize -1 >> x for arithmetic right shifts. */
7043 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
7044 return omit_one_operand (type, arg0, arg1);
7045 /* ... fall through ... */
7047 case LSHIFT_EXPR:
7048 shift:
7049 if (integer_zerop (arg1))
7050 return non_lvalue (fold_convert (type, arg0));
7051 if (integer_zerop (arg0))
7052 return omit_one_operand (type, arg0, arg1);
7054 /* Since a negative shift count is not well-defined,
7055 don't try to compute it in the compiler. */
7056 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7057 return t;
7058 /* Rewrite an LROTATE_EXPR by a constant into an
7059 RROTATE_EXPR by a new constant. */
7060 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7062 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7063 tem = fold_convert (TREE_TYPE (arg1), tem);
7064 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7065 return fold (build (RROTATE_EXPR, type, arg0, tem));
7068 /* If we have a rotate of a bit operation with the rotate count and
7069 the second operand of the bit operation both constant,
7070 permute the two operations. */
7071 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7072 && (TREE_CODE (arg0) == BIT_AND_EXPR
7073 || TREE_CODE (arg0) == BIT_IOR_EXPR
7074 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7076 return fold (build (TREE_CODE (arg0), type,
7077 fold (build (code, type,
7078 TREE_OPERAND (arg0, 0), arg1)),
7079 fold (build (code, type,
7080 TREE_OPERAND (arg0, 1), arg1))));
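/* E.g. (x & C1) >>r C2 becomes (x >>r C2) & (C1 >>r C2); the rotate
   of the constant folds, leaving one rotate and one mask.  */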
7082 /* Two consecutive rotates adding up to the width of the mode can
7083 be ignored. */
7084 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7085 && TREE_CODE (arg0) == RROTATE_EXPR
7086 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7087 && TREE_INT_CST_HIGH (arg1) == 0
7088 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7089 && ((TREE_INT_CST_LOW (arg1)
7090 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7091 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7092 return TREE_OPERAND (arg0, 0);
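/* E.g. ((x >>r 10) >>r 22) in a 32-bit type is just x, since the two
   rotate counts sum to the mode width.  */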
7094 goto binary;
7096 case MIN_EXPR:
7097 if (operand_equal_p (arg0, arg1, 0))
7098 return omit_one_operand (type, arg0, arg1);
7099 if (INTEGRAL_TYPE_P (type)
7100 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
7101 return omit_one_operand (type, arg1, arg0);
7102 goto associate;
7104 case MAX_EXPR:
7105 if (operand_equal_p (arg0, arg1, 0))
7106 return omit_one_operand (type, arg0, arg1);
7107 if (INTEGRAL_TYPE_P (type)
7108 && TYPE_MAX_VALUE (type)
7109 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
7110 return omit_one_operand (type, arg1, arg0);
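/* E.g. MAX_EXPR (x, INT_MAX) folds to INT_MAX, with x kept only for
   its side effects, mirroring the MIN_EXPR case above.  */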
7111 goto associate;
7113 case TRUTH_NOT_EXPR:
7114 /* Note that the operand of this must be an int
7115 and its values must be 0 or 1.
7116 ("true" is a fixed value perhaps depending on the language,
7117 but we don't handle values other than 1 correctly yet.) */
7118 tem = invert_truthvalue (arg0);
7119 /* Avoid infinite recursion. */
7120 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7122 tem = fold_single_bit_test (code, arg0, arg1, type);
7123 if (tem)
7124 return tem;
7125 return t;
7127 return fold_convert (type, tem);
7129 case TRUTH_ANDIF_EXPR:
7130 /* Note that the operands of this must be ints
7131 and their values must be 0 or 1.
7132 ("true" is a fixed value perhaps depending on the language.) */
7133 /* If first arg is constant zero, return it. */
7134 if (integer_zerop (arg0))
7135 return fold_convert (type, arg0);
7136 case TRUTH_AND_EXPR:
7137 /* If either arg is constant true, drop it. */
7138 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7139 return non_lvalue (fold_convert (type, arg1));
7140 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7141 /* Preserve sequence points. */
7142 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7143 return non_lvalue (fold_convert (type, arg0));
7144 /* If second arg is constant zero, result is zero, but first arg
7145 must be evaluated. */
7146 if (integer_zerop (arg1))
7147 return omit_one_operand (type, arg1, arg0);
7148 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7149 case will be handled here. */
7150 if (integer_zerop (arg0))
7151 return omit_one_operand (type, arg0, arg1);
7153 truth_andor:
7154 /* We only do these simplifications if we are optimizing. */
7155 if (!optimize)
7156 return t;
7158 /* Check for things like (A || B) && (A || C). We can convert this
7159 to A || (B && C). Note that either operator can be any of the four
7160 truth and/or operations and the transformation will still be
7161 valid. Also note that we only care about order for the
7162 ANDIF and ORIF operators. If B contains side effects, this
7163 might change the truth-value of A. */
7164 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7165 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7166 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7167 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7168 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7169 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7171 tree a00 = TREE_OPERAND (arg0, 0);
7172 tree a01 = TREE_OPERAND (arg0, 1);
7173 tree a10 = TREE_OPERAND (arg1, 0);
7174 tree a11 = TREE_OPERAND (arg1, 1);
7175 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7176 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7177 && (code == TRUTH_AND_EXPR
7178 || code == TRUTH_OR_EXPR));
7180 if (operand_equal_p (a00, a10, 0))
7181 return fold (build (TREE_CODE (arg0), type, a00,
7182 fold (build (code, type, a01, a11))));
7183 else if (commutative && operand_equal_p (a00, a11, 0))
7184 return fold (build (TREE_CODE (arg0), type, a00,
7185 fold (build (code, type, a01, a10))));
7186 else if (commutative && operand_equal_p (a01, a10, 0))
7187 return fold (build (TREE_CODE (arg0), type, a01,
7188 fold (build (code, type, a00, a11))));
7190 /* This case is tricky because we must either have commutative
7191 operators or else A10 must not have side-effects. */
7193 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7194 && operand_equal_p (a01, a11, 0))
7195 return fold (build (TREE_CODE (arg0), type,
7196 fold (build (code, type, a00, a10)),
7197 a01));
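/* E.g. (a || b) && (a || c) folds to a || (b && c) here, provided b
   has no side effects that could change the truth value of a.  */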
7200 /* See if we can build a range comparison. */
7201 if (0 != (tem = fold_range_test (t)))
7202 return tem;
7204 /* Check for the possibility of merging component references. If our
7205 lhs is another similar operation, try to merge its rhs with our
7206 rhs. Then try to merge our lhs and rhs. */
7207 if (TREE_CODE (arg0) == code
7208 && 0 != (tem = fold_truthop (code, type,
7209 TREE_OPERAND (arg0, 1), arg1)))
7210 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7212 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7213 return tem;
7215 return t;
7217 case TRUTH_ORIF_EXPR:
7218 /* Note that the operands of this must be ints
7219 and their values must be 0 or true.
7220 ("true" is a fixed value perhaps depending on the language.) */
7221 /* If first arg is constant true, return it. */
7222 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7223 return fold_convert (type, arg0);
7224 case TRUTH_OR_EXPR:
7225 /* If either arg is constant zero, drop it. */
7226 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7227 return non_lvalue (fold_convert (type, arg1));
7228 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7229 /* Preserve sequence points. */
7230 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7231 return non_lvalue (fold_convert (type, arg0));
7232 /* If second arg is constant true, result is true, but we must
7233 evaluate first arg. */
7234 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7235 return omit_one_operand (type, arg1, arg0);
7236 /* Likewise for first arg, but note this only occurs here for
7237 TRUTH_OR_EXPR. */
7238 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7239 return omit_one_operand (type, arg0, arg1);
7240 goto truth_andor;
7242 case TRUTH_XOR_EXPR:
7243 /* If either arg is constant zero, drop it. */
7244 if (integer_zerop (arg0))
7245 return non_lvalue (fold_convert (type, arg1));
7246 if (integer_zerop (arg1))
7247 return non_lvalue (fold_convert (type, arg0));
7248 /* If either arg is constant true, this is a logical inversion. */
7249 if (integer_onep (arg0))
7250 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7251 if (integer_onep (arg1))
7252 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7253 return t;
7255 case EQ_EXPR:
7256 case NE_EXPR:
7257 case LT_EXPR:
7258 case GT_EXPR:
7259 case LE_EXPR:
7260 case GE_EXPR:
7261 /* If one arg is a real or integer constant, put it last. */
7262 if (tree_swap_operands_p (arg0, arg1, true))
7263 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7265 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7267 tree targ0 = strip_float_extensions (arg0);
7268 tree targ1 = strip_float_extensions (arg1);
7269 tree newtype = TREE_TYPE (targ0);
7271 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7272 newtype = TREE_TYPE (targ1);
7274 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7275 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7276 return fold (build (code, type, fold_convert (newtype, targ0),
7277 fold_convert (newtype, targ1)));
7279 /* (-a) CMP (-b) -> b CMP a */
7280 if (TREE_CODE (arg0) == NEGATE_EXPR
7281 && TREE_CODE (arg1) == NEGATE_EXPR)
7282 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7283 TREE_OPERAND (arg0, 0)));
7285 if (TREE_CODE (arg1) == REAL_CST)
7287 REAL_VALUE_TYPE cst;
7288 cst = TREE_REAL_CST (arg1);
7290 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7291 if (TREE_CODE (arg0) == NEGATE_EXPR)
7292 return
7293 fold (build (swap_tree_comparison (code), type,
7294 TREE_OPERAND (arg0, 0),
7295 build_real (TREE_TYPE (arg1),
7296 REAL_VALUE_NEGATE (cst))));
7298 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7299 /* a CMP (-0) -> a CMP 0 */
7300 if (REAL_VALUE_MINUS_ZERO (cst))
7301 return fold (build (code, type, arg0,
7302 build_real (TREE_TYPE (arg1), dconst0)));
7304 /* x != NaN is always true, other ops are always false. */
7305 if (REAL_VALUE_ISNAN (cst)
7306 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7308 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7309 return omit_one_operand (type, fold_convert (type, t), arg0);
7312 /* Fold comparisons against infinity. */
7313 if (REAL_VALUE_ISINF (cst))
7315 tem = fold_inf_compare (code, type, arg0, arg1);
7316 if (tem != NULL_TREE)
7317 return tem;
7321 /* If this is a comparison of a real constant with a PLUS_EXPR
7322 or a MINUS_EXPR of a real constant, we can convert it into a
7323 comparison with a revised real constant as long as no overflow
7324 occurs when unsafe_math_optimizations are enabled. */
7325 if (flag_unsafe_math_optimizations
7326 && TREE_CODE (arg1) == REAL_CST
7327 && (TREE_CODE (arg0) == PLUS_EXPR
7328 || TREE_CODE (arg0) == MINUS_EXPR)
7329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7330 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7331 ? MINUS_EXPR : PLUS_EXPR,
7332 arg1, TREE_OPERAND (arg0, 1), 0))
7333 && ! TREE_CONSTANT_OVERFLOW (tem))
7334 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7336 /* Likewise, we can simplify a comparison of a real constant with
7337 a MINUS_EXPR whose first operand is also a real constant, i.e.
7338 (c1 - x) < c2 becomes x > c1-c2. */
7339 if (flag_unsafe_math_optimizations
7340 && TREE_CODE (arg1) == REAL_CST
7341 && TREE_CODE (arg0) == MINUS_EXPR
7342 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7343 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7344 arg1, 0))
7345 && ! TREE_CONSTANT_OVERFLOW (tem))
7346 return fold (build (swap_tree_comparison (code), type,
7347 TREE_OPERAND (arg0, 1), tem));
7349 /* Fold comparisons against built-in math functions. */
7350 if (TREE_CODE (arg1) == REAL_CST
7351 && flag_unsafe_math_optimizations
7352 && ! flag_errno_math)
7354 enum built_in_function fcode = builtin_mathfn_code (arg0);
7356 if (fcode != END_BUILTINS)
7358 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7359 if (tem != NULL_TREE)
7360 return tem;
7365 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7366 if (TREE_CONSTANT (arg1)
7367 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7368 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7369 /* This optimization is invalid for ordered comparisons
7370 if CONST+INCR overflows or if foo+incr might overflow.
7371 This optimization is invalid for floating point due to rounding.
7372 For pointer types we assume overflow doesn't happen. */
7373 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7374 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7375 && (code == EQ_EXPR || code == NE_EXPR))))
7377 tree varop, newconst;
7379 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7381 newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
7382 arg1, TREE_OPERAND (arg0, 1)));
7383 varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7384 TREE_OPERAND (arg0, 0),
7385 TREE_OPERAND (arg0, 1));
7387 else
7389 newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
7390 arg1, TREE_OPERAND (arg0, 1)));
7391 varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7392 TREE_OPERAND (arg0, 0),
7393 TREE_OPERAND (arg0, 1));
7397 /* If VAROP is a reference to a bitfield, we must mask
7398 the constant by the width of the field. */
7399 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7400 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7402 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7403 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7404 tree folded_compare, shift;
7406 /* First check whether the comparison would come out
7407 always the same. If we don't do that we would
7408 change the meaning with the masking. */
7409 folded_compare = fold (build (code, type,
7410 TREE_OPERAND (varop, 0),
7411 arg1));
7412 if (integer_zerop (folded_compare)
7413 || integer_onep (folded_compare))
7414 return omit_one_operand (type, folded_compare, varop);
7416 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7417 0);
7418 newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop),
7419 newconst, shift));
7420 newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop),
7421 newconst, shift));
7424 return fold (build (code, type, varop, newconst));
7427 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7428 This transformation affects the cases which are handled in later
7429 optimizations involving comparisons with non-negative constants. */
7430 if (TREE_CODE (arg1) == INTEGER_CST
7431 && TREE_CODE (arg0) != INTEGER_CST
7432 && tree_int_cst_sgn (arg1) > 0)
7434 switch (code)
7436 case GE_EXPR:
7437 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7438 return fold (build (GT_EXPR, type, arg0, arg1));
7440 case LT_EXPR:
7441 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7442 return fold (build (LE_EXPR, type, arg0, arg1));
7444 default:
7445 break;
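/* E.g. x >= 3 becomes x > 2 and x < 3 becomes x <= 2, which feeds
   the highest/lowest-value comparisons handled just below.  */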
7449 /* Comparisons with the highest or lowest possible integer of
7450 the specified size will have known values. */
7452 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7454 if (TREE_CODE (arg1) == INTEGER_CST
7455 && ! TREE_CONSTANT_OVERFLOW (arg1)
7456 && width <= HOST_BITS_PER_WIDE_INT
7457 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7458 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7460 unsigned HOST_WIDE_INT signed_max;
7461 unsigned HOST_WIDE_INT max, min;
7463 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7465 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7467 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7468 min = 0;
7470 else
7472 max = signed_max;
7473 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7476 if (TREE_INT_CST_HIGH (arg1) == 0
7477 && TREE_INT_CST_LOW (arg1) == max)
7478 switch (code)
7480 case GT_EXPR:
7481 return omit_one_operand (type,
7482 fold_convert (type,
7483 integer_zero_node),
7484 arg0);
7485 case GE_EXPR:
7486 return fold (build (EQ_EXPR, type, arg0, arg1));
7488 case LE_EXPR:
7489 return omit_one_operand (type,
7490 fold_convert (type,
7491 integer_one_node),
7492 arg0);
7493 case LT_EXPR:
7494 return fold (build (NE_EXPR, type, arg0, arg1));
7496 /* The GE_EXPR and LT_EXPR cases above are not normally
7497 reached because of previous transformations. */
7499 default:
7500 break;
7502 else if (TREE_INT_CST_HIGH (arg1) == 0
7503 && TREE_INT_CST_LOW (arg1) == max - 1)
7504 switch (code)
7506 case GT_EXPR:
7507 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7508 return fold (build (EQ_EXPR, type, arg0, arg1));
7509 case LE_EXPR:
7510 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7511 return fold (build (NE_EXPR, type, arg0, arg1));
7512 default:
7513 break;
7515 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7516 && TREE_INT_CST_LOW (arg1) == min)
7517 switch (code)
7519 case LT_EXPR:
7520 return omit_one_operand (type,
7521 fold_convert (type,
7522 integer_zero_node),
7523 arg0);
7524 case LE_EXPR:
7525 return fold (build (EQ_EXPR, type, arg0, arg1));
7527 case GE_EXPR:
7528 return omit_one_operand (type,
7529 fold_convert (type,
7530 integer_one_node),
7531 arg0);
7532 case GT_EXPR:
7533 return fold (build (NE_EXPR, type, arg0, arg1));
7535 default:
7536 break;
7538 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7539 && TREE_INT_CST_LOW (arg1) == min + 1)
7540 switch (code)
7542 case GE_EXPR:
7543 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7544 return fold (build (NE_EXPR, type, arg0, arg1));
7545 case LT_EXPR:
7546 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7547 return fold (build (EQ_EXPR, type, arg0, arg1));
7548 default:
7549 break;
7552 else if (TREE_INT_CST_HIGH (arg1) == 0
7553 && TREE_INT_CST_LOW (arg1) == signed_max
7554 && TREE_UNSIGNED (TREE_TYPE (arg1))
7555 /* signed_type does not work on pointer types. */
7556 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7558 /* The following case also applies to X < signed_max+1
7559 and X >= signed_max+1 because of previous transformations. */
7560 if (code == LE_EXPR || code == GT_EXPR)
7562 tree st0, st1;
7563 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7564 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7565 return fold
7566 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7567 type, fold_convert (st0, arg0),
7568 fold_convert (st1, integer_zero_node)));
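/* E.g. for a 32-bit unsigned x, x <= 0x7fffffff becomes
   (int) x >= 0 and x > 0x7fffffff becomes (int) x < 0.  */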
7574 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7575 a MINUS_EXPR of a constant, we can convert it into a comparison with
7576 a revised constant as long as no overflow occurs. */
7577 if ((code == EQ_EXPR || code == NE_EXPR)
7578 && TREE_CODE (arg1) == INTEGER_CST
7579 && (TREE_CODE (arg0) == PLUS_EXPR
7580 || TREE_CODE (arg0) == MINUS_EXPR)
7581 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7582 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7583 ? MINUS_EXPR : PLUS_EXPR,
7584 arg1, TREE_OPERAND (arg0, 1), 0))
7585 && ! TREE_CONSTANT_OVERFLOW (tem))
7586 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7588 /* Similarly for a NEGATE_EXPR. */
7589 else if ((code == EQ_EXPR || code == NE_EXPR)
7590 && TREE_CODE (arg0) == NEGATE_EXPR
7591 && TREE_CODE (arg1) == INTEGER_CST
7592 && 0 != (tem = negate_expr (arg1))
7593 && TREE_CODE (tem) == INTEGER_CST
7594 && ! TREE_CONSTANT_OVERFLOW (tem))
7595 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7597 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7598 for !=. Don't do this for ordered comparisons due to overflow. */
7599 else if ((code == NE_EXPR || code == EQ_EXPR)
7600 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7601 return fold (build (code, type,
7602 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7604 /* If we are widening one operand of an integer comparison,
7605 see if the other operand is similarly being widened. Perhaps we
7606 can do the comparison in the narrower type. */
7607 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7608 && TREE_CODE (arg0) == NOP_EXPR
7609 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7610 && (TYPE_PRECISION (TREE_TYPE (tem))
7611 > TYPE_PRECISION (TREE_TYPE (arg0)))
7612 && (code == EQ_EXPR || code == NE_EXPR
7613 || TREE_UNSIGNED (TREE_TYPE (arg0))
7614 == TREE_UNSIGNED (TREE_TYPE (tem)))
7615 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7616 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7617 || (TREE_CODE (t1) == INTEGER_CST
7618 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7619 return fold (build (code, type, tem,
7620 fold_convert (TREE_TYPE (tem), t1)));
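/* E.g. if s has type short, (int) s == 99 is folded to
   s == (short) 99, since 99 fits in the narrower type.  */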
7622 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7623 constant, we can simplify it. */
7624 else if (TREE_CODE (arg1) == INTEGER_CST
7625 && (TREE_CODE (arg0) == MIN_EXPR
7626 || TREE_CODE (arg0) == MAX_EXPR)
7627 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7628 return optimize_minmax_comparison (t);
7630 /* If we are comparing an ABS_EXPR with a constant, we can
7631 convert all the cases into explicit comparisons, but they may
7632 well not be faster than doing the ABS and one comparison.
7633 But ABS (X) <= C is a range comparison, which becomes a subtraction
7634 and a comparison, and is probably faster. */
7635 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7636 && TREE_CODE (arg0) == ABS_EXPR
7637 && ! TREE_SIDE_EFFECTS (arg0)
7638 && (0 != (tem = negate_expr (arg1)))
7639 && TREE_CODE (tem) == INTEGER_CST
7640 && ! TREE_CONSTANT_OVERFLOW (tem))
7641 return fold (build (TRUTH_ANDIF_EXPR, type,
7642 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7643 build (LE_EXPR, type,
7644 TREE_OPERAND (arg0, 0), arg1)));
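/* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5, a range test that
   avoids computing the absolute value.  */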
7646 /* If this is an EQ or NE comparison with zero and ARG0 is
7647 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7648 two operations, but the latter can be done in one less insn
7649 on machines that have only two-operand insns or on which a
7650 constant cannot be the first operand. */
7651 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7652 && TREE_CODE (arg0) == BIT_AND_EXPR)
7654 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7655 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7656 return
7657 fold (build (code, type,
7658 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7659 build (RSHIFT_EXPR,
7660 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7661 TREE_OPERAND (arg0, 1),
7662 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7663 fold_convert (TREE_TYPE (arg0),
7664 integer_one_node)),
7665 arg1));
7666 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7667 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7668 return
7669 fold (build (code, type,
7670 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7671 build (RSHIFT_EXPR,
7672 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7673 TREE_OPERAND (arg0, 0),
7674 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7675 fold_convert (TREE_TYPE (arg0),
7676 integer_one_node)),
7677 arg1));
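/* E.g. ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0, so
   no instruction needs the constant 1 as the first shift operand.  */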
7680 /* If this is an NE or EQ comparison of zero against the result of a
7681 signed MOD operation whose second operand is a power of 2, make
7682 the MOD operation unsigned since it is simpler and equivalent. */
7683 if ((code == NE_EXPR || code == EQ_EXPR)
7684 && integer_zerop (arg1)
7685 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7686 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7687 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7688 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7689 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7690 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7692 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7693 tree newmod = build (TREE_CODE (arg0), newtype,
7694 fold_convert (newtype,
7695 TREE_OPERAND (arg0, 0)),
7696 fold_convert (newtype,
7697 TREE_OPERAND (arg0, 1)));
7699 return build (code, type, newmod, fold_convert (newtype, arg1));
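/* E.g. for signed x, x % 4 == 0 becomes (unsigned) x % 4 == 0; the
   unsigned remainder by a power of 2 is a simple mask.  */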
7702 /* If this is an NE comparison of zero with an AND of one, remove the
7703 comparison since the AND will give the correct value. */
7704 if (code == NE_EXPR && integer_zerop (arg1)
7705 && TREE_CODE (arg0) == BIT_AND_EXPR
7706 && integer_onep (TREE_OPERAND (arg0, 1)))
7707 return fold_convert (type, arg0);
7709 /* If we have (A & C) == C where C is a power of 2, convert this into
7710 (A & C) != 0. Similarly for NE_EXPR. */
7711 if ((code == EQ_EXPR || code == NE_EXPR)
7712 && TREE_CODE (arg0) == BIT_AND_EXPR
7713 && integer_pow2p (TREE_OPERAND (arg0, 1))
7714 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7715 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7716 arg0, integer_zero_node));
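/* E.g. (a & 8) == 8 becomes (a & 8) != 0, which the single-bit test
   folding just below can simplify further.  */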
7718 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7719 2, then fold the expression into shifts and logical operations. */
7720 tem = fold_single_bit_test (code, arg0, arg1, type);
7721 if (tem)
7722 return tem;
7724 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7725 Similarly for NE_EXPR. */
7726 if ((code == EQ_EXPR || code == NE_EXPR)
7727 && TREE_CODE (arg0) == BIT_AND_EXPR
7728 && TREE_CODE (arg1) == INTEGER_CST
7729 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7731 tree dandnotc
7732 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7733 arg1, build1 (BIT_NOT_EXPR,
7734 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7735 TREE_OPERAND (arg0, 1))));
7736 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7737 if (integer_nonzerop (dandnotc))
7738 return omit_one_operand (type, rslt, arg0);
7741 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7742 Similarly for NE_EXPR. */
7743 if ((code == EQ_EXPR || code == NE_EXPR)
7744 && TREE_CODE (arg0) == BIT_IOR_EXPR
7745 && TREE_CODE (arg1) == INTEGER_CST
7746 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7748 tree candnotd
7749 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7750 TREE_OPERAND (arg0, 1),
7751 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7752 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7753 if (integer_nonzerop (candnotd))
7754 return omit_one_operand (type, rslt, arg0);
7757 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7758 and similarly for >= into !=. */
7759 if ((code == LT_EXPR || code == GE_EXPR)
7760 && TREE_UNSIGNED (TREE_TYPE (arg0))
7761 && TREE_CODE (arg1) == LSHIFT_EXPR
7762 && integer_onep (TREE_OPERAND (arg1, 0)))
7763 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7764 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7765 TREE_OPERAND (arg1, 1)),
7766 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7768 else if ((code == LT_EXPR || code == GE_EXPR)
7769 && TREE_UNSIGNED (TREE_TYPE (arg0))
7770 && (TREE_CODE (arg1) == NOP_EXPR
7771 || TREE_CODE (arg1) == CONVERT_EXPR)
7772 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7773 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7774 return
7775 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7776 fold_convert (TREE_TYPE (arg0),
7777 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7778 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7779 1))),
7780 fold_convert (TREE_TYPE (arg0), integer_zero_node));
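/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0.  */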
7782 /* Simplify comparison of something with itself. (For IEEE
7783 floating-point, we can only do some of these simplifications.) */
7784 if (operand_equal_p (arg0, arg1, 0))
7786 switch (code)
7788 case EQ_EXPR:
7789 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7790 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7791 return constant_boolean_node (1, type);
7792 break;
7794 case GE_EXPR:
7795 case LE_EXPR:
7796 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7797 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7798 return constant_boolean_node (1, type);
7799 return fold (build (EQ_EXPR, type, arg0, arg1));
7801 case NE_EXPR:
7802 /* For NE, we can only do this simplification if integer
7803 or we don't honor IEEE floating point NaNs. */
7804 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7805 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7806 break;
7807 /* ... fall through ... */
7808 case GT_EXPR:
7809 case LT_EXPR:
7810 return constant_boolean_node (0, type);
7811 default:
7812 abort ();
7816 /* If we are comparing an expression that just has comparisons
7817 of two integer values, arithmetic expressions of those comparisons,
7818 and constants, we can simplify it. There are only three cases
7819 to check: the two values can either be equal, the first can be
7820 greater, or the second can be greater. Fold the expression for
7821 those three values. Since each value must be 0 or 1, we have
7822 eight possibilities, each of which corresponds to the constant 0
7823 or 1 or one of the six possible comparisons.
7825 This handles common cases like (a > b) == 0 but also handles
7826 expressions like ((x > y) - (y > x)) > 0, which supposedly
7827 occur in macroized code. */
7829 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7831 tree cval1 = 0, cval2 = 0;
7832 int save_p = 0;
7834 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7835 /* Don't handle degenerate cases here; they should already
7836 have been handled anyway. */
7837 && cval1 != 0 && cval2 != 0
7838 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7839 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7840 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7841 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7842 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7843 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7844 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7846 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7847 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7849 /* We can't just pass T to eval_subst in case cval1 or cval2
7850 was the same as ARG1. */
7852 tree high_result
7853 = fold (build (code, type,
7854 eval_subst (arg0, cval1, maxval, cval2, minval),
7855 arg1));
7856 tree equal_result
7857 = fold (build (code, type,
7858 eval_subst (arg0, cval1, maxval, cval2, maxval),
7859 arg1));
7860 tree low_result
7861 = fold (build (code, type,
7862 eval_subst (arg0, cval1, minval, cval2, maxval),
7863 arg1));
7865 /* All three of these results should be 0 or 1. Confirm they
7866 are. Then use those values to select the proper code
7867 to use. */
7869 if ((integer_zerop (high_result)
7870 || integer_onep (high_result))
7871 && (integer_zerop (equal_result)
7872 || integer_onep (equal_result))
7873 && (integer_zerop (low_result)
7874 || integer_onep (low_result)))
7876 /* Make a 3-bit mask with the high-order bit being the
7877 value for `>', the next for `=', and the low for `<'. */
7878 switch ((integer_onep (high_result) * 4)
7879 + (integer_onep (equal_result) * 2)
7880 + integer_onep (low_result))
7882 case 0:
7883 /* Always false. */
7884 return omit_one_operand (type, integer_zero_node, arg0);
7885 case 1:
7886 code = LT_EXPR;
7887 break;
7888 case 2:
7889 code = EQ_EXPR;
7890 break;
7891 case 3:
7892 code = LE_EXPR;
7893 break;
7894 case 4:
7895 code = GT_EXPR;
7896 break;
7897 case 5:
7898 code = NE_EXPR;
7899 break;
7900 case 6:
7901 code = GE_EXPR;
7902 break;
7903 case 7:
7904 /* Always true. */
7905 return omit_one_operand (type, integer_one_node, arg0);
7908 t = build (code, type, cval1, cval2);
7909 if (save_p)
7910 return save_expr (t);
7911 else
7912 return fold (t);
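/* E.g. ((x > y) - (y > x)) > 0 is true only in the "greater" case,
   giving mask 4 above, so it folds to x > y.  */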
7917 /* If this is a comparison of a field, we may be able to simplify it. */
7918 if (((TREE_CODE (arg0) == COMPONENT_REF
7919 && (*lang_hooks.can_use_bit_fields_p) ())
7920 || TREE_CODE (arg0) == BIT_FIELD_REF)
7921 && (code == EQ_EXPR || code == NE_EXPR)
7922 /* Handle the constant case even without -O
7923 to make sure the warnings are given. */
7924 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7926 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7927 if (t1)
7928 return t1;
7931 /* If this is a comparison of complex values and either or both sides
7932 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7933 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7934 This may prevent needless evaluations. */
7935 if ((code == EQ_EXPR || code == NE_EXPR)
7936 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7937 && (TREE_CODE (arg0) == COMPLEX_EXPR
7938 || TREE_CODE (arg1) == COMPLEX_EXPR
7939 || TREE_CODE (arg0) == COMPLEX_CST
7940 || TREE_CODE (arg1) == COMPLEX_CST))
7942 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7943 tree real0, imag0, real1, imag1;
7945 arg0 = save_expr (arg0);
7946 arg1 = save_expr (arg1);
7947 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7948 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7949 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7950 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7952 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7953 : TRUTH_ORIF_EXPR),
7954 type,
7955 fold (build (code, type, real0, real1)),
7956 fold (build (code, type, imag0, imag1))));
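/* E.g. x == y on complex operands becomes
   REALPART (x) == REALPART (y) && IMAGPART (x) == IMAGPART (y),
   with != using || instead of &&.  */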
7959 /* Optimize comparisons of strlen vs zero to a compare of the
7960 first character of the string vs zero. To wit,
7961 strlen(ptr) == 0 => *ptr == 0
7962 strlen(ptr) != 0 => *ptr != 0
7963 Other cases should reduce to one of these two (or a constant)
7964 due to the return value of strlen being unsigned. */
7965 if ((code == EQ_EXPR || code == NE_EXPR)
7966 && integer_zerop (arg1)
7967 && TREE_CODE (arg0) == CALL_EXPR)
7969 tree fndecl = get_callee_fndecl (arg0);
7970 tree arglist;
7972 if (fndecl
7973 && DECL_BUILT_IN (fndecl)
7974 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7975 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7976 && (arglist = TREE_OPERAND (arg0, 1))
7977 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7978 && ! TREE_CHAIN (arglist))
7979 return fold (build (code, type,
7980 build1 (INDIRECT_REF, char_type_node,
7981 TREE_VALUE(arglist)),
7982 integer_zero_node));
7985 /* From here on, the only cases we handle are when the result is
7986 known to be a constant.
7988 To compute GT, swap the arguments and do LT.
7989 To compute GE, do LT and invert the result.
7990 To compute LE, swap the arguments, do LT and invert the result.
7991 To compute NE, do EQ and invert the result.
7993 Therefore, the code below must handle only EQ and LT. */
7995 if (code == LE_EXPR || code == GT_EXPR)
7997 tem = arg0, arg0 = arg1, arg1 = tem;
7998 code = swap_tree_comparison (code);
8001 /* Note that it is safe to invert for real values here because we
8002 will check below in the one case that it matters. */
8004 t1 = NULL_TREE;
8005 invert = 0;
8006 if (code == NE_EXPR || code == GE_EXPR)
8008 invert = 1;
8009 code = invert_tree_comparison (code);
8012 /* Compute a result for LT or EQ if args permit;
8013 otherwise return T. */
8014 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8016 if (code == EQ_EXPR)
8017 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
8018 else
8019 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
8020 ? INT_CST_LT_UNSIGNED (arg0, arg1)
8021 : INT_CST_LT (arg0, arg1)),
8022 0);
8025 #if 0 /* This is no longer useful, but breaks some real code. */
8026 /* Assume a nonexplicit constant cannot equal an explicit one,
8027 since such code would be undefined anyway.
8028 Exception: on sysvr4, using #pragma weak,
8029 a label can come out as 0. */
8030 else if (TREE_CODE (arg1) == INTEGER_CST
8031 && !integer_zerop (arg1)
8032 && TREE_CONSTANT (arg0)
8033 && TREE_CODE (arg0) == ADDR_EXPR
8034 && code == EQ_EXPR)
8035 t1 = build_int_2 (0, 0);
8036 #endif
8037 /* Two real constants can be compared explicitly. */
8038 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8040 /* If either operand is a NaN, the result is false with two
8041 exceptions: First, an NE_EXPR is true on NaNs, but that case
8042 is already handled correctly since we will be inverting the
8043 result for NE_EXPR. Second, if we had inverted a LE_EXPR
8044 or a GE_EXPR into a LT_EXPR, we must return true so that it
8045 will be inverted into false. */
8047 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8048 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
8049 t1 = build_int_2 (invert && code == LT_EXPR, 0);
8051 else if (code == EQ_EXPR)
8052 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
8053 TREE_REAL_CST (arg1)),
8054 0);
8055 else
8056 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
8057 TREE_REAL_CST (arg1)),
8058 0);
8061 if (t1 == NULL_TREE)
8062 return t;
8064 if (invert)
8065 TREE_INT_CST_LOW (t1) ^= 1;
8067 TREE_TYPE (t1) = type;
8068 if (TREE_CODE (type) == BOOLEAN_TYPE)
8069 return (*lang_hooks.truthvalue_conversion) (t1);
8070 return t1;
8072 case COND_EXPR:
8073 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8074 so all simple results must be passed through pedantic_non_lvalue. */
8075 if (TREE_CODE (arg0) == INTEGER_CST)
8077 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8078 /* Only optimize constant conditions when the selected branch
8079 has the same type as the COND_EXPR. This avoids optimizing
8080 away "c ? x : throw", where the throw has a void type. */
8081 if (! VOID_TYPE_P (TREE_TYPE (tem))
8082 || VOID_TYPE_P (TREE_TYPE (t)))
8083 return pedantic_non_lvalue (tem);
8084 return t;
8086 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
8087 return pedantic_omit_one_operand (type, arg1, arg0);
8089 /* If we have A op B ? A : C, we may be able to convert this to a
8090 simpler expression, depending on the operation and the values
8091 of B and C. Signed zeros prevent all of these transformations,
8092 for reasons given above each one. */
8094 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8095 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8096 arg1, TREE_OPERAND (arg0, 1))
8097 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8099 tree arg2 = TREE_OPERAND (t, 2);
8100 enum tree_code comp_code = TREE_CODE (arg0);
8102 STRIP_NOPS (arg2);
8104 /* If we have A op 0 ? A : -A, consider applying the following
8105 transformations:
8107 A == 0? A : -A same as -A
8108 A != 0? A : -A same as A
8109 A >= 0? A : -A same as abs (A)
8110 A > 0? A : -A same as abs (A)
8111 A <= 0? A : -A same as -abs (A)
8112 A < 0? A : -A same as -abs (A)
8114 None of these transformations work for modes with signed
8115 zeros. If A is +/-0, the first two transformations will
8116 change the sign of the result (from +0 to -0, or vice
8117 versa). The last four will fix the sign of the result,
8118 even though the original expressions could be positive or
8119 negative, depending on the sign of A.
8121 Note that all these transformations are correct if A is
8122 NaN, since the two alternatives (A and -A) are also NaNs. */
8123 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8124 ? real_zerop (TREE_OPERAND (arg0, 1))
8125 : integer_zerop (TREE_OPERAND (arg0, 1)))
8126 && TREE_CODE (arg2) == NEGATE_EXPR
8127 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8128 switch (comp_code)
8130 case EQ_EXPR:
8131 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8132 tem = fold_convert (type, negate_expr (tem));
8133 return pedantic_non_lvalue (tem);
8134 case NE_EXPR:
8135 return pedantic_non_lvalue (fold_convert (type, arg1));
8136 case GE_EXPR:
8137 case GT_EXPR:
8138 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8139 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8140 (TREE_TYPE (arg1)), arg1);
8141 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8142 return pedantic_non_lvalue (fold_convert (type, arg1));
8143 case LE_EXPR:
8144 case LT_EXPR:
8145 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8146 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8147 (TREE_TYPE (arg1)), arg1);
8148 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8149 arg1 = negate_expr (fold_convert (type, arg1));
8150 return pedantic_non_lvalue (arg1);
8151 default:
8152 abort ();
8155 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8156 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8157 both transformations are correct when A is NaN: A != 0
8158 is then true, and A == 0 is false. */
8160 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8162 if (comp_code == NE_EXPR)
8163 return pedantic_non_lvalue (fold_convert (type, arg1));
8164 else if (comp_code == EQ_EXPR)
8165 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8168 /* Try some transformations of A op B ? A : B.
8170 A == B? A : B same as B
8171 A != B? A : B same as A
8172 A >= B? A : B same as max (A, B)
8173 A > B? A : B same as max (B, A)
8174 A <= B? A : B same as min (A, B)
8175 A < B? A : B same as min (B, A)
8177 As above, these transformations don't work in the presence
8178 of signed zeros. For example, if A and B are zeros of
8179 opposite sign, the first two transformations will change
8180 the sign of the result. In the last four, the original
8181 expressions give different results for (A=+0, B=-0) and
8182 (A=-0, B=+0), but the transformed expressions do not.
8184 The first two transformations are correct if either A or B
8185 is a NaN. In the first transformation, the condition will
8186 be false, and B will indeed be chosen. In the case of the
8187 second transformation, the condition A != B will be true,
8188 and A will be chosen.
8190 The conversions to max() and min() are not correct if B is
8191 a number and A is not. The conditions in the original
8192 expressions will be false, so all four give B. The min()
8193 and max() versions would give a NaN instead. */
8194 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8195 arg2, TREE_OPERAND (arg0, 0)))
8197 tree comp_op0 = TREE_OPERAND (arg0, 0);
8198 tree comp_op1 = TREE_OPERAND (arg0, 1);
8199 tree comp_type = TREE_TYPE (comp_op0);
8201 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8202 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8204 comp_type = type;
8205 comp_op0 = arg1;
8206 comp_op1 = arg2;
8209 switch (comp_code)
8211 case EQ_EXPR:
8212 return pedantic_non_lvalue (fold_convert (type, arg2));
8213 case NE_EXPR:
8214 return pedantic_non_lvalue (fold_convert (type, arg1));
8215 case LE_EXPR:
8216 case LT_EXPR:
8217 /* In C++ a ?: expression can be an lvalue, so put the
8218 operand which will be used if they are equal first
8219 so that we can convert this back to the
8220 corresponding COND_EXPR. */
8221 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8222 return pedantic_non_lvalue (fold_convert
8223 (type, fold (build (MIN_EXPR, comp_type,
8224 (comp_code == LE_EXPR
8225 ? comp_op0 : comp_op1),
8226 (comp_code == LE_EXPR
8227 ? comp_op1 : comp_op0)))));
8228 break;
8229 case GE_EXPR:
8230 case GT_EXPR:
8231 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8232 return pedantic_non_lvalue (fold_convert
8233 (type, fold (build (MAX_EXPR, comp_type,
8234 (comp_code == GE_EXPR
8235 ? comp_op0 : comp_op1),
8236 (comp_code == GE_EXPR
8237 ? comp_op1 : comp_op0)))));
8238 break;
8239 default:
8240 abort ();
8244 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8245 we might still be able to simplify this. For example,
8246 if C1 is one less or one more than C2, this might have started
8247 out as a MIN or MAX and been transformed by this function.
8248 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8250 if (INTEGRAL_TYPE_P (type)
8251 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8252 && TREE_CODE (arg2) == INTEGER_CST
8253 /* ??? We somehow can end up here with
8254 (unsigned int)1 == 1 ? 1U : 2U
8255 for which we won't make any progress but recurse
8256 indefinitely. Just stop here in this case. */
8257 && TREE_CODE (arg1) != INTEGER_CST)
8258 switch (comp_code)
8260 case EQ_EXPR:
8261 /* We can replace A with C1 in this case. */
8262 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8263 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8264 TREE_OPERAND (t, 2)));
8266 case LT_EXPR:
8267 /* If C1 is C2 + 1, this is min(A, C2). */
8268 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8269 && operand_equal_p (TREE_OPERAND (arg0, 1),
8270 const_binop (PLUS_EXPR, arg2,
8271 integer_one_node, 0), 1))
8272 return pedantic_non_lvalue
8273 (fold (build (MIN_EXPR, type, arg1, arg2)));
8274 break;
8276 case LE_EXPR:
8277 /* If C1 is C2 - 1, this is min(A, C2). */
8278 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8279 && operand_equal_p (TREE_OPERAND (arg0, 1),
8280 const_binop (MINUS_EXPR, arg2,
8281 integer_one_node, 0), 1))
8282 return pedantic_non_lvalue
8283 (fold (build (MIN_EXPR, type, arg1, arg2)));
8284 break;
8286 case GT_EXPR:
8287 /* If C1 is C2 - 1, this is max(A, C2). */
8288 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8289 && operand_equal_p (TREE_OPERAND (arg0, 1),
8290 const_binop (MINUS_EXPR, arg2,
8291 integer_one_node, 0), 1))
8292 return pedantic_non_lvalue
8293 (fold (build (MAX_EXPR, type, arg1, arg2)));
8294 break;
8296 case GE_EXPR:
8297 /* If C1 is C2 + 1, this is max(A, C2). */
8298 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8299 && operand_equal_p (TREE_OPERAND (arg0, 1),
8300 const_binop (PLUS_EXPR, arg2,
8301 integer_one_node, 0), 1))
8302 return pedantic_non_lvalue
8303 (fold (build (MAX_EXPR, type, arg1, arg2)));
8304 break;
8305 case NE_EXPR:
8306 break;
8307 default:
8308 abort ();
8312 /* If the second operand is simpler than the third, swap them
8313 since that produces better jump optimization results. */
8314 if (truth_value_p (TREE_CODE (arg0))
8315 && tree_swap_operands_p (TREE_OPERAND (t, 1),
8316 TREE_OPERAND (t, 2), false))
8318 /* See if this can be inverted. If it can't, possibly because
8319 it was a floating-point inequality comparison, don't do
8320 anything. */
8321 tem = invert_truthvalue (arg0);
8323 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8324 return fold (build (code, type, tem,
8325 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8328 /* Convert A ? 1 : 0 to simply A. */
8329 if (integer_onep (TREE_OPERAND (t, 1))
8330 && integer_zerop (TREE_OPERAND (t, 2))
8331 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8332 call to fold will try to move the conversion inside
8333 a COND, which will recurse. In that case, the COND_EXPR
8334 is probably the best choice, so leave it alone. */
8335 && type == TREE_TYPE (arg0))
8336 return pedantic_non_lvalue (arg0);
8338 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8339 over COND_EXPR in cases such as floating point comparisons. */
8340 if (integer_zerop (TREE_OPERAND (t, 1))
8341 && integer_onep (TREE_OPERAND (t, 2))
8342 && truth_value_p (TREE_CODE (arg0)))
8343 return pedantic_non_lvalue (fold_convert (type,
8344 invert_truthvalue (arg0)));
8346 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8347 operation is simply A & 2. */
8349 if (integer_zerop (TREE_OPERAND (t, 2))
8350 && TREE_CODE (arg0) == NE_EXPR
8351 && integer_zerop (TREE_OPERAND (arg0, 1))
8352 && integer_pow2p (arg1)
8353 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8354 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8355 arg1, 1))
8356 return pedantic_non_lvalue (fold_convert (type,
8357 TREE_OPERAND (arg0, 0)));
8359 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8360 if (integer_zerop (TREE_OPERAND (t, 2))
8361 && truth_value_p (TREE_CODE (arg0))
8362 && truth_value_p (TREE_CODE (arg1)))
8363 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8364 arg0, arg1)));
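/* E.g. a ? b : 0 with truth-valued a and b folds to a && b; the
   companion case just below turns a ? b : 1 into !a || b.  */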
8366 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8367 if (integer_onep (TREE_OPERAND (t, 2))
8368 && truth_value_p (TREE_CODE (arg0))
8369 && truth_value_p (TREE_CODE (arg1)))
8371 /* Only perform transformation if ARG0 is easily inverted. */
8372 tem = invert_truthvalue (arg0);
8373 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8374 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8375 tem, arg1)));
8378 return t;
8380 case COMPOUND_EXPR:
8381 /* When pedantic, a compound expression can be neither an lvalue
8382 nor an integer constant expression. */
8383 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8384 return t;
8385 /* Don't let (0, 0) be null pointer constant. */
8386 if (integer_zerop (arg1))
8387 return build1 (NOP_EXPR, type, arg1);
8388 return fold_convert (type, arg1);
8390 case COMPLEX_EXPR:
8391 if (wins)
8392 return build_complex (type, arg0, arg1);
8393 return t;
8395 case REALPART_EXPR:
8396 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8397 return t;
8398 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8399 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8400 TREE_OPERAND (arg0, 1));
8401 else if (TREE_CODE (arg0) == COMPLEX_CST)
8402 return TREE_REALPART (arg0);
8403 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8404 return fold (build (TREE_CODE (arg0), type,
8405 fold (build1 (REALPART_EXPR, type,
8406 TREE_OPERAND (arg0, 0))),
8407 fold (build1 (REALPART_EXPR,
8408 type, TREE_OPERAND (arg0, 1)))));
8409 return t;
8411 case IMAGPART_EXPR:
8412 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8413 return fold_convert (type, integer_zero_node);
8414 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8415 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8416 TREE_OPERAND (arg0, 0));
8417 else if (TREE_CODE (arg0) == COMPLEX_CST)
8418 return TREE_IMAGPART (arg0);
8419 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8420 return fold (build (TREE_CODE (arg0), type,
8421 fold (build1 (IMAGPART_EXPR, type,
8422 TREE_OPERAND (arg0, 0))),
8423 fold (build1 (IMAGPART_EXPR, type,
8424 TREE_OPERAND (arg0, 1)))));
8425 return t;
8427 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8428 appropriate. */
8429 case CLEANUP_POINT_EXPR:
8430 if (! has_cleanups (arg0))
8431 return TREE_OPERAND (t, 0);
8434 enum tree_code code0 = TREE_CODE (arg0);
8435 int kind0 = TREE_CODE_CLASS (code0);
8436 tree arg00 = TREE_OPERAND (arg0, 0);
8437 tree arg01;
8439 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8440 return fold (build1 (code0, type,
8441 fold (build1 (CLEANUP_POINT_EXPR,
8442 TREE_TYPE (arg00), arg00))));
8444 if (kind0 == '<' || kind0 == '2'
8445 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8446 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8447 || code0 == TRUTH_XOR_EXPR)
8449 arg01 = TREE_OPERAND (arg0, 1);
8451 if (TREE_CONSTANT (arg00)
8452 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8453 && ! has_cleanups (arg00)))
8454 return fold (build (code0, type, arg00,
8455 fold (build1 (CLEANUP_POINT_EXPR,
8456 TREE_TYPE (arg01), arg01))));
8458 if (TREE_CONSTANT (arg01))
8459 return fold (build (code0, type,
8460 fold (build1 (CLEANUP_POINT_EXPR,
8461 TREE_TYPE (arg00), arg00)),
8462 arg01));
8465 return t;
8468 case CALL_EXPR:
8469 /* Check for a built-in function. */
8470 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8471 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8472 == FUNCTION_DECL)
8473 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8475 tree tmp = fold_builtin (expr);
8476 if (tmp)
8477 return tmp;
8479 return t;
8481 default:
8482 return t;
8483 } /* switch (code) */
8486 #ifdef ENABLE_FOLD_CHECKING
8487 #undef fold
8489 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8490 static void fold_check_failed (tree, tree);
8491 void print_fold_checksum (tree);
8493 /* When --enable-checking=fold, compute a digest of expr before
8494 and after actual fold call to see if fold did not accidentally
8495 change original expr. */
8497 tree
8498 fold (tree expr)
8500 tree ret;
8501 struct md5_ctx ctx;
8502 unsigned char checksum_before[16], checksum_after[16];
8503 htab_t ht;
8505 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8506 md5_init_ctx (&ctx);
8507 fold_checksum_tree (expr, &ctx, ht);
8508 md5_finish_ctx (&ctx, checksum_before);
8509 htab_empty (ht);
8511 ret = fold_1 (expr);
8513 md5_init_ctx (&ctx);
8514 fold_checksum_tree (expr, &ctx, ht);
8515 md5_finish_ctx (&ctx, checksum_after);
8516 htab_delete (ht);
8518 if (memcmp (checksum_before, checksum_after, 16))
8519 fold_check_failed (expr, ret);
8521 return ret;
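/* A checksum mismatch above means some subroutine of fold modified
   the input tree in place instead of building a new node.  */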
8524 void
8525 print_fold_checksum (tree expr)
8527 struct md5_ctx ctx;
8528 unsigned char checksum[16], cnt;
8529 htab_t ht;
8531 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8532 md5_init_ctx (&ctx);
8533 fold_checksum_tree (expr, &ctx, ht);
8534 md5_finish_ctx (&ctx, checksum);
8535 htab_delete (ht);
8536 for (cnt = 0; cnt < 16; ++cnt)
8537 fprintf (stderr, "%02x", checksum[cnt]);
8538 putc ('\n', stderr);
8541 static void
8542 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8544 internal_error ("fold check: original tree changed by fold");
8547 static void
8548 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8550 void **slot;
8551 enum tree_code code;
8552 char buf[sizeof (struct tree_decl)];
8553 int i, len;
8555 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8556 > sizeof (struct tree_decl)
8557 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8558 abort ();
8559 if (expr == NULL)
8560 return;
8561 slot = htab_find_slot (ht, expr, INSERT);
8562 if (*slot != NULL)
8563 return;
8564 *slot = expr;
8565 code = TREE_CODE (expr);
8566 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8568 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8569 memcpy (buf, expr, tree_size (expr));
8570 expr = (tree) buf;
8571 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8573 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8575 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8576 memcpy (buf, expr, tree_size (expr));
8577 expr = (tree) buf;
8578 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8580 else if (TREE_CODE_CLASS (code) == 't'
8581 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8583 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8584 memcpy (buf, expr, tree_size (expr));
8585 expr = (tree) buf;
8586 TYPE_POINTER_TO (expr) = NULL;
8587 TYPE_REFERENCE_TO (expr) = NULL;
8589 md5_process_bytes (expr, tree_size (expr), ctx);
8590 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8591 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8592 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8593 len = TREE_CODE_LENGTH (code);
8594 switch (TREE_CODE_CLASS (code))
8596 case 'c':
8597 switch (code)
8599 case STRING_CST:
8600 md5_process_bytes (TREE_STRING_POINTER (expr),
8601 TREE_STRING_LENGTH (expr), ctx);
8602 break;
8603 case COMPLEX_CST:
8604 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8605 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8606 break;
8607 case VECTOR_CST:
8608 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8609 break;
8610 default:
8611 break;
8613 break;
8614 case 'x':
8615 switch (code)
8617 case TREE_LIST:
8618 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8619 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8620 break;
8621 case TREE_VEC:
8622 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8623 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8624 break;
8625 default:
8626 break;
8628 break;
8629 case 'e':
8630 switch (code)
8632 case SAVE_EXPR: len = 2; break;
8633 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8634 case RTL_EXPR: len = 0; break;
8635 case WITH_CLEANUP_EXPR: len = 2; break;
8636 default: break;
8638 /* Fall through. */
8639 case 'r':
8640 case '<':
8641 case '1':
8642 case '2':
8643 case 's':
8644 for (i = 0; i < len; ++i)
8645 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8646 break;
8647 case 'd':
8648 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8649 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8650 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8651 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8652 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8653 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8654 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8655 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8656 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8657 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8658 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8659 break;
8660 case 't':
8661 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8662 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8663 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8664 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8665 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8666 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8667 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8668 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8669 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8670 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8671 break;
8672 default:
8673 break;
8677 #endif
8679 /* Perform constant folding and related simplification of initializer
8680 expression EXPR. This behaves identically to "fold" but ignores
8681 potential run-time traps and exceptions that fold must preserve. */
8683 tree
8684 fold_initializer (tree expr)
8686 int saved_signaling_nans = flag_signaling_nans;
8687 int saved_trapping_math = flag_trapping_math;
8688 int saved_trapv = flag_trapv;
8689 tree result;
8691 flag_signaling_nans = 0;
8692 flag_trapping_math = 0;
8693 flag_trapv = 0;
8695 result = fold (expr);
8697 flag_signaling_nans = saved_signaling_nans;
8698 flag_trapping_math = saved_trapping_math;
8699 flag_trapv = saved_trapv;
8701 return result;
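/* Static initializers are evaluated at translation time, so the
   run-time trapping semantics of -ftrapv, -ftrapping-math and
   -fsignaling-nans need not be preserved while folding them.  */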
8704 /* Determine if first argument is a multiple of second argument. Return 0 if
8705 it is not, or if we cannot easily determine that it is.
8707 An example of the sort of thing we care about (at this point; this routine
8708 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8709 fold cases do now) is discovering that
8711 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8713 is a multiple of
8715 SAVE_EXPR (J * 8)
8717 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8719 This code also handles discovering that
8721 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8723 is a multiple of 8 so we don't have to worry about dealing with a
8724 possible remainder.
8726 Note that we *look* inside a SAVE_EXPR only to determine how it was
8727 calculated; it is not safe for fold to do much of anything else with the
8728 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8729 at run time. For example, the latter example above *cannot* be implemented
8730 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8731 evaluation time of the original SAVE_EXPR is not necessarily the same at
8732 the time the new expression is evaluated. The only optimization of this
8733 sort that would be valid is changing
8735 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8737 divided by 8 to
8739 SAVE_EXPR (I) * SAVE_EXPR (J)
8741 (where the same SAVE_EXPR (J) is used in the original and the
8742 transformed version). */
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TREE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
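
/* Illustrative sketch, not part of the original file: the MULT_EXPR
   case above matches BOTTOM against either factor, so J * 8 is proved
   a multiple of 8 with nothing known about J.  Here J stands for a
   hypothetical non-constant expression of type sizetype:  */
#if 0
  tree eight = size_int (8);
  tree top = build (MULT_EXPR, sizetype, j, eight);	/* j is hypothetical.  */

  if (multiple_of_p (sizetype, top, eight))
    ; /* TOP / 8 is exact, so an EXACT_DIV_EXPR would be valid.  */
#endif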
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TREE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TREE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_CABS:
	    case BUILT_IN_CABSL:
	    case BUILT_IN_CABSF:
	    case BUILT_IN_EXP:
	    case BUILT_IN_EXPF:
	    case BUILT_IN_EXPL:
	    case BUILT_IN_EXP2:
	    case BUILT_IN_EXP2F:
	    case BUILT_IN_EXP2L:
	    case BUILT_IN_EXP10:
	    case BUILT_IN_EXP10F:
	    case BUILT_IN_EXP10L:
	    case BUILT_IN_FABS:
	    case BUILT_IN_FABSF:
	    case BUILT_IN_FABSL:
	    case BUILT_IN_FFS:
	    case BUILT_IN_FFSL:
	    case BUILT_IN_FFSLL:
	    case BUILT_IN_PARITY:
	    case BUILT_IN_PARITYL:
	    case BUILT_IN_PARITYLL:
	    case BUILT_IN_POPCOUNT:
	    case BUILT_IN_POPCOUNTL:
	    case BUILT_IN_POPCOUNTLL:
	    case BUILT_IN_POW10:
	    case BUILT_IN_POW10F:
	    case BUILT_IN_POW10L:
	    case BUILT_IN_SQRT:
	    case BUILT_IN_SQRTF:
	    case BUILT_IN_SQRTL:
	      /* These are always non-negative.  */
	      return 1;

	    case BUILT_IN_ATAN:
	    case BUILT_IN_ATANF:
	    case BUILT_IN_ATANL:
	    case BUILT_IN_CEIL:
	    case BUILT_IN_CEILF:
	    case BUILT_IN_CEILL:
	    case BUILT_IN_FLOOR:
	    case BUILT_IN_FLOORF:
	    case BUILT_IN_FLOORL:
	    case BUILT_IN_NEARBYINT:
	    case BUILT_IN_NEARBYINTF:
	    case BUILT_IN_NEARBYINTL:
	    case BUILT_IN_ROUND:
	    case BUILT_IN_ROUNDF:
	    case BUILT_IN_ROUNDL:
	    case BUILT_IN_TRUNC:
	    case BUILT_IN_TRUNCF:
	    case BUILT_IN_TRUNCL:
	      /* These are non-negative if their argument is.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    case BUILT_IN_POW:
	    case BUILT_IN_POWF:
	    case BUILT_IN_POWL:
	      /* True if the first argument is non-negative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
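
/* Illustrative sketch, not part of the original file: for two
   "unsigned char" operands widened to 32-bit int, the PLUS_EXPR case
   above proves the sum non-negative, since MAX (8, 8) + 1 = 9 bits
   can never reach the sign bit of the 32-bit result.  uchar_expr_1
   and uchar_expr_2 stand for hypothetical unsigned char operands:  */
#if 0
  tree x = build1 (NOP_EXPR, integer_type_node, uchar_expr_1);
  tree y = build1 (NOP_EXPR, integer_type_node, uchar_expr_2);
  tree sum = build (PLUS_EXPR, integer_type_node, x, y);

  if (tree_expr_nonnegative_p (sum))
    ; /* Safe, e.g., to fold sum >= 0 to 1.  */
#endif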
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
	return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
	int units, i;
	rtx elt;

	units = CONST_VECTOR_NUNITS (r);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (r, i);
	    if (!rtl_expr_nonnegative_p (elt))
	      return 0;
	  }

	return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}
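
/* Illustrative sketch, not part of the original file: a CONST_INT
   carries a signed HOST_WIDE_INT, so the CONST_INT case above is a
   plain sign test:  */
#if 0
  rtl_expr_nonnegative_p (GEN_INT (42));	/* 1: INTVAL (r) >= 0.  */
  rtl_expr_nonnegative_p (GEN_INT (-1));	/* 0: INTVAL (r) < 0.  */
  rtl_expr_nonnegative_p (constm1_rtx);		/* Likewise 0.  */
#endif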

#include "gt-fold-const.h"