2004-02-19 David Daney <ddaney@avtrex.com>
[official-gcc.git] / gcc / fold-const.c
blob5b39098d0db8cc5486043b840bd4e8cbe27311d2
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_mathfn_p (enum built_in_function);
64 static bool negate_expr_p (tree);
65 static tree negate_expr (tree);
66 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
67 static tree associate_trees (tree, tree, enum tree_code, tree);
68 static tree int_const_binop (enum tree_code, tree, tree, int);
69 static tree const_binop (enum tree_code, tree, tree, int);
70 static hashval_t size_htab_hash (const void *);
71 static int size_htab_eq (const void *, const void *);
72 static tree fold_convert_const (enum tree_code, tree, tree);
73 static tree fold_convert (tree, tree);
74 static enum tree_code invert_tree_comparison (enum tree_code);
75 static enum tree_code swap_tree_comparison (enum tree_code);
76 static int comparison_to_compcode (enum tree_code);
77 static enum tree_code compcode_to_comparison (int);
78 static int truth_value_p (enum tree_code);
79 static int operand_equal_for_comparison_p (tree, tree, tree);
80 static int twoval_comparison_p (tree, tree *, tree *, int *);
81 static tree eval_subst (tree, tree, tree, tree, tree);
82 static tree pedantic_omit_one_operand (tree, tree, tree);
83 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
84 static tree make_bit_field_ref (tree, tree, int, int, int);
85 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
86 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
87 enum machine_mode *, int *, int *,
88 tree *, tree *);
89 static int all_ones_mask_p (tree, int);
90 static tree sign_bit_p (tree, tree);
91 static int simple_operand_p (tree);
92 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
93 static tree make_range (tree, int *, tree *, tree *);
94 static tree build_range_check (tree, tree, int, tree, tree);
95 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
96 tree);
97 static tree fold_range_test (tree);
98 static tree unextend (tree, int, int, tree);
99 static tree fold_truthop (enum tree_code, tree, tree, tree);
100 static tree optimize_minmax_comparison (tree);
101 static tree extract_muldiv (tree, tree, enum tree_code, tree);
102 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
103 static tree strip_compound_expr (tree, tree);
104 static int multiple_of_p (tree, tree, tree);
105 static tree constant_boolean_node (int, tree);
106 static int count_cond (tree, int);
107 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
108 tree, int);
109 static bool fold_real_zero_addition_p (tree, tree, int);
110 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
111 tree, tree, tree);
112 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
113 static bool reorder_operands_p (tree, tree);
114 static bool tree_swap_operands_p (tree, tree, bool);
116 /* The following constants represent a bit based encoding of GCC's
117 comparison operators. This encoding simplifies transformations
118 on relational comparison operators, such as AND and OR. */
119 #define COMPCODE_FALSE 0
120 #define COMPCODE_LT 1
121 #define COMPCODE_EQ 2
122 #define COMPCODE_LE 3
123 #define COMPCODE_GT 4
124 #define COMPCODE_NE 5
125 #define COMPCODE_GE 6
126 #define COMPCODE_TRUE 7
128 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
129 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
130 and SUM1. Then this yields nonzero if overflow occurred during the
131 addition.
133 Overflow occurs if A and B have the same sign, but A and SUM differ in
134 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
135 sign. */
136 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
138 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
139 We do that by representing the two-word integer in 4 words, with only
140 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
141 number. The value of the word is LOWPART + HIGHPART * BASE. */
143 #define LOWPART(x) \
144 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
145 #define HIGHPART(x) \
146 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
147 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
149 /* Unpack a two-word integer into 4 words.
150 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
151 WORDS points to the array of HOST_WIDE_INTs. */
153 static void
154 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
156 words[0] = LOWPART (low);
157 words[1] = HIGHPART (low);
158 words[2] = LOWPART (hi);
159 words[3] = HIGHPART (hi);
162 /* Pack an array of 4 words into a two-word integer.
163 WORDS points to the array of words.
164 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
166 static void
167 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
168 HOST_WIDE_INT *hi)
170 *low = words[0] + words[1] * BASE;
171 *hi = words[2] + words[3] * BASE;
174 /* Make the integer constant T valid for its type by setting to 0 or 1 all
175 the bits in the constant that don't belong in the type.
177 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
178 nonzero, a signed overflow has already occurred in calculating T, so
179 propagate it. */
182 force_fit_type (tree t, int overflow)
184 unsigned HOST_WIDE_INT low;
185 HOST_WIDE_INT high;
186 unsigned int prec;
188 if (TREE_CODE (t) == REAL_CST)
190 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
191 Consider doing it via real_convert now. */
192 return overflow;
195 else if (TREE_CODE (t) != INTEGER_CST)
196 return overflow;
198 low = TREE_INT_CST_LOW (t);
199 high = TREE_INT_CST_HIGH (t);
201 if (POINTER_TYPE_P (TREE_TYPE (t))
202 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
203 prec = POINTER_SIZE;
204 else
205 prec = TYPE_PRECISION (TREE_TYPE (t));
207 /* First clear all bits that are beyond the type's precision. */
209 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
211 else if (prec > HOST_BITS_PER_WIDE_INT)
212 TREE_INT_CST_HIGH (t)
213 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
214 else
216 TREE_INT_CST_HIGH (t) = 0;
217 if (prec < HOST_BITS_PER_WIDE_INT)
218 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
221 /* Unsigned types do not suffer sign extension or overflow unless they
222 are a sizetype. */
223 if (TREE_UNSIGNED (TREE_TYPE (t))
224 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
225 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
226 return overflow;
228 /* If the value's sign bit is set, extend the sign. */
229 if (prec != 2 * HOST_BITS_PER_WIDE_INT
230 && (prec > HOST_BITS_PER_WIDE_INT
231 ? 0 != (TREE_INT_CST_HIGH (t)
232 & ((HOST_WIDE_INT) 1
233 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
234 : 0 != (TREE_INT_CST_LOW (t)
235 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
237 /* Value is negative:
238 set to 1 all the bits that are outside this type's precision. */
239 if (prec > HOST_BITS_PER_WIDE_INT)
240 TREE_INT_CST_HIGH (t)
241 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
242 else
244 TREE_INT_CST_HIGH (t) = -1;
245 if (prec < HOST_BITS_PER_WIDE_INT)
246 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
250 /* Return nonzero if signed overflow occurred. */
251 return
252 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
253 != 0);
256 /* Add two doubleword integers with doubleword result.
257 Each argument is given as two `HOST_WIDE_INT' pieces.
258 One argument is L1 and H1; the other, L2 and H2.
259 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
262 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
263 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
264 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
266 unsigned HOST_WIDE_INT l;
267 HOST_WIDE_INT h;
269 l = l1 + l2;
270 h = h1 + h2 + (l < l1);
272 *lv = l;
273 *hv = h;
274 return OVERFLOW_SUM_SIGN (h1, h2, h);
277 /* Negate a doubleword integer with doubleword result.
278 Return nonzero if the operation overflows, assuming it's signed.
279 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
280 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
283 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
284 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
286 if (l1 == 0)
288 *lv = 0;
289 *hv = - h1;
290 return (*hv & h1) < 0;
292 else
294 *lv = -l1;
295 *hv = ~h1;
296 return 0;
300 /* Multiply two doubleword integers with doubleword result.
301 Return nonzero if the operation overflows, assuming it's signed.
302 Each argument is given as two `HOST_WIDE_INT' pieces.
303 One argument is L1 and H1; the other, L2 and H2.
304 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
307 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
308 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
309 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
311 HOST_WIDE_INT arg1[4];
312 HOST_WIDE_INT arg2[4];
313 HOST_WIDE_INT prod[4 * 2];
314 unsigned HOST_WIDE_INT carry;
315 int i, j, k;
316 unsigned HOST_WIDE_INT toplow, neglow;
317 HOST_WIDE_INT tophigh, neghigh;
319 encode (arg1, l1, h1);
320 encode (arg2, l2, h2);
322 memset (prod, 0, sizeof prod);
324 for (i = 0; i < 4; i++)
326 carry = 0;
327 for (j = 0; j < 4; j++)
329 k = i + j;
330 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
331 carry += arg1[i] * arg2[j];
332 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
333 carry += prod[k];
334 prod[k] = LOWPART (carry);
335 carry = HIGHPART (carry);
337 prod[i + 4] = carry;
340 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
342 /* Check for overflow by calculating the top half of the answer in full;
343 it should agree with the low half's sign bit. */
344 decode (prod + 4, &toplow, &tophigh);
345 if (h1 < 0)
347 neg_double (l2, h2, &neglow, &neghigh);
348 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
350 if (h2 < 0)
352 neg_double (l1, h1, &neglow, &neghigh);
353 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
355 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
358 /* Shift the doubleword integer in L1, H1 left by COUNT places
359 keeping only PREC bits of result.
360 Shift right if COUNT is negative.
361 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
362 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
364 void
365 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
366 HOST_WIDE_INT count, unsigned int prec,
367 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
369 unsigned HOST_WIDE_INT signmask;
371 if (count < 0)
373 rshift_double (l1, h1, -count, prec, lv, hv, arith);
374 return;
377 #ifdef SHIFT_COUNT_TRUNCATED
378 if (SHIFT_COUNT_TRUNCATED)
379 count %= prec;
380 #endif
382 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
384 /* Shifting by the host word size is undefined according to the
385 ANSI standard, so we must handle this as a special case. */
386 *hv = 0;
387 *lv = 0;
389 else if (count >= HOST_BITS_PER_WIDE_INT)
391 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
392 *lv = 0;
394 else
396 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
397 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
398 *lv = l1 << count;
401 /* Sign extend all bits that are beyond the precision. */
403 signmask = -((prec > HOST_BITS_PER_WIDE_INT
404 ? ((unsigned HOST_WIDE_INT) *hv
405 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
406 : (*lv >> (prec - 1))) & 1);
408 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
410 else if (prec >= HOST_BITS_PER_WIDE_INT)
412 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
413 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
415 else
417 *hv = signmask;
418 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
419 *lv |= signmask << prec;
423 /* Shift the doubleword integer in L1, H1 right by COUNT places
424 keeping only PREC bits of result. COUNT must be positive.
425 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
426 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
428 void
429 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
430 HOST_WIDE_INT count, unsigned int prec,
431 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
432 int arith)
434 unsigned HOST_WIDE_INT signmask;
436 signmask = (arith
437 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
438 : 0);
440 #ifdef SHIFT_COUNT_TRUNCATED
441 if (SHIFT_COUNT_TRUNCATED)
442 count %= prec;
443 #endif
445 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
447 /* Shifting by the host word size is undefined according to the
448 ANSI standard, so we must handle this as a special case. */
449 *hv = 0;
450 *lv = 0;
452 else if (count >= HOST_BITS_PER_WIDE_INT)
454 *hv = 0;
455 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
457 else
459 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
460 *lv = ((l1 >> count)
461 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
464 /* Zero / sign extend all bits that are beyond the precision. */
466 if (count >= (HOST_WIDE_INT)prec)
468 *hv = signmask;
469 *lv = signmask;
471 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
473 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
475 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
476 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
478 else
480 *hv = signmask;
481 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
482 *lv |= signmask << (prec - count);
486 /* Rotate the doubleword integer in L1, H1 left by COUNT places
487 keeping only PREC bits of result.
488 Rotate right if COUNT is negative.
489 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
491 void
492 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
493 HOST_WIDE_INT count, unsigned int prec,
494 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
496 unsigned HOST_WIDE_INT s1l, s2l;
497 HOST_WIDE_INT s1h, s2h;
499 count %= prec;
500 if (count < 0)
501 count += prec;
503 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
504 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
505 *lv = s1l | s2l;
506 *hv = s1h | s2h;
509 /* Rotate the doubleword integer in L1, H1 left by COUNT places
510 keeping only PREC bits of result. COUNT must be positive.
511 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
513 void
514 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
515 HOST_WIDE_INT count, unsigned int prec,
516 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
518 unsigned HOST_WIDE_INT s1l, s2l;
519 HOST_WIDE_INT s1h, s2h;
521 count %= prec;
522 if (count < 0)
523 count += prec;
525 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
526 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
527 *lv = s1l | s2l;
528 *hv = s1h | s2h;
531 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
532 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
533 CODE is a tree code for a kind of division, one of
534 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
535 or EXACT_DIV_EXPR
536 It controls how the quotient is rounded to an integer.
537 Return nonzero if the operation overflows.
538 UNS nonzero says do unsigned division. */
541 div_and_round_double (enum tree_code code, int uns,
542 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
543 HOST_WIDE_INT hnum_orig,
544 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
545 HOST_WIDE_INT hden_orig,
546 unsigned HOST_WIDE_INT *lquo,
547 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
548 HOST_WIDE_INT *hrem)
550 int quo_neg = 0;
551 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
552 HOST_WIDE_INT den[4], quo[4];
553 int i, j;
554 unsigned HOST_WIDE_INT work;
555 unsigned HOST_WIDE_INT carry = 0;
556 unsigned HOST_WIDE_INT lnum = lnum_orig;
557 HOST_WIDE_INT hnum = hnum_orig;
558 unsigned HOST_WIDE_INT lden = lden_orig;
559 HOST_WIDE_INT hden = hden_orig;
560 int overflow = 0;
562 if (hden == 0 && lden == 0)
563 overflow = 1, lden = 1;
565 /* Calculate quotient sign and convert operands to unsigned. */
566 if (!uns)
568 if (hnum < 0)
570 quo_neg = ~ quo_neg;
571 /* (minimum integer) / (-1) is the only overflow case. */
572 if (neg_double (lnum, hnum, &lnum, &hnum)
573 && ((HOST_WIDE_INT) lden & hden) == -1)
574 overflow = 1;
576 if (hden < 0)
578 quo_neg = ~ quo_neg;
579 neg_double (lden, hden, &lden, &hden);
583 if (hnum == 0 && hden == 0)
584 { /* single precision */
585 *hquo = *hrem = 0;
586 /* This unsigned division rounds toward zero. */
587 *lquo = lnum / lden;
588 goto finish_up;
591 if (hnum == 0)
592 { /* trivial case: dividend < divisor */
593 /* hden != 0 already checked. */
594 *hquo = *lquo = 0;
595 *hrem = hnum;
596 *lrem = lnum;
597 goto finish_up;
600 memset (quo, 0, sizeof quo);
602 memset (num, 0, sizeof num); /* to zero 9th element */
603 memset (den, 0, sizeof den);
605 encode (num, lnum, hnum);
606 encode (den, lden, hden);
608 /* Special code for when the divisor < BASE. */
609 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
611 /* hnum != 0 already checked. */
612 for (i = 4 - 1; i >= 0; i--)
614 work = num[i] + carry * BASE;
615 quo[i] = work / lden;
616 carry = work % lden;
619 else
621 /* Full double precision division,
622 with thanks to Don Knuth's "Seminumerical Algorithms". */
623 int num_hi_sig, den_hi_sig;
624 unsigned HOST_WIDE_INT quo_est, scale;
626 /* Find the highest nonzero divisor digit. */
627 for (i = 4 - 1;; i--)
628 if (den[i] != 0)
630 den_hi_sig = i;
631 break;
634 /* Insure that the first digit of the divisor is at least BASE/2.
635 This is required by the quotient digit estimation algorithm. */
637 scale = BASE / (den[den_hi_sig] + 1);
638 if (scale > 1)
639 { /* scale divisor and dividend */
640 carry = 0;
641 for (i = 0; i <= 4 - 1; i++)
643 work = (num[i] * scale) + carry;
644 num[i] = LOWPART (work);
645 carry = HIGHPART (work);
648 num[4] = carry;
649 carry = 0;
650 for (i = 0; i <= 4 - 1; i++)
652 work = (den[i] * scale) + carry;
653 den[i] = LOWPART (work);
654 carry = HIGHPART (work);
655 if (den[i] != 0) den_hi_sig = i;
659 num_hi_sig = 4;
661 /* Main loop */
662 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
664 /* Guess the next quotient digit, quo_est, by dividing the first
665 two remaining dividend digits by the high order quotient digit.
666 quo_est is never low and is at most 2 high. */
667 unsigned HOST_WIDE_INT tmp;
669 num_hi_sig = i + den_hi_sig + 1;
670 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
671 if (num[num_hi_sig] != den[den_hi_sig])
672 quo_est = work / den[den_hi_sig];
673 else
674 quo_est = BASE - 1;
676 /* Refine quo_est so it's usually correct, and at most one high. */
677 tmp = work - quo_est * den[den_hi_sig];
678 if (tmp < BASE
679 && (den[den_hi_sig - 1] * quo_est
680 > (tmp * BASE + num[num_hi_sig - 2])))
681 quo_est--;
683 /* Try QUO_EST as the quotient digit, by multiplying the
684 divisor by QUO_EST and subtracting from the remaining dividend.
685 Keep in mind that QUO_EST is the I - 1st digit. */
687 carry = 0;
688 for (j = 0; j <= den_hi_sig; j++)
690 work = quo_est * den[j] + carry;
691 carry = HIGHPART (work);
692 work = num[i + j] - LOWPART (work);
693 num[i + j] = LOWPART (work);
694 carry += HIGHPART (work) != 0;
697 /* If quo_est was high by one, then num[i] went negative and
698 we need to correct things. */
699 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
701 quo_est--;
702 carry = 0; /* add divisor back in */
703 for (j = 0; j <= den_hi_sig; j++)
705 work = num[i + j] + den[j] + carry;
706 carry = HIGHPART (work);
707 num[i + j] = LOWPART (work);
710 num [num_hi_sig] += carry;
713 /* Store the quotient digit. */
714 quo[i] = quo_est;
718 decode (quo, lquo, hquo);
720 finish_up:
721 /* If result is negative, make it so. */
722 if (quo_neg)
723 neg_double (*lquo, *hquo, lquo, hquo);
725 /* compute trial remainder: rem = num - (quo * den) */
726 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
727 neg_double (*lrem, *hrem, lrem, hrem);
728 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
730 switch (code)
732 case TRUNC_DIV_EXPR:
733 case TRUNC_MOD_EXPR: /* round toward zero */
734 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
735 return overflow;
737 case FLOOR_DIV_EXPR:
738 case FLOOR_MOD_EXPR: /* round toward negative infinity */
739 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
741 /* quo = quo - 1; */
742 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
743 lquo, hquo);
745 else
746 return overflow;
747 break;
749 case CEIL_DIV_EXPR:
750 case CEIL_MOD_EXPR: /* round toward positive infinity */
751 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
753 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
754 lquo, hquo);
756 else
757 return overflow;
758 break;
760 case ROUND_DIV_EXPR:
761 case ROUND_MOD_EXPR: /* round to closest integer */
763 unsigned HOST_WIDE_INT labs_rem = *lrem;
764 HOST_WIDE_INT habs_rem = *hrem;
765 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
766 HOST_WIDE_INT habs_den = hden, htwice;
768 /* Get absolute values. */
769 if (*hrem < 0)
770 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
771 if (hden < 0)
772 neg_double (lden, hden, &labs_den, &habs_den);
774 /* If (2 * abs (lrem) >= abs (lden)) */
775 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
776 labs_rem, habs_rem, &ltwice, &htwice);
778 if (((unsigned HOST_WIDE_INT) habs_den
779 < (unsigned HOST_WIDE_INT) htwice)
780 || (((unsigned HOST_WIDE_INT) habs_den
781 == (unsigned HOST_WIDE_INT) htwice)
782 && (labs_den < ltwice)))
784 if (*hquo < 0)
785 /* quo = quo - 1; */
786 add_double (*lquo, *hquo,
787 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
788 else
789 /* quo = quo + 1; */
790 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
791 lquo, hquo);
793 else
794 return overflow;
796 break;
798 default:
799 abort ();
802 /* Compute true remainder: rem = num - (quo * den) */
803 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
804 neg_double (*lrem, *hrem, lrem, hrem);
805 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
806 return overflow;
809 /* Return true if built-in mathematical function specified by CODE
810 preserves the sign of it argument, i.e. -f(x) == f(-x). */
812 static bool
813 negate_mathfn_p (enum built_in_function code)
815 switch (code)
817 case BUILT_IN_ASIN:
818 case BUILT_IN_ASINF:
819 case BUILT_IN_ASINL:
820 case BUILT_IN_ATAN:
821 case BUILT_IN_ATANF:
822 case BUILT_IN_ATANL:
823 case BUILT_IN_SIN:
824 case BUILT_IN_SINF:
825 case BUILT_IN_SINL:
826 case BUILT_IN_TAN:
827 case BUILT_IN_TANF:
828 case BUILT_IN_TANL:
829 return true;
831 default:
832 break;
834 return false;
837 /* Determine whether an expression T can be cheaply negated using
838 the function negate_expr. */
840 static bool
841 negate_expr_p (tree t)
843 unsigned HOST_WIDE_INT val;
844 unsigned int prec;
845 tree type;
847 if (t == 0)
848 return false;
850 type = TREE_TYPE (t);
852 STRIP_SIGN_NOPS (t);
853 switch (TREE_CODE (t))
855 case INTEGER_CST:
856 if (TREE_UNSIGNED (type) || ! flag_trapv)
857 return true;
859 /* Check that -CST will not overflow type. */
860 prec = TYPE_PRECISION (type);
861 if (prec > HOST_BITS_PER_WIDE_INT)
863 if (TREE_INT_CST_LOW (t) != 0)
864 return true;
865 prec -= HOST_BITS_PER_WIDE_INT;
866 val = TREE_INT_CST_HIGH (t);
868 else
869 val = TREE_INT_CST_LOW (t);
870 if (prec < HOST_BITS_PER_WIDE_INT)
871 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
872 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
874 case REAL_CST:
875 case NEGATE_EXPR:
876 return true;
878 case COMPLEX_CST:
879 return negate_expr_p (TREE_REALPART (t))
880 && negate_expr_p (TREE_IMAGPART (t));
882 case PLUS_EXPR:
883 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
884 return false;
885 /* -(A + B) -> (-B) - A. */
886 if (negate_expr_p (TREE_OPERAND (t, 1))
887 && reorder_operands_p (TREE_OPERAND (t, 0),
888 TREE_OPERAND (t, 1)))
889 return true;
890 /* -(A + B) -> (-A) - B. */
891 return negate_expr_p (TREE_OPERAND (t, 0));
893 case MINUS_EXPR:
894 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
895 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
896 && reorder_operands_p (TREE_OPERAND (t, 0),
897 TREE_OPERAND (t, 1));
899 case MULT_EXPR:
900 if (TREE_UNSIGNED (TREE_TYPE (t)))
901 break;
903 /* Fall through. */
905 case RDIV_EXPR:
906 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
907 return negate_expr_p (TREE_OPERAND (t, 1))
908 || negate_expr_p (TREE_OPERAND (t, 0));
909 break;
911 case NOP_EXPR:
912 /* Negate -((double)float) as (double)(-float). */
913 if (TREE_CODE (type) == REAL_TYPE)
915 tree tem = strip_float_extensions (t);
916 if (tem != t)
917 return negate_expr_p (tem);
919 break;
921 case CALL_EXPR:
922 /* Negate -f(x) as f(-x). */
923 if (negate_mathfn_p (builtin_mathfn_code (t)))
924 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
925 break;
927 default:
928 break;
930 return false;
933 /* Given T, an expression, return the negation of T. Allow for T to be
934 null, in which case return null. */
936 static tree
937 negate_expr (tree t)
939 tree type;
940 tree tem;
942 if (t == 0)
943 return 0;
945 type = TREE_TYPE (t);
946 STRIP_SIGN_NOPS (t);
948 switch (TREE_CODE (t))
950 case INTEGER_CST:
952 unsigned HOST_WIDE_INT low;
953 HOST_WIDE_INT high;
954 int overflow = neg_double (TREE_INT_CST_LOW (t),
955 TREE_INT_CST_HIGH (t),
956 &low, &high);
957 tem = build_int_2 (low, high);
958 TREE_TYPE (tem) = type;
959 TREE_OVERFLOW (tem)
960 = (TREE_OVERFLOW (t)
961 | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
962 TREE_CONSTANT_OVERFLOW (tem)
963 = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
965 if (! TREE_OVERFLOW (tem)
966 || TREE_UNSIGNED (type)
967 || ! flag_trapv)
968 return tem;
969 break;
971 case REAL_CST:
972 tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
973 /* Two's complement FP formats, such as c4x, may overflow. */
974 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
975 return fold_convert (type, tem);
976 break;
978 case COMPLEX_CST:
980 tree rpart = negate_expr (TREE_REALPART (t));
981 tree ipart = negate_expr (TREE_IMAGPART (t));
983 if ((TREE_CODE (rpart) == REAL_CST
984 && TREE_CODE (ipart) == REAL_CST)
985 || (TREE_CODE (rpart) == INTEGER_CST
986 && TREE_CODE (ipart) == INTEGER_CST))
987 return build_complex (type, rpart, ipart);
989 break;
991 case NEGATE_EXPR:
992 return fold_convert (type, TREE_OPERAND (t, 0));
994 case PLUS_EXPR:
995 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
997 /* -(A + B) -> (-B) - A. */
998 if (negate_expr_p (TREE_OPERAND (t, 1))
999 && reorder_operands_p (TREE_OPERAND (t, 0),
1000 TREE_OPERAND (t, 1)))
1001 return fold_convert (type,
1002 fold (build (MINUS_EXPR, TREE_TYPE (t),
1003 negate_expr (TREE_OPERAND (t, 1)),
1004 TREE_OPERAND (t, 0))));
1005 /* -(A + B) -> (-A) - B. */
1006 if (negate_expr_p (TREE_OPERAND (t, 0)))
1007 return fold_convert (type,
1008 fold (build (MINUS_EXPR, TREE_TYPE (t),
1009 negate_expr (TREE_OPERAND (t, 0)),
1010 TREE_OPERAND (t, 1))));
1012 break;
1014 case MINUS_EXPR:
1015 /* - (A - B) -> B - A */
1016 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1017 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1018 return fold_convert (type,
1019 fold (build (MINUS_EXPR, TREE_TYPE (t),
1020 TREE_OPERAND (t, 1),
1021 TREE_OPERAND (t, 0))));
1022 break;
1024 case MULT_EXPR:
1025 if (TREE_UNSIGNED (TREE_TYPE (t)))
1026 break;
1028 /* Fall through. */
1030 case RDIV_EXPR:
1031 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1033 tem = TREE_OPERAND (t, 1);
1034 if (negate_expr_p (tem))
1035 return fold_convert (type,
1036 fold (build (TREE_CODE (t), TREE_TYPE (t),
1037 TREE_OPERAND (t, 0),
1038 negate_expr (tem))));
1039 tem = TREE_OPERAND (t, 0);
1040 if (negate_expr_p (tem))
1041 return fold_convert (type,
1042 fold (build (TREE_CODE (t), TREE_TYPE (t),
1043 negate_expr (tem),
1044 TREE_OPERAND (t, 1))));
1046 break;
1048 case NOP_EXPR:
1049 /* Convert -((double)float) into (double)(-float). */
1050 if (TREE_CODE (type) == REAL_TYPE)
1052 tem = strip_float_extensions (t);
1053 if (tem != t && negate_expr_p (tem))
1054 return fold_convert (type, negate_expr (tem));
1056 break;
1058 case CALL_EXPR:
1059 /* Negate -f(x) as f(-x). */
1060 if (negate_mathfn_p (builtin_mathfn_code (t))
1061 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1063 tree fndecl, arg, arglist;
1065 fndecl = get_callee_fndecl (t);
1066 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1067 arglist = build_tree_list (NULL_TREE, arg);
1068 return build_function_call_expr (fndecl, arglist);
1070 break;
1072 default:
1073 break;
1076 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1077 return fold_convert (type, tem);
1080 /* Split a tree IN into a constant, literal and variable parts that could be
1081 combined with CODE to make IN. "constant" means an expression with
1082 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1083 commutative arithmetic operation. Store the constant part into *CONP,
1084 the literal in *LITP and return the variable part. If a part isn't
1085 present, set it to null. If the tree does not decompose in this way,
1086 return the entire tree as the variable part and the other parts as null.
1088 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1089 case, we negate an operand that was subtracted. Except if it is a
1090 literal for which we use *MINUS_LITP instead.
1092 If NEGATE_P is true, we are negating all of IN, again except a literal
1093 for which we use *MINUS_LITP instead.
1095 If IN is itself a literal or constant, return it as appropriate.
1097 Note that we do not guarantee that any of the three values will be the
1098 same type as IN, but they will have the same signedness and mode. */
1100 static tree
1101 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1102 tree *minus_litp, int negate_p)
1104 tree var = 0;
1106 *conp = 0;
1107 *litp = 0;
1108 *minus_litp = 0;
1110 /* Strip any conversions that don't change the machine mode or signedness. */
1111 STRIP_SIGN_NOPS (in);
1113 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1114 *litp = in;
1115 else if (TREE_CODE (in) == code
1116 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1117 /* We can associate addition and subtraction together (even
1118 though the C standard doesn't say so) for integers because
1119 the value is not affected. For reals, the value might be
1120 affected, so we can't. */
1121 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1122 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1124 tree op0 = TREE_OPERAND (in, 0);
1125 tree op1 = TREE_OPERAND (in, 1);
1126 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1127 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1129 /* First see if either of the operands is a literal, then a constant. */
1130 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1131 *litp = op0, op0 = 0;
1132 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1133 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1135 if (op0 != 0 && TREE_CONSTANT (op0))
1136 *conp = op0, op0 = 0;
1137 else if (op1 != 0 && TREE_CONSTANT (op1))
1138 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1140 /* If we haven't dealt with either operand, this is not a case we can
1141 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1142 if (op0 != 0 && op1 != 0)
1143 var = in;
1144 else if (op0 != 0)
1145 var = op0;
1146 else
1147 var = op1, neg_var_p = neg1_p;
1149 /* Now do any needed negations. */
1150 if (neg_litp_p)
1151 *minus_litp = *litp, *litp = 0;
1152 if (neg_conp_p)
1153 *conp = negate_expr (*conp);
1154 if (neg_var_p)
1155 var = negate_expr (var);
1157 else if (TREE_CONSTANT (in))
1158 *conp = in;
1159 else
1160 var = in;
1162 if (negate_p)
1164 if (*litp)
1165 *minus_litp = *litp, *litp = 0;
1166 else if (*minus_litp)
1167 *litp = *minus_litp, *minus_litp = 0;
1168 *conp = negate_expr (*conp);
1169 var = negate_expr (var);
1172 return var;
1175 /* Re-associate trees split by the above function. T1 and T2 are either
1176 expressions to associate or null. Return the new expression, if any. If
1177 we build an operation, do it in TYPE and with CODE. */
1179 static tree
1180 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1182 if (t1 == 0)
1183 return t2;
1184 else if (t2 == 0)
1185 return t1;
1187 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1188 try to fold this since we will have infinite recursion. But do
1189 deal with any NEGATE_EXPRs. */
1190 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1191 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1193 if (code == PLUS_EXPR)
1195 if (TREE_CODE (t1) == NEGATE_EXPR)
1196 return build (MINUS_EXPR, type, fold_convert (type, t2),
1197 fold_convert (type, TREE_OPERAND (t1, 0)));
1198 else if (TREE_CODE (t2) == NEGATE_EXPR)
1199 return build (MINUS_EXPR, type, fold_convert (type, t1),
1200 fold_convert (type, TREE_OPERAND (t2, 0)));
1202 return build (code, type, fold_convert (type, t1),
1203 fold_convert (type, t2));
1206 return fold (build (code, type, fold_convert (type, t1),
1207 fold_convert (type, t2)));
1210 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1211 to produce a new constant.
1213 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1215 static tree
1216 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1218 unsigned HOST_WIDE_INT int1l, int2l;
1219 HOST_WIDE_INT int1h, int2h;
1220 unsigned HOST_WIDE_INT low;
1221 HOST_WIDE_INT hi;
1222 unsigned HOST_WIDE_INT garbagel;
1223 HOST_WIDE_INT garbageh;
1224 tree t;
1225 tree type = TREE_TYPE (arg1);
1226 int uns = TREE_UNSIGNED (type);
1227 int is_sizetype
1228 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1229 int overflow = 0;
1230 int no_overflow = 0;
1232 int1l = TREE_INT_CST_LOW (arg1);
1233 int1h = TREE_INT_CST_HIGH (arg1);
1234 int2l = TREE_INT_CST_LOW (arg2);
1235 int2h = TREE_INT_CST_HIGH (arg2);
1237 switch (code)
1239 case BIT_IOR_EXPR:
1240 low = int1l | int2l, hi = int1h | int2h;
1241 break;
1243 case BIT_XOR_EXPR:
1244 low = int1l ^ int2l, hi = int1h ^ int2h;
1245 break;
1247 case BIT_AND_EXPR:
1248 low = int1l & int2l, hi = int1h & int2h;
1249 break;
1251 case RSHIFT_EXPR:
1252 int2l = -int2l;
1253 case LSHIFT_EXPR:
1254 /* It's unclear from the C standard whether shifts can overflow.
1255 The following code ignores overflow; perhaps a C standard
1256 interpretation ruling is needed. */
1257 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1258 &low, &hi, !uns);
1259 no_overflow = 1;
1260 break;
1262 case RROTATE_EXPR:
1263 int2l = - int2l;
1264 case LROTATE_EXPR:
1265 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1266 &low, &hi);
1267 break;
1269 case PLUS_EXPR:
1270 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1271 break;
1273 case MINUS_EXPR:
1274 neg_double (int2l, int2h, &low, &hi);
1275 add_double (int1l, int1h, low, hi, &low, &hi);
1276 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1277 break;
1279 case MULT_EXPR:
1280 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1281 break;
1283 case TRUNC_DIV_EXPR:
1284 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1285 case EXACT_DIV_EXPR:
1286 /* This is a shortcut for a common special case. */
1287 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1288 && ! TREE_CONSTANT_OVERFLOW (arg1)
1289 && ! TREE_CONSTANT_OVERFLOW (arg2)
1290 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1292 if (code == CEIL_DIV_EXPR)
1293 int1l += int2l - 1;
1295 low = int1l / int2l, hi = 0;
1296 break;
1299 /* ... fall through ... */
1301 case ROUND_DIV_EXPR:
1302 if (int2h == 0 && int2l == 1)
1304 low = int1l, hi = int1h;
1305 break;
1307 if (int1l == int2l && int1h == int2h
1308 && ! (int1l == 0 && int1h == 0))
1310 low = 1, hi = 0;
1311 break;
1313 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1314 &low, &hi, &garbagel, &garbageh);
1315 break;
1317 case TRUNC_MOD_EXPR:
1318 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1319 /* This is a shortcut for a common special case. */
1320 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1321 && ! TREE_CONSTANT_OVERFLOW (arg1)
1322 && ! TREE_CONSTANT_OVERFLOW (arg2)
1323 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1325 if (code == CEIL_MOD_EXPR)
1326 int1l += int2l - 1;
1327 low = int1l % int2l, hi = 0;
1328 break;
1331 /* ... fall through ... */
1333 case ROUND_MOD_EXPR:
1334 overflow = div_and_round_double (code, uns,
1335 int1l, int1h, int2l, int2h,
1336 &garbagel, &garbageh, &low, &hi);
1337 break;
1339 case MIN_EXPR:
1340 case MAX_EXPR:
1341 if (uns)
1342 low = (((unsigned HOST_WIDE_INT) int1h
1343 < (unsigned HOST_WIDE_INT) int2h)
1344 || (((unsigned HOST_WIDE_INT) int1h
1345 == (unsigned HOST_WIDE_INT) int2h)
1346 && int1l < int2l));
1347 else
1348 low = (int1h < int2h
1349 || (int1h == int2h && int1l < int2l));
1351 if (low == (code == MIN_EXPR))
1352 low = int1l, hi = int1h;
1353 else
1354 low = int2l, hi = int2h;
1355 break;
1357 default:
1358 abort ();
1361 /* If this is for a sizetype, can be represented as one (signed)
1362 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1363 constants. */
1364 if (is_sizetype
1365 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1366 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1367 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1368 return size_int_type_wide (low, type);
1369 else
1371 t = build_int_2 (low, hi);
1372 TREE_TYPE (t) = TREE_TYPE (arg1);
1375 TREE_OVERFLOW (t)
1376 = ((notrunc
1377 ? (!uns || is_sizetype) && overflow
1378 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1379 && ! no_overflow))
1380 | TREE_OVERFLOW (arg1)
1381 | TREE_OVERFLOW (arg2));
1383 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1384 So check if force_fit_type truncated the value. */
1385 if (is_sizetype
1386 && ! TREE_OVERFLOW (t)
1387 && (TREE_INT_CST_HIGH (t) != hi
1388 || TREE_INT_CST_LOW (t) != low))
1389 TREE_OVERFLOW (t) = 1;
1391 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1392 | TREE_CONSTANT_OVERFLOW (arg1)
1393 | TREE_CONSTANT_OVERFLOW (arg2));
1394 return t;
1397 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1398 constant. We assume ARG1 and ARG2 have the same data type, or at least
1399 are the same kind of constant and the same machine mode.
1401 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1403 static tree
1404 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1406 STRIP_NOPS (arg1);
1407 STRIP_NOPS (arg2);
1409 if (TREE_CODE (arg1) == INTEGER_CST)
1410 return int_const_binop (code, arg1, arg2, notrunc);
1412 if (TREE_CODE (arg1) == REAL_CST)
1414 enum machine_mode mode;
1415 REAL_VALUE_TYPE d1;
1416 REAL_VALUE_TYPE d2;
1417 REAL_VALUE_TYPE value;
1418 tree t, type;
1420 d1 = TREE_REAL_CST (arg1);
1421 d2 = TREE_REAL_CST (arg2);
1423 type = TREE_TYPE (arg1);
1424 mode = TYPE_MODE (type);
1426 /* Don't perform operation if we honor signaling NaNs and
1427 either operand is a NaN. */
1428 if (HONOR_SNANS (mode)
1429 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1430 return NULL_TREE;
1432 /* Don't perform operation if it would raise a division
1433 by zero exception. */
1434 if (code == RDIV_EXPR
1435 && REAL_VALUES_EQUAL (d2, dconst0)
1436 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1437 return NULL_TREE;
1439 /* If either operand is a NaN, just return it. Otherwise, set up
1440 for floating-point trap; we return an overflow. */
1441 if (REAL_VALUE_ISNAN (d1))
1442 return arg1;
1443 else if (REAL_VALUE_ISNAN (d2))
1444 return arg2;
1446 REAL_ARITHMETIC (value, code, d1, d2);
1448 t = build_real (type, real_value_truncate (mode, value));
1450 TREE_OVERFLOW (t)
1451 = (force_fit_type (t, 0)
1452 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1453 TREE_CONSTANT_OVERFLOW (t)
1454 = TREE_OVERFLOW (t)
1455 | TREE_CONSTANT_OVERFLOW (arg1)
1456 | TREE_CONSTANT_OVERFLOW (arg2);
1457 return t;
1459 if (TREE_CODE (arg1) == COMPLEX_CST)
1461 tree type = TREE_TYPE (arg1);
1462 tree r1 = TREE_REALPART (arg1);
1463 tree i1 = TREE_IMAGPART (arg1);
1464 tree r2 = TREE_REALPART (arg2);
1465 tree i2 = TREE_IMAGPART (arg2);
1466 tree t;
1468 switch (code)
1470 case PLUS_EXPR:
1471 t = build_complex (type,
1472 const_binop (PLUS_EXPR, r1, r2, notrunc),
1473 const_binop (PLUS_EXPR, i1, i2, notrunc));
1474 break;
1476 case MINUS_EXPR:
1477 t = build_complex (type,
1478 const_binop (MINUS_EXPR, r1, r2, notrunc),
1479 const_binop (MINUS_EXPR, i1, i2, notrunc));
1480 break;
1482 case MULT_EXPR:
1483 t = build_complex (type,
1484 const_binop (MINUS_EXPR,
1485 const_binop (MULT_EXPR,
1486 r1, r2, notrunc),
1487 const_binop (MULT_EXPR,
1488 i1, i2, notrunc),
1489 notrunc),
1490 const_binop (PLUS_EXPR,
1491 const_binop (MULT_EXPR,
1492 r1, i2, notrunc),
1493 const_binop (MULT_EXPR,
1494 i1, r2, notrunc),
1495 notrunc));
1496 break;
1498 case RDIV_EXPR:
1500 tree magsquared
1501 = const_binop (PLUS_EXPR,
1502 const_binop (MULT_EXPR, r2, r2, notrunc),
1503 const_binop (MULT_EXPR, i2, i2, notrunc),
1504 notrunc);
1506 t = build_complex (type,
1507 const_binop
1508 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1509 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1510 const_binop (PLUS_EXPR,
1511 const_binop (MULT_EXPR, r1, r2,
1512 notrunc),
1513 const_binop (MULT_EXPR, i1, i2,
1514 notrunc),
1515 notrunc),
1516 magsquared, notrunc),
1517 const_binop
1518 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1519 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1520 const_binop (MINUS_EXPR,
1521 const_binop (MULT_EXPR, i1, r2,
1522 notrunc),
1523 const_binop (MULT_EXPR, r1, i2,
1524 notrunc),
1525 notrunc),
1526 magsquared, notrunc));
1528 break;
1530 default:
1531 abort ();
1533 return t;
1535 return 0;
1538 /* These are the hash table functions for the hash table of INTEGER_CST
1539 nodes of a sizetype. */
1541 /* Return the hash code code X, an INTEGER_CST. */
1543 static hashval_t
1544 size_htab_hash (const void *x)
1546 tree t = (tree) x;
1548 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1549 ^ htab_hash_pointer (TREE_TYPE (t))
1550 ^ (TREE_OVERFLOW (t) << 20));
1553 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1554 is the same as that given by *Y, which is the same. */
1556 static int
1557 size_htab_eq (const void *x, const void *y)
1559 tree xt = (tree) x;
1560 tree yt = (tree) y;
1562 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1563 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1564 && TREE_TYPE (xt) == TREE_TYPE (yt)
1565 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1568 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1569 bits are given by NUMBER and of the sizetype represented by KIND. */
1571 tree
1572 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1574 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1577 /* Likewise, but the desired type is specified explicitly. */
1579 static GTY (()) tree new_const;
1580 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1581 htab_t size_htab;
1583 tree
1584 size_int_type_wide (HOST_WIDE_INT number, tree type)
1586 void **slot;
1588 if (size_htab == 0)
1590 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1591 new_const = make_node (INTEGER_CST);
1594 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1595 hash table, we return the value from the hash table. Otherwise, we
1596 place that in the hash table and make a new node for the next time. */
1597 TREE_INT_CST_LOW (new_const) = number;
1598 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1599 TREE_TYPE (new_const) = type;
1600 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1601 = force_fit_type (new_const, 0);
1603 slot = htab_find_slot (size_htab, new_const, INSERT);
1604 if (*slot == 0)
1606 tree t = new_const;
1608 *slot = new_const;
1609 new_const = make_node (INTEGER_CST);
1610 return t;
1612 else
1613 return (tree) *slot;
1616 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1617 is a tree code. The type of the result is taken from the operands.
1618 Both must be the same type integer type and it must be a size type.
1619 If the operands are constant, so is the result. */
1621 tree
1622 size_binop (enum tree_code code, tree arg0, tree arg1)
1624 tree type = TREE_TYPE (arg0);
1626 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1627 || type != TREE_TYPE (arg1))
1628 abort ();
1630 /* Handle the special case of two integer constants faster. */
1631 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1633 /* And some specific cases even faster than that. */
1634 if (code == PLUS_EXPR && integer_zerop (arg0))
1635 return arg1;
1636 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1637 && integer_zerop (arg1))
1638 return arg0;
1639 else if (code == MULT_EXPR && integer_onep (arg0))
1640 return arg1;
1642 /* Handle general case of two integer constants. */
1643 return int_const_binop (code, arg0, arg1, 0);
1646 if (arg0 == error_mark_node || arg1 == error_mark_node)
1647 return error_mark_node;
1649 return fold (build (code, type, arg0, arg1));
1652 /* Given two values, either both of sizetype or both of bitsizetype,
1653 compute the difference between the two values. Return the value
1654 in signed type corresponding to the type of the operands. */
1656 tree
1657 size_diffop (tree arg0, tree arg1)
1659 tree type = TREE_TYPE (arg0);
1660 tree ctype;
1662 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1663 || type != TREE_TYPE (arg1))
1664 abort ();
1666 /* If the type is already signed, just do the simple thing. */
1667 if (! TREE_UNSIGNED (type))
1668 return size_binop (MINUS_EXPR, arg0, arg1);
1670 ctype = (type == bitsizetype || type == ubitsizetype
1671 ? sbitsizetype : ssizetype);
1673 /* If either operand is not a constant, do the conversions to the signed
1674 type and subtract. The hardware will do the right thing with any
1675 overflow in the subtraction. */
1676 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1677 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1678 fold_convert (ctype, arg1));
1680 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1681 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1682 overflow) and negate (which can't either). Special-case a result
1683 of zero while we're here. */
1684 if (tree_int_cst_equal (arg0, arg1))
1685 return fold_convert (ctype, integer_zero_node);
1686 else if (tree_int_cst_lt (arg1, arg0))
1687 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1688 else
1689 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1690 fold_convert (ctype, size_binop (MINUS_EXPR,
1691 arg1, arg0)));
1695 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1696 type TYPE. If no simplification can be done return NULL_TREE. */
1698 static tree
1699 fold_convert_const (enum tree_code code, tree type, tree arg1)
1701 int overflow = 0;
1702 tree t;
1704 if (TREE_TYPE (arg1) == type)
1705 return arg1;
1707 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1709 if (TREE_CODE (arg1) == INTEGER_CST)
1711 /* If we would build a constant wider than GCC supports,
1712 leave the conversion unfolded. */
1713 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1714 return NULL_TREE;
1716 /* If we are trying to make a sizetype for a small integer, use
1717 size_int to pick up cached types to reduce duplicate nodes. */
1718 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1719 && !TREE_CONSTANT_OVERFLOW (arg1)
1720 && compare_tree_int (arg1, 10000) < 0)
1721 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1723 /* Given an integer constant, make new constant with new type,
1724 appropriately sign-extended or truncated. */
1725 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1726 TREE_INT_CST_HIGH (arg1));
1727 TREE_TYPE (t) = type;
1728 /* Indicate an overflow if (1) ARG1 already overflowed,
1729 or (2) force_fit_type indicates an overflow.
1730 Tell force_fit_type that an overflow has already occurred
1731 if ARG1 is a too-large unsigned value and T is signed.
1732 But don't indicate an overflow if converting a pointer. */
1733 TREE_OVERFLOW (t)
1734 = ((force_fit_type (t,
1735 (TREE_INT_CST_HIGH (arg1) < 0
1736 && (TREE_UNSIGNED (type)
1737 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1738 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1739 || TREE_OVERFLOW (arg1));
1740 TREE_CONSTANT_OVERFLOW (t)
1741 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1742 return t;
1744 else if (TREE_CODE (arg1) == REAL_CST)
1746 /* The following code implements the floating point to integer
1747 conversion rules required by the Java Language Specification,
1748 that IEEE NaNs are mapped to zero and values that overflow
1749 the target precision saturate, i.e. values greater than
1750 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1751 are mapped to INT_MIN. These semantics are allowed by the
1752 C and C++ standards that simply state that the behavior of
1753 FP-to-integer conversion is unspecified upon overflow. */
1755 HOST_WIDE_INT high, low;
1757 REAL_VALUE_TYPE r;
1758 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1760 switch (code)
1762 case FIX_TRUNC_EXPR:
1763 real_trunc (&r, VOIDmode, &x);
1764 break;
1766 case FIX_CEIL_EXPR:
1767 real_ceil (&r, VOIDmode, &x);
1768 break;
1770 case FIX_FLOOR_EXPR:
1771 real_floor (&r, VOIDmode, &x);
1772 break;
1774 default:
1775 abort ();
1778 /* If R is NaN, return zero and show we have an overflow. */
1779 if (REAL_VALUE_ISNAN (r))
1781 overflow = 1;
1782 high = 0;
1783 low = 0;
1786 /* See if R is less than the lower bound or greater than the
1787 upper bound. */
1789 if (! overflow)
1791 tree lt = TYPE_MIN_VALUE (type);
1792 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1793 if (REAL_VALUES_LESS (r, l))
1795 overflow = 1;
1796 high = TREE_INT_CST_HIGH (lt);
1797 low = TREE_INT_CST_LOW (lt);
1801 if (! overflow)
1803 tree ut = TYPE_MAX_VALUE (type);
1804 if (ut)
1806 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1807 if (REAL_VALUES_LESS (u, r))
1809 overflow = 1;
1810 high = TREE_INT_CST_HIGH (ut);
1811 low = TREE_INT_CST_LOW (ut);
1816 if (! overflow)
1817 REAL_VALUE_TO_INT (&low, &high, r);
1819 t = build_int_2 (low, high);
1820 TREE_TYPE (t) = type;
1821 TREE_OVERFLOW (t)
1822 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1823 TREE_CONSTANT_OVERFLOW (t)
1824 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1825 return t;
1828 else if (TREE_CODE (type) == REAL_TYPE)
1830 if (TREE_CODE (arg1) == INTEGER_CST)
1831 return build_real_from_int_cst (type, arg1);
1832 if (TREE_CODE (arg1) == REAL_CST)
1834 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1836 /* We make a copy of ARG1 so that we don't modify an
1837 existing constant tree. */
1838 t = copy_node (arg1);
1839 TREE_TYPE (t) = type;
1840 return t;
1843 t = build_real (type,
1844 real_value_truncate (TYPE_MODE (type),
1845 TREE_REAL_CST (arg1)));
1847 TREE_OVERFLOW (t)
1848 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1849 TREE_CONSTANT_OVERFLOW (t)
1850 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1851 return t;
1854 return NULL_TREE;
1857 /* Convert expression ARG to type TYPE. Used by the middle-end for
1858 simple conversions in preference to calling the front-end's convert. */
1860 static tree
1861 fold_convert (tree type, tree arg)
1863 tree orig = TREE_TYPE (arg);
1864 tree tem;
1866 if (type == orig)
1867 return arg;
1869 if (TREE_CODE (arg) == ERROR_MARK
1870 || TREE_CODE (type) == ERROR_MARK
1871 || TREE_CODE (orig) == ERROR_MARK)
1872 return error_mark_node;
1874 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1875 return fold (build1 (NOP_EXPR, type, arg));
1877 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1879 if (TREE_CODE (arg) == INTEGER_CST)
1881 tem = fold_convert_const (NOP_EXPR, type, arg);
1882 if (tem != NULL_TREE)
1883 return tem;
1885 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1886 return fold (build1 (NOP_EXPR, type, arg));
1887 if (TREE_CODE (orig) == COMPLEX_TYPE)
1889 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1890 return fold_convert (type, tem);
1892 if (TREE_CODE (orig) == VECTOR_TYPE
1893 && GET_MODE_SIZE (TYPE_MODE (type))
1894 == GET_MODE_SIZE (TYPE_MODE (orig)))
1895 return fold (build1 (NOP_EXPR, type, arg));
1897 else if (TREE_CODE (type) == REAL_TYPE)
1899 if (TREE_CODE (arg) == INTEGER_CST)
1901 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1902 if (tem != NULL_TREE)
1903 return tem;
1905 else if (TREE_CODE (arg) == REAL_CST)
1907 tem = fold_convert_const (NOP_EXPR, type, arg);
1908 if (tem != NULL_TREE)
1909 return tem;
1912 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1913 return fold (build1 (FLOAT_EXPR, type, arg));
1914 if (TREE_CODE (orig) == REAL_TYPE)
1915 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1916 type, arg));
1917 if (TREE_CODE (orig) == COMPLEX_TYPE)
1919 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1920 return fold_convert (type, tem);
1923 else if (TREE_CODE (type) == COMPLEX_TYPE)
1925 if (INTEGRAL_TYPE_P (orig)
1926 || POINTER_TYPE_P (orig)
1927 || TREE_CODE (orig) == REAL_TYPE)
1928 return build (COMPLEX_EXPR, type,
1929 fold_convert (TREE_TYPE (type), arg),
1930 fold_convert (TREE_TYPE (type), integer_zero_node));
1931 if (TREE_CODE (orig) == COMPLEX_TYPE)
1933 tree rpart, ipart;
1935 if (TREE_CODE (arg) == COMPLEX_EXPR)
1937 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1938 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1939 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1942 arg = save_expr (arg);
1943 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1944 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1945 rpart = fold_convert (TREE_TYPE (type), rpart);
1946 ipart = fold_convert (TREE_TYPE (type), ipart);
1947 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1950 else if (TREE_CODE (type) == VECTOR_TYPE)
1952 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1953 && GET_MODE_SIZE (TYPE_MODE (type))
1954 == GET_MODE_SIZE (TYPE_MODE (orig)))
1955 return fold (build1 (NOP_EXPR, type, arg));
1956 if (TREE_CODE (orig) == VECTOR_TYPE
1957 && GET_MODE_SIZE (TYPE_MODE (type))
1958 == GET_MODE_SIZE (TYPE_MODE (orig)))
1959 return fold (build1 (NOP_EXPR, type, arg));
1961 else if (VOID_TYPE_P (type))
1962 return fold (build1 (CONVERT_EXPR, type, arg));
1963 abort ();
1966 /* Return an expr equal to X but certainly not valid as an lvalue. */
1968 tree
1969 non_lvalue (tree x)
1971 tree result;
1973 /* These things are certainly not lvalues. */
1974 if (TREE_CODE (x) == NON_LVALUE_EXPR
1975 || TREE_CODE (x) == INTEGER_CST
1976 || TREE_CODE (x) == REAL_CST
1977 || TREE_CODE (x) == STRING_CST
1978 || TREE_CODE (x) == ADDR_EXPR)
1979 return x;
1981 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1982 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1983 return result;
1986 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1987 Zero means allow extended lvalues. */
1989 int pedantic_lvalues;
1991 /* When pedantic, return an expr equal to X but certainly not valid as a
1992 pedantic lvalue. Otherwise, return X. */
1994 tree
1995 pedantic_non_lvalue (tree x)
1997 if (pedantic_lvalues)
1998 return non_lvalue (x);
1999 else
2000 return x;
2003 /* Given a tree comparison code, return the code that is the logical inverse
2004 of the given code. It is not safe to do this for floating-point
2005 comparisons, except for NE_EXPR and EQ_EXPR. */
2007 static enum tree_code
2008 invert_tree_comparison (enum tree_code code)
2010 switch (code)
2012 case EQ_EXPR:
2013 return NE_EXPR;
2014 case NE_EXPR:
2015 return EQ_EXPR;
2016 case GT_EXPR:
2017 return LE_EXPR;
2018 case GE_EXPR:
2019 return LT_EXPR;
2020 case LT_EXPR:
2021 return GE_EXPR;
2022 case LE_EXPR:
2023 return GT_EXPR;
2024 default:
2025 abort ();
2029 /* Similar, but return the comparison that results if the operands are
2030 swapped. This is safe for floating-point. */
2032 static enum tree_code
2033 swap_tree_comparison (enum tree_code code)
2035 switch (code)
2037 case EQ_EXPR:
2038 case NE_EXPR:
2039 return code;
2040 case GT_EXPR:
2041 return LT_EXPR;
2042 case GE_EXPR:
2043 return LE_EXPR;
2044 case LT_EXPR:
2045 return GT_EXPR;
2046 case LE_EXPR:
2047 return GE_EXPR;
2048 default:
2049 abort ();
2054 /* Convert a comparison tree code from an enum tree_code representation
2055 into a compcode bit-based encoding. This function is the inverse of
2056 compcode_to_comparison. */
2058 static int
2059 comparison_to_compcode (enum tree_code code)
2061 switch (code)
2063 case LT_EXPR:
2064 return COMPCODE_LT;
2065 case EQ_EXPR:
2066 return COMPCODE_EQ;
2067 case LE_EXPR:
2068 return COMPCODE_LE;
2069 case GT_EXPR:
2070 return COMPCODE_GT;
2071 case NE_EXPR:
2072 return COMPCODE_NE;
2073 case GE_EXPR:
2074 return COMPCODE_GE;
2075 default:
2076 abort ();
2080 /* Convert a compcode bit-based encoding of a comparison operator back
2081 to GCC's enum tree_code representation. This function is the
2082 inverse of comparison_to_compcode. */
2084 static enum tree_code
2085 compcode_to_comparison (int code)
2087 switch (code)
2089 case COMPCODE_LT:
2090 return LT_EXPR;
2091 case COMPCODE_EQ:
2092 return EQ_EXPR;
2093 case COMPCODE_LE:
2094 return LE_EXPR;
2095 case COMPCODE_GT:
2096 return GT_EXPR;
2097 case COMPCODE_NE:
2098 return NE_EXPR;
2099 case COMPCODE_GE:
2100 return GE_EXPR;
2101 default:
2102 abort ();
2106 /* Return nonzero if CODE is a tree code that represents a truth value. */
2108 static int
2109 truth_value_p (enum tree_code code)
2111 return (TREE_CODE_CLASS (code) == '<'
2112 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2113 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2114 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2117 /* Return nonzero if two operands (typically of the same tree node)
2118 are necessarily equal. If either argument has side-effects this
2119 function returns zero.
2121 If ONLY_CONST is nonzero, only return nonzero for constants.
2122 This function tests whether the operands are indistinguishable;
2123 it does not test whether they are equal using C's == operation.
2124 The distinction is important for IEEE floating point, because
2125 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2126 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2128 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2129 even though it may hold multiple values during a function.
2130 This is because a GCC tree node guarantees that nothing else is
2131 executed between the evaluation of its "operands" (which may often
2132 be evaluated in arbitrary order). Hence if the operands themselves
2133 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2134 same value in each operand/subexpression. Hence a zero value for
2135 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2136 If comparing arbitrary expression trees, such as from different
2137 statements, ONLY_CONST must usually be nonzero. */
2140 operand_equal_p (tree arg0, tree arg1, int only_const)
2142 tree fndecl;
2144 /* If both types don't have the same signedness, then we can't consider
2145 them equal. We must check this before the STRIP_NOPS calls
2146 because they may change the signedness of the arguments. */
2147 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2148 return 0;
2150 STRIP_NOPS (arg0);
2151 STRIP_NOPS (arg1);
2153 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2154 /* This is needed for conversions and for COMPONENT_REF.
2155 Might as well play it safe and always test this. */
2156 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2157 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2158 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2159 return 0;
2161 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2162 We don't care about side effects in that case because the SAVE_EXPR
2163 takes care of that for us. In all other cases, two expressions are
2164 equal if they have no side effects. If we have two identical
2165 expressions with side effects that should be treated the same due
2166 to the only side effects being identical SAVE_EXPR's, that will
2167 be detected in the recursive calls below. */
2168 if (arg0 == arg1 && ! only_const
2169 && (TREE_CODE (arg0) == SAVE_EXPR
2170 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2171 return 1;
2173 /* Next handle constant cases, those for which we can return 1 even
2174 if ONLY_CONST is set. */
2175 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2176 switch (TREE_CODE (arg0))
2178 case INTEGER_CST:
2179 return (! TREE_CONSTANT_OVERFLOW (arg0)
2180 && ! TREE_CONSTANT_OVERFLOW (arg1)
2181 && tree_int_cst_equal (arg0, arg1));
2183 case REAL_CST:
2184 return (! TREE_CONSTANT_OVERFLOW (arg0)
2185 && ! TREE_CONSTANT_OVERFLOW (arg1)
2186 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2187 TREE_REAL_CST (arg1)));
2189 case VECTOR_CST:
2191 tree v1, v2;
2193 if (TREE_CONSTANT_OVERFLOW (arg0)
2194 || TREE_CONSTANT_OVERFLOW (arg1))
2195 return 0;
2197 v1 = TREE_VECTOR_CST_ELTS (arg0);
2198 v2 = TREE_VECTOR_CST_ELTS (arg1);
2199 while (v1 && v2)
2201 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2202 only_const))
2203 return 0;
2204 v1 = TREE_CHAIN (v1);
2205 v2 = TREE_CHAIN (v2);
2208 return 1;
2211 case COMPLEX_CST:
2212 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2213 only_const)
2214 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2215 only_const));
2217 case STRING_CST:
2218 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2219 && ! memcmp (TREE_STRING_POINTER (arg0),
2220 TREE_STRING_POINTER (arg1),
2221 TREE_STRING_LENGTH (arg0)));
2223 case ADDR_EXPR:
2224 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2226 default:
2227 break;
2230 if (only_const)
2231 return 0;
2233 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2235 case '1':
2236 /* Two conversions are equal only if signedness and modes match. */
2237 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2238 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2239 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2240 return 0;
2242 return operand_equal_p (TREE_OPERAND (arg0, 0),
2243 TREE_OPERAND (arg1, 0), 0);
2245 case '<':
2246 case '2':
2247 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2248 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2250 return 1;
2252 /* For commutative ops, allow the other order. */
2253 return (commutative_tree_code (TREE_CODE (arg0))
2254 && operand_equal_p (TREE_OPERAND (arg0, 0),
2255 TREE_OPERAND (arg1, 1), 0)
2256 && operand_equal_p (TREE_OPERAND (arg0, 1),
2257 TREE_OPERAND (arg1, 0), 0));
2259 case 'r':
2260 /* If either of the pointer (or reference) expressions we are
2261 dereferencing contain a side effect, these cannot be equal. */
2262 if (TREE_SIDE_EFFECTS (arg0)
2263 || TREE_SIDE_EFFECTS (arg1))
2264 return 0;
2266 switch (TREE_CODE (arg0))
2268 case INDIRECT_REF:
2269 return operand_equal_p (TREE_OPERAND (arg0, 0),
2270 TREE_OPERAND (arg1, 0), 0);
2272 case COMPONENT_REF:
2273 case ARRAY_REF:
2274 case ARRAY_RANGE_REF:
2275 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2276 TREE_OPERAND (arg1, 0), 0)
2277 && operand_equal_p (TREE_OPERAND (arg0, 1),
2278 TREE_OPERAND (arg1, 1), 0));
2280 case BIT_FIELD_REF:
2281 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2282 TREE_OPERAND (arg1, 0), 0)
2283 && operand_equal_p (TREE_OPERAND (arg0, 1),
2284 TREE_OPERAND (arg1, 1), 0)
2285 && operand_equal_p (TREE_OPERAND (arg0, 2),
2286 TREE_OPERAND (arg1, 2), 0));
2287 default:
2288 return 0;
2291 case 'e':
2292 switch (TREE_CODE (arg0))
2294 case ADDR_EXPR:
2295 case TRUTH_NOT_EXPR:
2296 return operand_equal_p (TREE_OPERAND (arg0, 0),
2297 TREE_OPERAND (arg1, 0), 0);
2299 case RTL_EXPR:
2300 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2302 case CALL_EXPR:
2303 /* If the CALL_EXPRs call different functions, then they
2304 clearly can not be equal. */
2305 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2306 TREE_OPERAND (arg1, 0), 0))
2307 return 0;
2309 /* Only consider const functions equivalent. */
2310 fndecl = get_callee_fndecl (arg0);
2311 if (fndecl == NULL_TREE
2312 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2313 return 0;
2315 /* Now see if all the arguments are the same. operand_equal_p
2316 does not handle TREE_LIST, so we walk the operands here
2317 feeding them to operand_equal_p. */
2318 arg0 = TREE_OPERAND (arg0, 1);
2319 arg1 = TREE_OPERAND (arg1, 1);
2320 while (arg0 && arg1)
2322 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2323 return 0;
2325 arg0 = TREE_CHAIN (arg0);
2326 arg1 = TREE_CHAIN (arg1);
2329 /* If we get here and both argument lists are exhausted
2330 then the CALL_EXPRs are equal. */
2331 return ! (arg0 || arg1);
2333 default:
2334 return 0;
2337 case 'd':
2338 /* Consider __builtin_sqrt equal to sqrt. */
2339 return TREE_CODE (arg0) == FUNCTION_DECL
2340 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2341 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2342 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2344 default:
2345 return 0;
2349 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2350 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2352 When in doubt, return 0. */
2354 static int
2355 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2357 int unsignedp1, unsignedpo;
2358 tree primarg0, primarg1, primother;
2359 unsigned int correct_width;
2361 if (operand_equal_p (arg0, arg1, 0))
2362 return 1;
2364 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2365 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2366 return 0;
2368 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2369 and see if the inner values are the same. This removes any
2370 signedness comparison, which doesn't matter here. */
2371 primarg0 = arg0, primarg1 = arg1;
2372 STRIP_NOPS (primarg0);
2373 STRIP_NOPS (primarg1);
2374 if (operand_equal_p (primarg0, primarg1, 0))
2375 return 1;
2377 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2378 actual comparison operand, ARG0.
2380 First throw away any conversions to wider types
2381 already present in the operands. */
2383 primarg1 = get_narrower (arg1, &unsignedp1);
2384 primother = get_narrower (other, &unsignedpo);
2386 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2387 if (unsignedp1 == unsignedpo
2388 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2389 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2391 tree type = TREE_TYPE (arg0);
2393 /* Make sure shorter operand is extended the right way
2394 to match the longer operand. */
2395 primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2396 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2398 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2399 return 1;
2402 return 0;
2405 /* See if ARG is an expression that is either a comparison or is performing
2406 arithmetic on comparisons. The comparisons must only be comparing
2407 two different values, which will be stored in *CVAL1 and *CVAL2; if
2408 they are nonzero it means that some operands have already been found.
2409 No variables may be used anywhere else in the expression except in the
2410 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2411 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2413 If this is true, return 1. Otherwise, return zero. */
2415 static int
2416 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2418 enum tree_code code = TREE_CODE (arg);
2419 char class = TREE_CODE_CLASS (code);
2421 /* We can handle some of the 'e' cases here. */
2422 if (class == 'e' && code == TRUTH_NOT_EXPR)
2423 class = '1';
2424 else if (class == 'e'
2425 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2426 || code == COMPOUND_EXPR))
2427 class = '2';
2429 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2430 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2432 /* If we've already found a CVAL1 or CVAL2, this expression is
2433 two complex to handle. */
2434 if (*cval1 || *cval2)
2435 return 0;
2437 class = '1';
2438 *save_p = 1;
2441 switch (class)
2443 case '1':
2444 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2446 case '2':
2447 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2448 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2449 cval1, cval2, save_p));
2451 case 'c':
2452 return 1;
2454 case 'e':
2455 if (code == COND_EXPR)
2456 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2457 cval1, cval2, save_p)
2458 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2459 cval1, cval2, save_p)
2460 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2461 cval1, cval2, save_p));
2462 return 0;
2464 case '<':
2465 /* First see if we can handle the first operand, then the second. For
2466 the second operand, we know *CVAL1 can't be zero. It must be that
2467 one side of the comparison is each of the values; test for the
2468 case where this isn't true by failing if the two operands
2469 are the same. */
2471 if (operand_equal_p (TREE_OPERAND (arg, 0),
2472 TREE_OPERAND (arg, 1), 0))
2473 return 0;
2475 if (*cval1 == 0)
2476 *cval1 = TREE_OPERAND (arg, 0);
2477 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2479 else if (*cval2 == 0)
2480 *cval2 = TREE_OPERAND (arg, 0);
2481 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2483 else
2484 return 0;
2486 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2488 else if (*cval2 == 0)
2489 *cval2 = TREE_OPERAND (arg, 1);
2490 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2492 else
2493 return 0;
2495 return 1;
2497 default:
2498 return 0;
2502 /* ARG is a tree that is known to contain just arithmetic operations and
2503 comparisons. Evaluate the operations in the tree substituting NEW0 for
2504 any occurrence of OLD0 as an operand of a comparison and likewise for
2505 NEW1 and OLD1. */
2507 static tree
2508 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2510 tree type = TREE_TYPE (arg);
2511 enum tree_code code = TREE_CODE (arg);
2512 char class = TREE_CODE_CLASS (code);
2514 /* We can handle some of the 'e' cases here. */
2515 if (class == 'e' && code == TRUTH_NOT_EXPR)
2516 class = '1';
2517 else if (class == 'e'
2518 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2519 class = '2';
2521 switch (class)
2523 case '1':
2524 return fold (build1 (code, type,
2525 eval_subst (TREE_OPERAND (arg, 0),
2526 old0, new0, old1, new1)));
2528 case '2':
2529 return fold (build (code, type,
2530 eval_subst (TREE_OPERAND (arg, 0),
2531 old0, new0, old1, new1),
2532 eval_subst (TREE_OPERAND (arg, 1),
2533 old0, new0, old1, new1)));
2535 case 'e':
2536 switch (code)
2538 case SAVE_EXPR:
2539 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2541 case COMPOUND_EXPR:
2542 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2544 case COND_EXPR:
2545 return fold (build (code, type,
2546 eval_subst (TREE_OPERAND (arg, 0),
2547 old0, new0, old1, new1),
2548 eval_subst (TREE_OPERAND (arg, 1),
2549 old0, new0, old1, new1),
2550 eval_subst (TREE_OPERAND (arg, 2),
2551 old0, new0, old1, new1)));
2552 default:
2553 break;
2555 /* Fall through - ??? */
2557 case '<':
2559 tree arg0 = TREE_OPERAND (arg, 0);
2560 tree arg1 = TREE_OPERAND (arg, 1);
2562 /* We need to check both for exact equality and tree equality. The
2563 former will be true if the operand has a side-effect. In that
2564 case, we know the operand occurred exactly once. */
2566 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2567 arg0 = new0;
2568 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2569 arg0 = new1;
2571 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2572 arg1 = new0;
2573 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2574 arg1 = new1;
2576 return fold (build (code, type, arg0, arg1));
2579 default:
2580 return arg;
2584 /* Return a tree for the case when the result of an expression is RESULT
2585 converted to TYPE and OMITTED was previously an operand of the expression
2586 but is now not needed (e.g., we folded OMITTED * 0).
2588 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2589 the conversion of RESULT to TYPE. */
2591 tree
2592 omit_one_operand (tree type, tree result, tree omitted)
2594 tree t = fold_convert (type, result);
2596 if (TREE_SIDE_EFFECTS (omitted))
2597 return build (COMPOUND_EXPR, type, omitted, t);
2599 return non_lvalue (t);
2602 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2604 static tree
2605 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2607 tree t = fold_convert (type, result);
2609 if (TREE_SIDE_EFFECTS (omitted))
2610 return build (COMPOUND_EXPR, type, omitted, t);
2612 return pedantic_non_lvalue (t);
2615 /* Return a simplified tree node for the truth-negation of ARG. This
2616 never alters ARG itself. We assume that ARG is an operation that
2617 returns a truth value (0 or 1). */
2619 tree
2620 invert_truthvalue (tree arg)
2622 tree type = TREE_TYPE (arg);
2623 enum tree_code code = TREE_CODE (arg);
2625 if (code == ERROR_MARK)
2626 return arg;
2628 /* If this is a comparison, we can simply invert it, except for
2629 floating-point non-equality comparisons, in which case we just
2630 enclose a TRUTH_NOT_EXPR around what we have. */
2632 if (TREE_CODE_CLASS (code) == '<')
2634 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2635 && !flag_unsafe_math_optimizations
2636 && code != NE_EXPR
2637 && code != EQ_EXPR)
2638 return build1 (TRUTH_NOT_EXPR, type, arg);
2639 else if (code == UNORDERED_EXPR
2640 || code == ORDERED_EXPR
2641 || code == UNEQ_EXPR
2642 || code == UNLT_EXPR
2643 || code == UNLE_EXPR
2644 || code == UNGT_EXPR
2645 || code == UNGE_EXPR)
2646 return build1 (TRUTH_NOT_EXPR, type, arg);
2647 else
2648 return build (invert_tree_comparison (code), type,
2649 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2652 switch (code)
2654 case INTEGER_CST:
2655 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2657 case TRUTH_AND_EXPR:
2658 return build (TRUTH_OR_EXPR, type,
2659 invert_truthvalue (TREE_OPERAND (arg, 0)),
2660 invert_truthvalue (TREE_OPERAND (arg, 1)));
2662 case TRUTH_OR_EXPR:
2663 return build (TRUTH_AND_EXPR, type,
2664 invert_truthvalue (TREE_OPERAND (arg, 0)),
2665 invert_truthvalue (TREE_OPERAND (arg, 1)));
2667 case TRUTH_XOR_EXPR:
2668 /* Here we can invert either operand. We invert the first operand
2669 unless the second operand is a TRUTH_NOT_EXPR in which case our
2670 result is the XOR of the first operand with the inside of the
2671 negation of the second operand. */
2673 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2674 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2675 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2676 else
2677 return build (TRUTH_XOR_EXPR, type,
2678 invert_truthvalue (TREE_OPERAND (arg, 0)),
2679 TREE_OPERAND (arg, 1));
2681 case TRUTH_ANDIF_EXPR:
2682 return build (TRUTH_ORIF_EXPR, type,
2683 invert_truthvalue (TREE_OPERAND (arg, 0)),
2684 invert_truthvalue (TREE_OPERAND (arg, 1)));
2686 case TRUTH_ORIF_EXPR:
2687 return build (TRUTH_ANDIF_EXPR, type,
2688 invert_truthvalue (TREE_OPERAND (arg, 0)),
2689 invert_truthvalue (TREE_OPERAND (arg, 1)));
2691 case TRUTH_NOT_EXPR:
2692 return TREE_OPERAND (arg, 0);
2694 case COND_EXPR:
2695 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2696 invert_truthvalue (TREE_OPERAND (arg, 1)),
2697 invert_truthvalue (TREE_OPERAND (arg, 2)));
2699 case COMPOUND_EXPR:
2700 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2701 invert_truthvalue (TREE_OPERAND (arg, 1)));
2703 case WITH_RECORD_EXPR:
2704 return build (WITH_RECORD_EXPR, type,
2705 invert_truthvalue (TREE_OPERAND (arg, 0)),
2706 TREE_OPERAND (arg, 1));
2708 case NON_LVALUE_EXPR:
2709 return invert_truthvalue (TREE_OPERAND (arg, 0));
2711 case NOP_EXPR:
2712 case CONVERT_EXPR:
2713 case FLOAT_EXPR:
2714 return build1 (TREE_CODE (arg), type,
2715 invert_truthvalue (TREE_OPERAND (arg, 0)));
2717 case BIT_AND_EXPR:
2718 if (!integer_onep (TREE_OPERAND (arg, 1)))
2719 break;
2720 return build (EQ_EXPR, type, arg,
2721 fold_convert (type, integer_zero_node));
2723 case SAVE_EXPR:
2724 return build1 (TRUTH_NOT_EXPR, type, arg);
2726 case CLEANUP_POINT_EXPR:
2727 return build1 (CLEANUP_POINT_EXPR, type,
2728 invert_truthvalue (TREE_OPERAND (arg, 0)));
2730 default:
2731 break;
2733 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2734 abort ();
2735 return build1 (TRUTH_NOT_EXPR, type, arg);
2738 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2739 operands are another bit-wise operation with a common input. If so,
2740 distribute the bit operations to save an operation and possibly two if
2741 constants are involved. For example, convert
2742 (A | B) & (A | C) into A | (B & C)
2743 Further simplification will occur if B and C are constants.
2745 If this optimization cannot be done, 0 will be returned. */
2747 static tree
2748 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2750 tree common;
2751 tree left, right;
2753 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2754 || TREE_CODE (arg0) == code
2755 || (TREE_CODE (arg0) != BIT_AND_EXPR
2756 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2757 return 0;
2759 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2761 common = TREE_OPERAND (arg0, 0);
2762 left = TREE_OPERAND (arg0, 1);
2763 right = TREE_OPERAND (arg1, 1);
2765 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2767 common = TREE_OPERAND (arg0, 0);
2768 left = TREE_OPERAND (arg0, 1);
2769 right = TREE_OPERAND (arg1, 0);
2771 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2773 common = TREE_OPERAND (arg0, 1);
2774 left = TREE_OPERAND (arg0, 0);
2775 right = TREE_OPERAND (arg1, 1);
2777 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2779 common = TREE_OPERAND (arg0, 1);
2780 left = TREE_OPERAND (arg0, 0);
2781 right = TREE_OPERAND (arg1, 0);
2783 else
2784 return 0;
2786 return fold (build (TREE_CODE (arg0), type, common,
2787 fold (build (code, type, left, right))));
2790 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2791 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2793 static tree
2794 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2795 int unsignedp)
2797 tree result = build (BIT_FIELD_REF, type, inner,
2798 size_int (bitsize), bitsize_int (bitpos));
2800 TREE_UNSIGNED (result) = unsignedp;
2802 return result;
2805 /* Optimize a bit-field compare.
2807 There are two cases: First is a compare against a constant and the
2808 second is a comparison of two items where the fields are at the same
2809 bit position relative to the start of a chunk (byte, halfword, word)
2810 large enough to contain it. In these cases we can avoid the shift
2811 implicit in bitfield extractions.
2813 For constants, we emit a compare of the shifted constant with the
2814 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2815 compared. For two fields at the same position, we do the ANDs with the
2816 similar mask and compare the result of the ANDs.
2818 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2819 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2820 are the left and right operands of the comparison, respectively.
2822 If the optimization described above can be done, we return the resulting
2823 tree. Otherwise we return zero. */
2825 static tree
2826 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2827 tree lhs, tree rhs)
2829 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2830 tree type = TREE_TYPE (lhs);
2831 tree signed_type, unsigned_type;
2832 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2833 enum machine_mode lmode, rmode, nmode;
2834 int lunsignedp, runsignedp;
2835 int lvolatilep = 0, rvolatilep = 0;
2836 tree linner, rinner = NULL_TREE;
2837 tree mask;
2838 tree offset;
2840 /* Get all the information about the extractions being done. If the bit size
2841 if the same as the size of the underlying object, we aren't doing an
2842 extraction at all and so can do nothing. We also don't want to
2843 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2844 then will no longer be able to replace it. */
2845 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2846 &lunsignedp, &lvolatilep);
2847 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2848 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2849 return 0;
2851 if (!const_p)
2853 /* If this is not a constant, we can only do something if bit positions,
2854 sizes, and signedness are the same. */
2855 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2856 &runsignedp, &rvolatilep);
2858 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2859 || lunsignedp != runsignedp || offset != 0
2860 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2861 return 0;
2864 /* See if we can find a mode to refer to this field. We should be able to,
2865 but fail if we can't. */
2866 nmode = get_best_mode (lbitsize, lbitpos,
2867 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2868 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2869 TYPE_ALIGN (TREE_TYPE (rinner))),
2870 word_mode, lvolatilep || rvolatilep);
2871 if (nmode == VOIDmode)
2872 return 0;
2874 /* Set signed and unsigned types of the precision of this mode for the
2875 shifts below. */
2876 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2877 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2879 /* Compute the bit position and size for the new reference and our offset
2880 within it. If the new reference is the same size as the original, we
2881 won't optimize anything, so return zero. */
2882 nbitsize = GET_MODE_BITSIZE (nmode);
2883 nbitpos = lbitpos & ~ (nbitsize - 1);
2884 lbitpos -= nbitpos;
2885 if (nbitsize == lbitsize)
2886 return 0;
2888 if (BYTES_BIG_ENDIAN)
2889 lbitpos = nbitsize - lbitsize - lbitpos;
2891 /* Make the mask to be used against the extracted field. */
2892 mask = build_int_2 (~0, ~0);
2893 TREE_TYPE (mask) = unsigned_type;
2894 force_fit_type (mask, 0);
2895 mask = fold_convert (unsigned_type, mask);
2896 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2897 mask = const_binop (RSHIFT_EXPR, mask,
2898 size_int (nbitsize - lbitsize - lbitpos), 0);
2900 if (! const_p)
2901 /* If not comparing with constant, just rework the comparison
2902 and return. */
2903 return build (code, compare_type,
2904 build (BIT_AND_EXPR, unsigned_type,
2905 make_bit_field_ref (linner, unsigned_type,
2906 nbitsize, nbitpos, 1),
2907 mask),
2908 build (BIT_AND_EXPR, unsigned_type,
2909 make_bit_field_ref (rinner, unsigned_type,
2910 nbitsize, nbitpos, 1),
2911 mask));
2913 /* Otherwise, we are handling the constant case. See if the constant is too
2914 big for the field. Warn and return a tree of for 0 (false) if so. We do
2915 this not only for its own sake, but to avoid having to test for this
2916 error case below. If we didn't, we might generate wrong code.
2918 For unsigned fields, the constant shifted right by the field length should
2919 be all zero. For signed fields, the high-order bits should agree with
2920 the sign bit. */
2922 if (lunsignedp)
2924 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2925 fold_convert (unsigned_type, rhs),
2926 size_int (lbitsize), 0)))
2928 warning ("comparison is always %d due to width of bit-field",
2929 code == NE_EXPR);
2930 return fold_convert (compare_type,
2931 (code == NE_EXPR
2932 ? integer_one_node : integer_zero_node));
2935 else
2937 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2938 size_int (lbitsize - 1), 0);
2939 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2941 warning ("comparison is always %d due to width of bit-field",
2942 code == NE_EXPR);
2943 return fold_convert (compare_type,
2944 (code == NE_EXPR
2945 ? integer_one_node : integer_zero_node));
2949 /* Single-bit compares should always be against zero. */
2950 if (lbitsize == 1 && ! integer_zerop (rhs))
2952 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2953 rhs = fold_convert (type, integer_zero_node);
2956 /* Make a new bitfield reference, shift the constant over the
2957 appropriate number of bits and mask it with the computed mask
2958 (in case this was a signed field). If we changed it, make a new one. */
2959 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2960 if (lvolatilep)
2962 TREE_SIDE_EFFECTS (lhs) = 1;
2963 TREE_THIS_VOLATILE (lhs) = 1;
2966 rhs = fold (const_binop (BIT_AND_EXPR,
2967 const_binop (LSHIFT_EXPR,
2968 fold_convert (unsigned_type, rhs),
2969 size_int (lbitpos), 0),
2970 mask, 0));
2972 return build (code, compare_type,
2973 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2974 rhs);
2977 /* Subroutine for fold_truthop: decode a field reference.
2979 If EXP is a comparison reference, we return the innermost reference.
2981 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2982 set to the starting bit number.
2984 If the innermost field can be completely contained in a mode-sized
2985 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2987 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2988 otherwise it is not changed.
2990 *PUNSIGNEDP is set to the signedness of the field.
2992 *PMASK is set to the mask used. This is either contained in a
2993 BIT_AND_EXPR or derived from the width of the field.
2995 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2997 Return 0 if this is not a component reference or is one that we can't
2998 do anything with. */
3000 static tree
3001 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3002 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3003 int *punsignedp, int *pvolatilep,
3004 tree *pmask, tree *pand_mask)
3006 tree outer_type = 0;
3007 tree and_mask = 0;
3008 tree mask, inner, offset;
3009 tree unsigned_type;
3010 unsigned int precision;
3012 /* All the optimizations using this function assume integer fields.
3013 There are problems with FP fields since the type_for_size call
3014 below can fail for, e.g., XFmode. */
3015 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3016 return 0;
3018 /* We are interested in the bare arrangement of bits, so strip everything
3019 that doesn't affect the machine mode. However, record the type of the
3020 outermost expression if it may matter below. */
3021 if (TREE_CODE (exp) == NOP_EXPR
3022 || TREE_CODE (exp) == CONVERT_EXPR
3023 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3024 outer_type = TREE_TYPE (exp);
3025 STRIP_NOPS (exp);
3027 if (TREE_CODE (exp) == BIT_AND_EXPR)
3029 and_mask = TREE_OPERAND (exp, 1);
3030 exp = TREE_OPERAND (exp, 0);
3031 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3032 if (TREE_CODE (and_mask) != INTEGER_CST)
3033 return 0;
3036 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3037 punsignedp, pvolatilep);
3038 if ((inner == exp && and_mask == 0)
3039 || *pbitsize < 0 || offset != 0
3040 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3041 return 0;
3043 /* If the number of bits in the reference is the same as the bitsize of
3044 the outer type, then the outer type gives the signedness. Otherwise
3045 (in case of a small bitfield) the signedness is unchanged. */
3046 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3047 *punsignedp = TREE_UNSIGNED (outer_type);
3049 /* Compute the mask to access the bitfield. */
3050 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3051 precision = TYPE_PRECISION (unsigned_type);
3053 mask = build_int_2 (~0, ~0);
3054 TREE_TYPE (mask) = unsigned_type;
3055 force_fit_type (mask, 0);
3056 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3057 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3059 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3060 if (and_mask != 0)
3061 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3062 fold_convert (unsigned_type, and_mask), mask));
3064 *pmask = mask;
3065 *pand_mask = and_mask;
3066 return inner;
3069 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3070 bit positions. */
3072 static int
3073 all_ones_mask_p (tree mask, int size)
3075 tree type = TREE_TYPE (mask);
3076 unsigned int precision = TYPE_PRECISION (type);
3077 tree tmask;
3079 tmask = build_int_2 (~0, ~0);
3080 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3081 force_fit_type (tmask, 0);
3082 return
3083 tree_int_cst_equal (mask,
3084 const_binop (RSHIFT_EXPR,
3085 const_binop (LSHIFT_EXPR, tmask,
3086 size_int (precision - size),
3088 size_int (precision - size), 0));
3091 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3092 represents the sign bit of EXP's type. If EXP represents a sign
3093 or zero extension, also test VAL against the unextended type.
3094 The return value is the (sub)expression whose sign bit is VAL,
3095 or NULL_TREE otherwise. */
3097 static tree
3098 sign_bit_p (tree exp, tree val)
3100 unsigned HOST_WIDE_INT mask_lo, lo;
3101 HOST_WIDE_INT mask_hi, hi;
3102 int width;
3103 tree t;
3105 /* Tree EXP must have an integral type. */
3106 t = TREE_TYPE (exp);
3107 if (! INTEGRAL_TYPE_P (t))
3108 return NULL_TREE;
3110 /* Tree VAL must be an integer constant. */
3111 if (TREE_CODE (val) != INTEGER_CST
3112 || TREE_CONSTANT_OVERFLOW (val))
3113 return NULL_TREE;
3115 width = TYPE_PRECISION (t);
3116 if (width > HOST_BITS_PER_WIDE_INT)
3118 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3119 lo = 0;
3121 mask_hi = ((unsigned HOST_WIDE_INT) -1
3122 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3123 mask_lo = -1;
3125 else
3127 hi = 0;
3128 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3130 mask_hi = 0;
3131 mask_lo = ((unsigned HOST_WIDE_INT) -1
3132 >> (HOST_BITS_PER_WIDE_INT - width));
3135 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3136 treat VAL as if it were unsigned. */
3137 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3138 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3139 return exp;
3141 /* Handle extension from a narrower type. */
3142 if (TREE_CODE (exp) == NOP_EXPR
3143 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3144 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3146 return NULL_TREE;
3149 /* Subroutine for fold_truthop: determine if an operand is simple enough
3150 to be evaluated unconditionally. */
3152 static int
3153 simple_operand_p (tree exp)
3155 /* Strip any conversions that don't change the machine mode. */
3156 while ((TREE_CODE (exp) == NOP_EXPR
3157 || TREE_CODE (exp) == CONVERT_EXPR)
3158 && (TYPE_MODE (TREE_TYPE (exp))
3159 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3160 exp = TREE_OPERAND (exp, 0);
3162 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3163 || (DECL_P (exp)
3164 && ! TREE_ADDRESSABLE (exp)
3165 && ! TREE_THIS_VOLATILE (exp)
3166 && ! DECL_NONLOCAL (exp)
3167 /* Don't regard global variables as simple. They may be
3168 allocated in ways unknown to the compiler (shared memory,
3169 #pragma weak, etc). */
3170 && ! TREE_PUBLIC (exp)
3171 && ! DECL_EXTERNAL (exp)
3172 /* Loading a static variable is unduly expensive, but global
3173 registers aren't expensive. */
3174 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3177 /* The following functions are subroutines to fold_range_test and allow it to
3178 try to change a logical combination of comparisons into a range test.
3180 For example, both
3181 X == 2 || X == 3 || X == 4 || X == 5
3183 X >= 2 && X <= 5
3184 are converted to
3185 (unsigned) (X - 2) <= 3
3187 We describe each set of comparisons as being either inside or outside
3188 a range, using a variable named like IN_P, and then describe the
3189 range with a lower and upper bound. If one of the bounds is omitted,
3190 it represents either the highest or lowest value of the type.
3192 In the comments below, we represent a range by two numbers in brackets
3193 preceded by a "+" to designate being inside that range, or a "-" to
3194 designate being outside that range, so the condition can be inverted by
3195 flipping the prefix. An omitted bound is represented by a "-". For
3196 example, "- [-, 10]" means being outside the range starting at the lowest
3197 possible value and ending at 10, in other words, being greater than 10.
3198 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3199 always false.
3201 We set up things so that the missing bounds are handled in a consistent
3202 manner so neither a missing bound nor "true" and "false" need to be
3203 handled using a special case. */
3205 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3206 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3207 and UPPER1_P are nonzero if the respective argument is an upper bound
3208 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3209 must be specified for a comparison. ARG1 will be converted to ARG0's
3210 type if both are specified. */
3212 static tree
3213 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3214 tree arg1, int upper1_p)
3216 tree tem;
3217 int result;
3218 int sgn0, sgn1;
3220 /* If neither arg represents infinity, do the normal operation.
3221 Else, if not a comparison, return infinity. Else handle the special
3222 comparison rules. Note that most of the cases below won't occur, but
3223 are handled for consistency. */
3225 if (arg0 != 0 && arg1 != 0)
3227 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3228 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3229 STRIP_NOPS (tem);
3230 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3233 if (TREE_CODE_CLASS (code) != '<')
3234 return 0;
3236 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3237 for neither. In real maths, we cannot assume open ended ranges are
3238 the same. But, this is computer arithmetic, where numbers are finite.
3239 We can therefore make the transformation of any unbounded range with
3240 the value Z, Z being greater than any representable number. This permits
3241 us to treat unbounded ranges as equal. */
3242 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3243 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3244 switch (code)
3246 case EQ_EXPR:
3247 result = sgn0 == sgn1;
3248 break;
3249 case NE_EXPR:
3250 result = sgn0 != sgn1;
3251 break;
3252 case LT_EXPR:
3253 result = sgn0 < sgn1;
3254 break;
3255 case LE_EXPR:
3256 result = sgn0 <= sgn1;
3257 break;
3258 case GT_EXPR:
3259 result = sgn0 > sgn1;
3260 break;
3261 case GE_EXPR:
3262 result = sgn0 >= sgn1;
3263 break;
3264 default:
3265 abort ();
3268 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3271 /* Given EXP, a logical expression, set the range it is testing into
3272 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3273 actually being tested. *PLOW and *PHIGH will be made of the same type
3274 as the returned expression. If EXP is not a comparison, we will most
3275 likely not be returning a useful value and range. */
3277 static tree
3278 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3280 enum tree_code code;
3281 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3282 tree orig_type = NULL_TREE;
3283 int in_p, n_in_p;
3284 tree low, high, n_low, n_high;
3286 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3287 and see if we can refine the range. Some of the cases below may not
3288 happen, but it doesn't seem worth worrying about this. We "continue"
3289 the outer loop when we've changed something; otherwise we "break"
3290 the switch, which will "break" the while. */
3292 in_p = 0;
3293 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3295 while (1)
3297 code = TREE_CODE (exp);
3299 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3301 if (first_rtl_op (code) > 0)
3302 arg0 = TREE_OPERAND (exp, 0);
3303 if (TREE_CODE_CLASS (code) == '<'
3304 || TREE_CODE_CLASS (code) == '1'
3305 || TREE_CODE_CLASS (code) == '2')
3306 type = TREE_TYPE (arg0);
3307 if (TREE_CODE_CLASS (code) == '2'
3308 || TREE_CODE_CLASS (code) == '<'
3309 || (TREE_CODE_CLASS (code) == 'e'
3310 && TREE_CODE_LENGTH (code) > 1))
3311 arg1 = TREE_OPERAND (exp, 1);
3314 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3315 lose a cast by accident. */
3316 if (type != NULL_TREE && orig_type == NULL_TREE)
3317 orig_type = type;
3319 switch (code)
3321 case TRUTH_NOT_EXPR:
3322 in_p = ! in_p, exp = arg0;
3323 continue;
3325 case EQ_EXPR: case NE_EXPR:
3326 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3327 /* We can only do something if the range is testing for zero
3328 and if the second operand is an integer constant. Note that
3329 saying something is "in" the range we make is done by
3330 complementing IN_P since it will set in the initial case of
3331 being not equal to zero; "out" is leaving it alone. */
3332 if (low == 0 || high == 0
3333 || ! integer_zerop (low) || ! integer_zerop (high)
3334 || TREE_CODE (arg1) != INTEGER_CST)
3335 break;
3337 switch (code)
3339 case NE_EXPR: /* - [c, c] */
3340 low = high = arg1;
3341 break;
3342 case EQ_EXPR: /* + [c, c] */
3343 in_p = ! in_p, low = high = arg1;
3344 break;
3345 case GT_EXPR: /* - [-, c] */
3346 low = 0, high = arg1;
3347 break;
3348 case GE_EXPR: /* + [c, -] */
3349 in_p = ! in_p, low = arg1, high = 0;
3350 break;
3351 case LT_EXPR: /* - [c, -] */
3352 low = arg1, high = 0;
3353 break;
3354 case LE_EXPR: /* + [-, c] */
3355 in_p = ! in_p, low = 0, high = arg1;
3356 break;
3357 default:
3358 abort ();
3361 exp = arg0;
3363 /* If this is an unsigned comparison, we also know that EXP is
3364 greater than or equal to zero. We base the range tests we make
3365 on that fact, so we record it here so we can parse existing
3366 range tests. */
3367 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3369 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3370 1, fold_convert (type, integer_zero_node),
3371 NULL_TREE))
3372 break;
3374 in_p = n_in_p, low = n_low, high = n_high;
3376 /* If the high bound is missing, but we have a nonzero low
3377 bound, reverse the range so it goes from zero to the low bound
3378 minus 1. */
3379 if (high == 0 && low && ! integer_zerop (low))
3381 in_p = ! in_p;
3382 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3383 integer_one_node, 0);
3384 low = fold_convert (type, integer_zero_node);
3387 continue;
3389 case NEGATE_EXPR:
3390 /* (-x) IN [a,b] -> x in [-b, -a] */
3391 n_low = range_binop (MINUS_EXPR, type,
3392 fold_convert (type, integer_zero_node),
3393 0, high, 1);
3394 n_high = range_binop (MINUS_EXPR, type,
3395 fold_convert (type, integer_zero_node),
3396 0, low, 0);
3397 low = n_low, high = n_high;
3398 exp = arg0;
3399 continue;
3401 case BIT_NOT_EXPR:
3402 /* ~ X -> -X - 1 */
3403 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3404 fold_convert (type, integer_one_node));
3405 continue;
3407 case PLUS_EXPR: case MINUS_EXPR:
3408 if (TREE_CODE (arg1) != INTEGER_CST)
3409 break;
3411 /* If EXP is signed, any overflow in the computation is undefined,
3412 so we don't worry about it so long as our computations on
3413 the bounds don't overflow. For unsigned, overflow is defined
3414 and this is exactly the right thing. */
3415 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3416 type, low, 0, arg1, 0);
3417 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3418 type, high, 1, arg1, 0);
3419 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3420 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3421 break;
3423 /* Check for an unsigned range which has wrapped around the maximum
3424 value thus making n_high < n_low, and normalize it. */
3425 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3427 low = range_binop (PLUS_EXPR, type, n_high, 0,
3428 integer_one_node, 0);
3429 high = range_binop (MINUS_EXPR, type, n_low, 0,
3430 integer_one_node, 0);
3432 /* If the range is of the form +/- [ x+1, x ], we won't
3433 be able to normalize it. But then, it represents the
3434 whole range or the empty set, so make it
3435 +/- [ -, - ]. */
3436 if (tree_int_cst_equal (n_low, low)
3437 && tree_int_cst_equal (n_high, high))
3438 low = high = 0;
3439 else
3440 in_p = ! in_p;
3442 else
3443 low = n_low, high = n_high;
3445 exp = arg0;
3446 continue;
3448 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3449 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3450 break;
3452 if (! INTEGRAL_TYPE_P (type)
3453 || (low != 0 && ! int_fits_type_p (low, type))
3454 || (high != 0 && ! int_fits_type_p (high, type)))
3455 break;
3457 n_low = low, n_high = high;
3459 if (n_low != 0)
3460 n_low = fold_convert (type, n_low);
3462 if (n_high != 0)
3463 n_high = fold_convert (type, n_high);
3465 /* If we're converting from an unsigned to a signed type,
3466 we will be doing the comparison as unsigned. The tests above
3467 have already verified that LOW and HIGH are both positive.
3469 So we have to make sure that the original unsigned value will
3470 be interpreted as positive. */
3471 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3473 tree equiv_type = (*lang_hooks.types.type_for_mode)
3474 (TYPE_MODE (type), 1);
3475 tree high_positive;
3477 /* A range without an upper bound is, naturally, unbounded.
3478 Since convert would have cropped a very large value, use
3479 the max value for the destination type. */
3480 high_positive
3481 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3482 : TYPE_MAX_VALUE (type);
3484 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3485 high_positive = fold (build (RSHIFT_EXPR, type,
3486 fold_convert (type,
3487 high_positive),
3488 fold_convert (type,
3489 integer_one_node)));
3491 /* If the low bound is specified, "and" the range with the
3492 range for which the original unsigned value will be
3493 positive. */
3494 if (low != 0)
3496 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3497 1, n_low, n_high, 1,
3498 fold_convert (type, integer_zero_node),
3499 high_positive))
3500 break;
3502 in_p = (n_in_p == in_p);
3504 else
3506 /* Otherwise, "or" the range with the range of the input
3507 that will be interpreted as negative. */
3508 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3509 0, n_low, n_high, 1,
3510 fold_convert (type, integer_zero_node),
3511 high_positive))
3512 break;
3514 in_p = (in_p != n_in_p);
3518 exp = arg0;
3519 low = n_low, high = n_high;
3520 continue;
3522 default:
3523 break;
3526 break;
3529 /* If EXP is a constant, we can evaluate whether this is true or false. */
3530 if (TREE_CODE (exp) == INTEGER_CST)
3532 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3533 exp, 0, low, 0))
3534 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3535 exp, 1, high, 1)));
3536 low = high = 0;
3537 exp = 0;
3540 *pin_p = in_p, *plow = low, *phigh = high;
3541 return exp;
3544 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3545 type, TYPE, return an expression to test if EXP is in (or out of, depending
3546 on IN_P) the range. */
3548 static tree
3549 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3551 tree etype = TREE_TYPE (exp);
3552 tree value;
3554 if (! in_p
3555 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3556 return invert_truthvalue (value);
3558 if (low == 0 && high == 0)
3559 return fold_convert (type, integer_one_node);
3561 if (low == 0)
3562 return fold (build (LE_EXPR, type, exp, high));
3564 if (high == 0)
3565 return fold (build (GE_EXPR, type, exp, low));
3567 if (operand_equal_p (low, high, 0))
3568 return fold (build (EQ_EXPR, type, exp, low));
3570 if (integer_zerop (low))
3572 if (! TREE_UNSIGNED (etype))
3574 etype = (*lang_hooks.types.unsigned_type) (etype);
3575 high = fold_convert (etype, high);
3576 exp = fold_convert (etype, exp);
3578 return build_range_check (type, exp, 1, 0, high);
3581 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3582 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3584 unsigned HOST_WIDE_INT lo;
3585 HOST_WIDE_INT hi;
3586 int prec;
3588 prec = TYPE_PRECISION (etype);
3589 if (prec <= HOST_BITS_PER_WIDE_INT)
3591 hi = 0;
3592 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3594 else
3596 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3597 lo = (unsigned HOST_WIDE_INT) -1;
3600 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3602 if (TREE_UNSIGNED (etype))
3604 etype = (*lang_hooks.types.signed_type) (etype);
3605 exp = fold_convert (etype, exp);
3607 return fold (build (GT_EXPR, type, exp,
3608 fold_convert (etype, integer_zero_node)));
3612 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3613 && ! TREE_OVERFLOW (value))
3614 return build_range_check (type,
3615 fold (build (MINUS_EXPR, etype, exp, low)),
3616 1, fold_convert (etype, integer_zero_node),
3617 value);
3619 return 0;
3622 /* Given two ranges, see if we can merge them into one. Return 1 if we
3623 can, 0 if we can't. Set the output range into the specified parameters. */
3625 static int
3626 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3627 tree high0, int in1_p, tree low1, tree high1)
3629 int no_overlap;
3630 int subset;
3631 int temp;
3632 tree tem;
3633 int in_p;
3634 tree low, high;
3635 int lowequal = ((low0 == 0 && low1 == 0)
3636 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3637 low0, 0, low1, 0)));
3638 int highequal = ((high0 == 0 && high1 == 0)
3639 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3640 high0, 1, high1, 1)));
3642 /* Make range 0 be the range that starts first, or ends last if they
3643 start at the same value. Swap them if it isn't. */
3644 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3645 low0, 0, low1, 0))
3646 || (lowequal
3647 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3648 high1, 1, high0, 1))))
3650 temp = in0_p, in0_p = in1_p, in1_p = temp;
3651 tem = low0, low0 = low1, low1 = tem;
3652 tem = high0, high0 = high1, high1 = tem;
3655 /* Now flag two cases, whether the ranges are disjoint or whether the
3656 second range is totally subsumed in the first. Note that the tests
3657 below are simplified by the ones above. */
3658 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3659 high0, 1, low1, 0));
3660 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3661 high1, 1, high0, 1));
3663 /* We now have four cases, depending on whether we are including or
3664 excluding the two ranges. */
3665 if (in0_p && in1_p)
3667 /* If they don't overlap, the result is false. If the second range
3668 is a subset it is the result. Otherwise, the range is from the start
3669 of the second to the end of the first. */
3670 if (no_overlap)
3671 in_p = 0, low = high = 0;
3672 else if (subset)
3673 in_p = 1, low = low1, high = high1;
3674 else
3675 in_p = 1, low = low1, high = high0;
3678 else if (in0_p && ! in1_p)
3680 /* If they don't overlap, the result is the first range. If they are
3681 equal, the result is false. If the second range is a subset of the
3682 first, and the ranges begin at the same place, we go from just after
3683 the end of the first range to the end of the second. If the second
3684 range is not a subset of the first, or if it is a subset and both
3685 ranges end at the same place, the range starts at the start of the
3686 first range and ends just before the second range.
3687 Otherwise, we can't describe this as a single range. */
3688 if (no_overlap)
3689 in_p = 1, low = low0, high = high0;
3690 else if (lowequal && highequal)
3691 in_p = 0, low = high = 0;
3692 else if (subset && lowequal)
3694 in_p = 1, high = high0;
3695 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3696 integer_one_node, 0);
3698 else if (! subset || highequal)
3700 in_p = 1, low = low0;
3701 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3702 integer_one_node, 0);
3704 else
3705 return 0;
3708 else if (! in0_p && in1_p)
3710 /* If they don't overlap, the result is the second range. If the second
3711 is a subset of the first, the result is false. Otherwise,
3712 the range starts just after the first range and ends at the
3713 end of the second. */
3714 if (no_overlap)
3715 in_p = 1, low = low1, high = high1;
3716 else if (subset || highequal)
3717 in_p = 0, low = high = 0;
3718 else
3720 in_p = 1, high = high1;
3721 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3722 integer_one_node, 0);
3726 else
3728 /* The case where we are excluding both ranges. Here the complex case
3729 is if they don't overlap. In that case, the only time we have a
3730 range is if they are adjacent. If the second is a subset of the
3731 first, the result is the first. Otherwise, the range to exclude
3732 starts at the beginning of the first range and ends at the end of the
3733 second. */
3734 if (no_overlap)
3736 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3737 range_binop (PLUS_EXPR, NULL_TREE,
3738 high0, 1,
3739 integer_one_node, 1),
3740 1, low1, 0)))
3741 in_p = 0, low = low0, high = high1;
3742 else
3743 return 0;
3745 else if (subset)
3746 in_p = 0, low = low0, high = high0;
3747 else
3748 in_p = 0, low = low0, high = high1;
3751 *pin_p = in_p, *plow = low, *phigh = high;
3752 return 1;
3755 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3756 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3757 #endif
3759 /* EXP is some logical combination of boolean tests. See if we can
3760 merge it into some range test. Return the new tree if so. */
3762 static tree
3763 fold_range_test (tree exp)
3765 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3766 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3767 int in0_p, in1_p, in_p;
3768 tree low0, low1, low, high0, high1, high;
3769 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3770 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3771 tree tem;
3773 /* If this is an OR operation, invert both sides; we will invert
3774 again at the end. */
3775 if (or_op)
3776 in0_p = ! in0_p, in1_p = ! in1_p;
3778 /* If both expressions are the same, if we can merge the ranges, and we
3779 can build the range test, return it or it inverted. If one of the
3780 ranges is always true or always false, consider it to be the same
3781 expression as the other. */
3782 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3783 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3784 in1_p, low1, high1)
3785 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3786 lhs != 0 ? lhs
3787 : rhs != 0 ? rhs : integer_zero_node,
3788 in_p, low, high))))
3789 return or_op ? invert_truthvalue (tem) : tem;
3791 /* On machines where the branch cost is expensive, if this is a
3792 short-circuited branch and the underlying object on both sides
3793 is the same, make a non-short-circuit operation. */
3794 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3795 && lhs != 0 && rhs != 0
3796 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3797 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3798 && operand_equal_p (lhs, rhs, 0))
3800 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3801 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3802 which cases we can't do this. */
3803 if (simple_operand_p (lhs))
3804 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3805 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3806 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3807 TREE_OPERAND (exp, 1));
3809 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3810 && ! CONTAINS_PLACEHOLDER_P (lhs))
3812 tree common = save_expr (lhs);
3814 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3815 or_op ? ! in0_p : in0_p,
3816 low0, high0))
3817 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3818 or_op ? ! in1_p : in1_p,
3819 low1, high1))))
3820 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3821 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3822 TREE_TYPE (exp), lhs, rhs);
3826 return 0;
3829 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3830 bit value. Arrange things so the extra bits will be set to zero if and
3831 only if C is signed-extended to its full width. If MASK is nonzero,
3832 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3834 static tree
3835 unextend (tree c, int p, int unsignedp, tree mask)
3837 tree type = TREE_TYPE (c);
3838 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3839 tree temp;
3841 if (p == modesize || unsignedp)
3842 return c;
3844 /* We work by getting just the sign bit into the low-order bit, then
3845 into the high-order bit, then sign-extend. We then XOR that value
3846 with C. */
3847 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3848 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3850 /* We must use a signed type in order to get an arithmetic right shift.
3851 However, we must also avoid introducing accidental overflows, so that
3852 a subsequent call to integer_zerop will work. Hence we must
3853 do the type conversion here. At this point, the constant is either
3854 zero or one, and the conversion to a signed type can never overflow.
3855 We could get an overflow if this conversion is done anywhere else. */
3856 if (TREE_UNSIGNED (type))
3857 temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3859 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3860 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3861 if (mask != 0)
3862 temp = const_binop (BIT_AND_EXPR, temp,
3863 fold_convert (TREE_TYPE (c), mask), 0);
3864 /* If necessary, convert the type back to match the type of C. */
3865 if (TREE_UNSIGNED (type))
3866 temp = fold_convert (type, temp);
3868 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3871 /* Find ways of folding logical expressions of LHS and RHS:
3872 Try to merge two comparisons to the same innermost item.
3873 Look for range tests like "ch >= '0' && ch <= '9'".
3874 Look for combinations of simple terms on machines with expensive branches
3875 and evaluate the RHS unconditionally.
3877 For example, if we have p->a == 2 && p->b == 4 and we can make an
3878 object large enough to span both A and B, we can do this with a comparison
3879 against the object ANDed with the a mask.
3881 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3882 operations to do this with one comparison.
3884 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3885 function and the one above.
3887 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3888 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3890 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3891 two operands.
3893 We return the simplified tree or 0 if no optimization is possible. */
3895 static tree
3896 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3898 /* If this is the "or" of two comparisons, we can do something if
3899 the comparisons are NE_EXPR. If this is the "and", we can do something
3900 if the comparisons are EQ_EXPR. I.e.,
3901 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3903 WANTED_CODE is this operation code. For single bit fields, we can
3904 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3905 comparison for one-bit fields. */
3907 enum tree_code wanted_code;
3908 enum tree_code lcode, rcode;
3909 tree ll_arg, lr_arg, rl_arg, rr_arg;
3910 tree ll_inner, lr_inner, rl_inner, rr_inner;
3911 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3912 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3913 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3914 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3915 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3916 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3917 enum machine_mode lnmode, rnmode;
3918 tree ll_mask, lr_mask, rl_mask, rr_mask;
3919 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3920 tree l_const, r_const;
3921 tree lntype, rntype, result;
3922 int first_bit, end_bit;
3923 int volatilep;
3925 /* Start by getting the comparison codes. Fail if anything is volatile.
3926 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3927 it were surrounded with a NE_EXPR. */
3929 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3930 return 0;
3932 lcode = TREE_CODE (lhs);
3933 rcode = TREE_CODE (rhs);
3935 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3936 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3938 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3939 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3941 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3942 return 0;
3944 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3945 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3947 ll_arg = TREE_OPERAND (lhs, 0);
3948 lr_arg = TREE_OPERAND (lhs, 1);
3949 rl_arg = TREE_OPERAND (rhs, 0);
3950 rr_arg = TREE_OPERAND (rhs, 1);
3952 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3953 if (simple_operand_p (ll_arg)
3954 && simple_operand_p (lr_arg)
3955 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3957 int compcode;
3959 if (operand_equal_p (ll_arg, rl_arg, 0)
3960 && operand_equal_p (lr_arg, rr_arg, 0))
3962 int lcompcode, rcompcode;
3964 lcompcode = comparison_to_compcode (lcode);
3965 rcompcode = comparison_to_compcode (rcode);
3966 compcode = (code == TRUTH_AND_EXPR)
3967 ? lcompcode & rcompcode
3968 : lcompcode | rcompcode;
3970 else if (operand_equal_p (ll_arg, rr_arg, 0)
3971 && operand_equal_p (lr_arg, rl_arg, 0))
3973 int lcompcode, rcompcode;
3975 rcode = swap_tree_comparison (rcode);
3976 lcompcode = comparison_to_compcode (lcode);
3977 rcompcode = comparison_to_compcode (rcode);
3978 compcode = (code == TRUTH_AND_EXPR)
3979 ? lcompcode & rcompcode
3980 : lcompcode | rcompcode;
3982 else
3983 compcode = -1;
3985 if (compcode == COMPCODE_TRUE)
3986 return fold_convert (truth_type, integer_one_node);
3987 else if (compcode == COMPCODE_FALSE)
3988 return fold_convert (truth_type, integer_zero_node);
3989 else if (compcode != -1)
3990 return build (compcode_to_comparison (compcode),
3991 truth_type, ll_arg, lr_arg);
3994 /* If the RHS can be evaluated unconditionally and its operands are
3995 simple, it wins to evaluate the RHS unconditionally on machines
3996 with expensive branches. In this case, this isn't a comparison
3997 that can be merged. Avoid doing this if the RHS is a floating-point
3998 comparison since those can trap. */
4000 if (BRANCH_COST >= 2
4001 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4002 && simple_operand_p (rl_arg)
4003 && simple_operand_p (rr_arg))
4005 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4006 if (code == TRUTH_OR_EXPR
4007 && lcode == NE_EXPR && integer_zerop (lr_arg)
4008 && rcode == NE_EXPR && integer_zerop (rr_arg)
4009 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4010 return build (NE_EXPR, truth_type,
4011 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4012 ll_arg, rl_arg),
4013 integer_zero_node);
4015 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4016 if (code == TRUTH_AND_EXPR
4017 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4018 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4019 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4020 return build (EQ_EXPR, truth_type,
4021 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4022 ll_arg, rl_arg),
4023 integer_zero_node);
4025 return build (code, truth_type, lhs, rhs);
4028 /* See if the comparisons can be merged. Then get all the parameters for
4029 each side. */
4031 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4032 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4033 return 0;
4035 volatilep = 0;
4036 ll_inner = decode_field_reference (ll_arg,
4037 &ll_bitsize, &ll_bitpos, &ll_mode,
4038 &ll_unsignedp, &volatilep, &ll_mask,
4039 &ll_and_mask);
4040 lr_inner = decode_field_reference (lr_arg,
4041 &lr_bitsize, &lr_bitpos, &lr_mode,
4042 &lr_unsignedp, &volatilep, &lr_mask,
4043 &lr_and_mask);
4044 rl_inner = decode_field_reference (rl_arg,
4045 &rl_bitsize, &rl_bitpos, &rl_mode,
4046 &rl_unsignedp, &volatilep, &rl_mask,
4047 &rl_and_mask);
4048 rr_inner = decode_field_reference (rr_arg,
4049 &rr_bitsize, &rr_bitpos, &rr_mode,
4050 &rr_unsignedp, &volatilep, &rr_mask,
4051 &rr_and_mask);
4053 /* It must be true that the inner operation on the lhs of each
4054 comparison must be the same if we are to be able to do anything.
4055 Then see if we have constants. If not, the same must be true for
4056 the rhs's. */
4057 if (volatilep || ll_inner == 0 || rl_inner == 0
4058 || ! operand_equal_p (ll_inner, rl_inner, 0))
4059 return 0;
4061 if (TREE_CODE (lr_arg) == INTEGER_CST
4062 && TREE_CODE (rr_arg) == INTEGER_CST)
4063 l_const = lr_arg, r_const = rr_arg;
4064 else if (lr_inner == 0 || rr_inner == 0
4065 || ! operand_equal_p (lr_inner, rr_inner, 0))
4066 return 0;
4067 else
4068 l_const = r_const = 0;
4070 /* If either comparison code is not correct for our logical operation,
4071 fail. However, we can convert a one-bit comparison against zero into
4072 the opposite comparison against that bit being set in the field. */
4074 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4075 if (lcode != wanted_code)
4077 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4079 /* Make the left operand unsigned, since we are only interested
4080 in the value of one bit. Otherwise we are doing the wrong
4081 thing below. */
4082 ll_unsignedp = 1;
4083 l_const = ll_mask;
4085 else
4086 return 0;
4089 /* This is analogous to the code for l_const above. */
4090 if (rcode != wanted_code)
4092 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4094 rl_unsignedp = 1;
4095 r_const = rl_mask;
4097 else
4098 return 0;
4101 /* After this point all optimizations will generate bit-field
4102 references, which we might not want. */
4103 if (! (*lang_hooks.can_use_bit_fields_p) ())
4104 return 0;
4106 /* See if we can find a mode that contains both fields being compared on
4107 the left. If we can't, fail. Otherwise, update all constants and masks
4108 to be relative to a field of that size. */
4109 first_bit = MIN (ll_bitpos, rl_bitpos);
4110 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4111 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4112 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4113 volatilep);
4114 if (lnmode == VOIDmode)
4115 return 0;
4117 lnbitsize = GET_MODE_BITSIZE (lnmode);
4118 lnbitpos = first_bit & ~ (lnbitsize - 1);
4119 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4120 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4122 if (BYTES_BIG_ENDIAN)
4124 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4125 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4128 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4129 size_int (xll_bitpos), 0);
4130 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4131 size_int (xrl_bitpos), 0);
4133 if (l_const)
4135 l_const = fold_convert (lntype, l_const);
4136 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4137 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4138 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4139 fold (build1 (BIT_NOT_EXPR,
4140 lntype, ll_mask)),
4141 0)))
4143 warning ("comparison is always %d", wanted_code == NE_EXPR);
4145 return fold_convert (truth_type,
4146 wanted_code == NE_EXPR
4147 ? integer_one_node : integer_zero_node);
4150 if (r_const)
4152 r_const = fold_convert (lntype, r_const);
4153 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4154 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4155 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4156 fold (build1 (BIT_NOT_EXPR,
4157 lntype, rl_mask)),
4158 0)))
4160 warning ("comparison is always %d", wanted_code == NE_EXPR);
4162 return fold_convert (truth_type,
4163 wanted_code == NE_EXPR
4164 ? integer_one_node : integer_zero_node);
4168 /* If the right sides are not constant, do the same for it. Also,
4169 disallow this optimization if a size or signedness mismatch occurs
4170 between the left and right sides. */
4171 if (l_const == 0)
4173 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4174 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4175 /* Make sure the two fields on the right
4176 correspond to the left without being swapped. */
4177 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4178 return 0;
4180 first_bit = MIN (lr_bitpos, rr_bitpos);
4181 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4182 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4183 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4184 volatilep);
4185 if (rnmode == VOIDmode)
4186 return 0;
4188 rnbitsize = GET_MODE_BITSIZE (rnmode);
4189 rnbitpos = first_bit & ~ (rnbitsize - 1);
4190 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4191 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4193 if (BYTES_BIG_ENDIAN)
4195 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4196 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4199 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4200 size_int (xlr_bitpos), 0);
4201 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4202 size_int (xrr_bitpos), 0);
4204 /* Make a mask that corresponds to both fields being compared.
4205 Do this for both items being compared. If the operands are the
4206 same size and the bits being compared are in the same position
4207 then we can do this by masking both and comparing the masked
4208 results. */
4209 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4210 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4211 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4213 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4214 ll_unsignedp || rl_unsignedp);
4215 if (! all_ones_mask_p (ll_mask, lnbitsize))
4216 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4218 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4219 lr_unsignedp || rr_unsignedp);
4220 if (! all_ones_mask_p (lr_mask, rnbitsize))
4221 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4223 return build (wanted_code, truth_type, lhs, rhs);
4226 /* There is still another way we can do something: If both pairs of
4227 fields being compared are adjacent, we may be able to make a wider
4228 field containing them both.
4230 Note that we still must mask the lhs/rhs expressions. Furthermore,
4231 the mask must be shifted to account for the shift done by
4232 make_bit_field_ref. */
4233 if ((ll_bitsize + ll_bitpos == rl_bitpos
4234 && lr_bitsize + lr_bitpos == rr_bitpos)
4235 || (ll_bitpos == rl_bitpos + rl_bitsize
4236 && lr_bitpos == rr_bitpos + rr_bitsize))
4238 tree type;
4240 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4241 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4242 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4243 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4245 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4246 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4247 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4248 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4250 /* Convert to the smaller type before masking out unwanted bits. */
4251 type = lntype;
4252 if (lntype != rntype)
4254 if (lnbitsize > rnbitsize)
4256 lhs = fold_convert (rntype, lhs);
4257 ll_mask = fold_convert (rntype, ll_mask);
4258 type = rntype;
4260 else if (lnbitsize < rnbitsize)
4262 rhs = fold_convert (lntype, rhs);
4263 lr_mask = fold_convert (lntype, lr_mask);
4264 type = lntype;
4268 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4269 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4271 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4272 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4274 return build (wanted_code, truth_type, lhs, rhs);
4277 return 0;
4280 /* Handle the case of comparisons with constants. If there is something in
4281 common between the masks, those bits of the constants must be the same.
4282 If not, the condition is always false. Test for this to avoid generating
4283 incorrect code below. */
4284 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4285 if (! integer_zerop (result)
4286 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4287 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4289 if (wanted_code == NE_EXPR)
4291 warning ("`or' of unmatched not-equal tests is always 1");
4292 return fold_convert (truth_type, integer_one_node);
4294 else
4296 warning ("`and' of mutually exclusive equal-tests is always 0");
4297 return fold_convert (truth_type, integer_zero_node);
4301 /* Construct the expression we will return. First get the component
4302 reference we will make. Unless the mask is all ones the width of
4303 that field, perform the mask operation. Then compare with the
4304 merged constant. */
4305 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4306 ll_unsignedp || rl_unsignedp);
4308 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4309 if (! all_ones_mask_p (ll_mask, lnbitsize))
4310 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4312 return build (wanted_code, truth_type, result,
4313 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4316 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4317 constant. */
4319 static tree
4320 optimize_minmax_comparison (tree t)
4322 tree type = TREE_TYPE (t);
4323 tree arg0 = TREE_OPERAND (t, 0);
4324 enum tree_code op_code;
4325 tree comp_const = TREE_OPERAND (t, 1);
4326 tree minmax_const;
4327 int consts_equal, consts_lt;
4328 tree inner;
4330 STRIP_SIGN_NOPS (arg0);
4332 op_code = TREE_CODE (arg0);
4333 minmax_const = TREE_OPERAND (arg0, 1);
4334 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4335 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4336 inner = TREE_OPERAND (arg0, 0);
4338 /* If something does not permit us to optimize, return the original tree. */
4339 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4340 || TREE_CODE (comp_const) != INTEGER_CST
4341 || TREE_CONSTANT_OVERFLOW (comp_const)
4342 || TREE_CODE (minmax_const) != INTEGER_CST
4343 || TREE_CONSTANT_OVERFLOW (minmax_const))
4344 return t;
4346 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4347 and GT_EXPR, doing the rest with recursive calls using logical
4348 simplifications. */
4349 switch (TREE_CODE (t))
4351 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4352 return
4353 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4355 case GE_EXPR:
4356 return
4357 fold (build (TRUTH_ORIF_EXPR, type,
4358 optimize_minmax_comparison
4359 (build (EQ_EXPR, type, arg0, comp_const)),
4360 optimize_minmax_comparison
4361 (build (GT_EXPR, type, arg0, comp_const))));
4363 case EQ_EXPR:
4364 if (op_code == MAX_EXPR && consts_equal)
4365 /* MAX (X, 0) == 0 -> X <= 0 */
4366 return fold (build (LE_EXPR, type, inner, comp_const));
4368 else if (op_code == MAX_EXPR && consts_lt)
4369 /* MAX (X, 0) == 5 -> X == 5 */
4370 return fold (build (EQ_EXPR, type, inner, comp_const));
4372 else if (op_code == MAX_EXPR)
4373 /* MAX (X, 0) == -1 -> false */
4374 return omit_one_operand (type, integer_zero_node, inner);
4376 else if (consts_equal)
4377 /* MIN (X, 0) == 0 -> X >= 0 */
4378 return fold (build (GE_EXPR, type, inner, comp_const));
4380 else if (consts_lt)
4381 /* MIN (X, 0) == 5 -> false */
4382 return omit_one_operand (type, integer_zero_node, inner);
4384 else
4385 /* MIN (X, 0) == -1 -> X == -1 */
4386 return fold (build (EQ_EXPR, type, inner, comp_const));
4388 case GT_EXPR:
4389 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4390 /* MAX (X, 0) > 0 -> X > 0
4391 MAX (X, 0) > 5 -> X > 5 */
4392 return fold (build (GT_EXPR, type, inner, comp_const));
4394 else if (op_code == MAX_EXPR)
4395 /* MAX (X, 0) > -1 -> true */
4396 return omit_one_operand (type, integer_one_node, inner);
4398 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4399 /* MIN (X, 0) > 0 -> false
4400 MIN (X, 0) > 5 -> false */
4401 return omit_one_operand (type, integer_zero_node, inner);
4403 else
4404 /* MIN (X, 0) > -1 -> X > -1 */
4405 return fold (build (GT_EXPR, type, inner, comp_const));
4407 default:
4408 return t;
4412 /* T is an integer expression that is being multiplied, divided, or taken a
4413 modulus (CODE says which and what kind of divide or modulus) by a
4414 constant C. See if we can eliminate that operation by folding it with
4415 other operations already in T. WIDE_TYPE, if non-null, is a type that
4416 should be used for the computation if wider than our type.
4418 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4419 (X * 2) + (Y * 4). We must, however, be assured that either the original
4420 expression would not overflow or that overflow is undefined for the type
4421 in the language in question.
4423 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4424 the machine has a multiply-accumulate insn or that this is part of an
4425 addressing calculation.
4427 If we return a non-null expression, it is an equivalent form of the
4428 original computation, but need not be in the original type. */
4430 static tree
4431 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4433 /* To avoid exponential search depth, refuse to allow recursion past
4434 three levels. Beyond that (1) it's highly unlikely that we'll find
4435 something interesting and (2) we've probably processed it before
4436 when we built the inner expression. */
4438 static int depth;
4439 tree ret;
4441 if (depth > 3)
4442 return NULL;
4444 depth++;
4445 ret = extract_muldiv_1 (t, c, code, wide_type);
4446 depth--;
4448 return ret;
4451 static tree
4452 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4454 tree type = TREE_TYPE (t);
4455 enum tree_code tcode = TREE_CODE (t);
4456 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4457 > GET_MODE_SIZE (TYPE_MODE (type)))
4458 ? wide_type : type);
4459 tree t1, t2;
4460 int same_p = tcode == code;
4461 tree op0 = NULL_TREE, op1 = NULL_TREE;
4463 /* Don't deal with constants of zero here; they confuse the code below. */
4464 if (integer_zerop (c))
4465 return NULL_TREE;
4467 if (TREE_CODE_CLASS (tcode) == '1')
4468 op0 = TREE_OPERAND (t, 0);
4470 if (TREE_CODE_CLASS (tcode) == '2')
4471 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4473 /* Note that we need not handle conditional operations here since fold
4474 already handles those cases. So just do arithmetic here. */
4475 switch (tcode)
4477 case INTEGER_CST:
4478 /* For a constant, we can always simplify if we are a multiply
4479 or (for divide and modulus) if it is a multiple of our constant. */
4480 if (code == MULT_EXPR
4481 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4482 return const_binop (code, fold_convert (ctype, t),
4483 fold_convert (ctype, c), 0);
4484 break;
4486 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4487 /* If op0 is an expression ... */
4488 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4489 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4490 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4491 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4492 /* ... and is unsigned, and its type is smaller than ctype,
4493 then we cannot pass through as widening. */
4494 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4495 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4496 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4497 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4498 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4499 /* ... or its type is larger than ctype,
4500 then we cannot pass through this truncation. */
4501 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4502 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4503 /* ... or signedness changes for division or modulus,
4504 then we cannot pass through this conversion. */
4505 || (code != MULT_EXPR
4506 && (TREE_UNSIGNED (ctype)
4507 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4508 break;
4510 /* Pass the constant down and see if we can make a simplification. If
4511 we can, replace this expression with the inner simplification for
4512 possible later conversion to our or some other type. */
4513 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4514 && TREE_CODE (t2) == INTEGER_CST
4515 && ! TREE_CONSTANT_OVERFLOW (t2)
4516 && (0 != (t1 = extract_muldiv (op0, t2, code,
4517 code == MULT_EXPR
4518 ? ctype : NULL_TREE))))
4519 return t1;
4520 break;
4522 case NEGATE_EXPR: case ABS_EXPR:
4523 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4524 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4525 break;
4527 case MIN_EXPR: case MAX_EXPR:
4528 /* If widening the type changes the signedness, then we can't perform
4529 this optimization as that changes the result. */
4530 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4531 break;
4533 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4534 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4535 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4537 if (tree_int_cst_sgn (c) < 0)
4538 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4540 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4541 fold_convert (ctype, t2)));
4543 break;
4545 case WITH_RECORD_EXPR:
4546 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4547 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4548 TREE_OPERAND (t, 1));
4549 break;
4551 case LSHIFT_EXPR: case RSHIFT_EXPR:
4552 /* If the second operand is constant, this is a multiplication
4553 or floor division, by a power of two, so we can treat it that
4554 way unless the multiplier or divisor overflows. */
4555 if (TREE_CODE (op1) == INTEGER_CST
4556 /* const_binop may not detect overflow correctly,
4557 so check for it explicitly here. */
4558 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4559 && TREE_INT_CST_HIGH (op1) == 0
4560 && 0 != (t1 = fold_convert (ctype,
4561 const_binop (LSHIFT_EXPR,
4562 size_one_node,
4563 op1, 0)))
4564 && ! TREE_OVERFLOW (t1))
4565 return extract_muldiv (build (tcode == LSHIFT_EXPR
4566 ? MULT_EXPR : FLOOR_DIV_EXPR,
4567 ctype, fold_convert (ctype, op0), t1),
4568 c, code, wide_type);
4569 break;
4571 case PLUS_EXPR: case MINUS_EXPR:
4572 /* See if we can eliminate the operation on both sides. If we can, we
4573 can return a new PLUS or MINUS. If we can't, the only remaining
4574 cases where we can do anything are if the second operand is a
4575 constant. */
4576 t1 = extract_muldiv (op0, c, code, wide_type);
4577 t2 = extract_muldiv (op1, c, code, wide_type);
4578 if (t1 != 0 && t2 != 0
4579 && (code == MULT_EXPR
4580 /* If not multiplication, we can only do this if both operands
4581 are divisible by c. */
4582 || (multiple_of_p (ctype, op0, c)
4583 && multiple_of_p (ctype, op1, c))))
4584 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4585 fold_convert (ctype, t2)));
4587 /* If this was a subtraction, negate OP1 and set it to be an addition.
4588 This simplifies the logic below. */
4589 if (tcode == MINUS_EXPR)
4590 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4592 if (TREE_CODE (op1) != INTEGER_CST)
4593 break;
4595 /* If either OP1 or C are negative, this optimization is not safe for
4596 some of the division and remainder types while for others we need
4597 to change the code. */
4598 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4600 if (code == CEIL_DIV_EXPR)
4601 code = FLOOR_DIV_EXPR;
4602 else if (code == FLOOR_DIV_EXPR)
4603 code = CEIL_DIV_EXPR;
4604 else if (code != MULT_EXPR
4605 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4606 break;
4609 /* If it's a multiply or a division/modulus operation of a multiple
4610 of our constant, do the operation and verify it doesn't overflow. */
4611 if (code == MULT_EXPR
4612 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4614 op1 = const_binop (code, fold_convert (ctype, op1),
4615 fold_convert (ctype, c), 0);
4616 /* We allow the constant to overflow with wrapping semantics. */
4617 if (op1 == 0
4618 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4619 break;
4621 else
4622 break;
4624 /* If we have an unsigned type is not a sizetype, we cannot widen
4625 the operation since it will change the result if the original
4626 computation overflowed. */
4627 if (TREE_UNSIGNED (ctype)
4628 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4629 && ctype != type)
4630 break;
4632 /* If we were able to eliminate our operation from the first side,
4633 apply our operation to the second side and reform the PLUS. */
4634 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4635 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4637 /* The last case is if we are a multiply. In that case, we can
4638 apply the distributive law to commute the multiply and addition
4639 if the multiplication of the constants doesn't overflow. */
4640 if (code == MULT_EXPR)
4641 return fold (build (tcode, ctype,
4642 fold (build (code, ctype,
4643 fold_convert (ctype, op0),
4644 fold_convert (ctype, c))),
4645 op1));
4647 break;
4649 case MULT_EXPR:
4650 /* We have a special case here if we are doing something like
4651 (C * 8) % 4 since we know that's zero. */
4652 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4653 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4654 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4655 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4656 return omit_one_operand (type, integer_zero_node, op0);
4658 /* ... fall through ... */
4660 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4661 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4662 /* If we can extract our operation from the LHS, do so and return a
4663 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4664 do something only if the second operand is a constant. */
4665 if (same_p
4666 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4667 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4668 fold_convert (ctype, op1)));
4669 else if (tcode == MULT_EXPR && code == MULT_EXPR
4670 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4671 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4672 fold_convert (ctype, t1)));
4673 else if (TREE_CODE (op1) != INTEGER_CST)
4674 return 0;
4676 /* If these are the same operation types, we can associate them
4677 assuming no overflow. */
4678 if (tcode == code
4679 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4680 fold_convert (ctype, c), 0))
4681 && ! TREE_OVERFLOW (t1))
4682 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4684 /* If these operations "cancel" each other, we have the main
4685 optimizations of this pass, which occur when either constant is a
4686 multiple of the other, in which case we replace this with either an
4687 operation or CODE or TCODE.
4689 If we have an unsigned type that is not a sizetype, we cannot do
4690 this since it will change the result if the original computation
4691 overflowed. */
4692 if ((! TREE_UNSIGNED (ctype)
4693 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4694 && ! flag_wrapv
4695 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4696 || (tcode == MULT_EXPR
4697 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4698 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4700 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4701 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4702 fold_convert (ctype,
4703 const_binop (TRUNC_DIV_EXPR,
4704 op1, c, 0))));
4705 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4706 return fold (build (code, ctype, fold_convert (ctype, op0),
4707 fold_convert (ctype,
4708 const_binop (TRUNC_DIV_EXPR,
4709 c, op1, 0))));
4711 break;
4713 default:
4714 break;
4717 return 0;
4720 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4721 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4722 that we may sometimes modify the tree. */
4724 static tree
4725 strip_compound_expr (tree t, tree s)
4727 enum tree_code code = TREE_CODE (t);
4729 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4730 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4731 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4732 return TREE_OPERAND (t, 1);
4734 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4735 don't bother handling any other types. */
4736 else if (code == COND_EXPR)
4738 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4739 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4740 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4742 else if (TREE_CODE_CLASS (code) == '1')
4743 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4744 else if (TREE_CODE_CLASS (code) == '<'
4745 || TREE_CODE_CLASS (code) == '2')
4747 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4748 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4751 return t;
4754 /* Return a node which has the indicated constant VALUE (either 0 or
4755 1), and is of the indicated TYPE. */
4757 static tree
4758 constant_boolean_node (int value, tree type)
4760 if (type == integer_type_node)
4761 return value ? integer_one_node : integer_zero_node;
4762 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4763 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4764 integer_zero_node);
4765 else
4767 tree t = build_int_2 (value, 0);
4769 TREE_TYPE (t) = type;
4770 return t;
4774 /* Utility function for the following routine, to see how complex a nesting of
4775 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4776 we don't care (to avoid spending too much time on complex expressions.). */
4778 static int
4779 count_cond (tree expr, int lim)
4781 int ctrue, cfalse;
4783 if (TREE_CODE (expr) != COND_EXPR)
4784 return 0;
4785 else if (lim <= 0)
4786 return 0;
4788 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4789 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4790 return MIN (lim, 1 + ctrue + cfalse);
4793 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4794 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4795 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4796 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4797 COND is the first argument to CODE; otherwise (as in the example
4798 given here), it is the second argument. TYPE is the type of the
4799 original expression. */
4801 static tree
4802 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4803 tree cond, tree arg, int cond_first_p)
4805 tree test, true_value, false_value;
4806 tree lhs = NULL_TREE;
4807 tree rhs = NULL_TREE;
4808 /* In the end, we'll produce a COND_EXPR. Both arms of the
4809 conditional expression will be binary operations. The left-hand
4810 side of the expression to be executed if the condition is true
4811 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4812 of the expression to be executed if the condition is true will be
4813 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4814 but apply to the expression to be executed if the conditional is
4815 false. */
4816 tree *true_lhs;
4817 tree *true_rhs;
4818 tree *false_lhs;
4819 tree *false_rhs;
4820 /* These are the codes to use for the left-hand side and right-hand
4821 side of the COND_EXPR. Normally, they are the same as CODE. */
4822 enum tree_code lhs_code = code;
4823 enum tree_code rhs_code = code;
4824 /* And these are the types of the expressions. */
4825 tree lhs_type = type;
4826 tree rhs_type = type;
4827 int save = 0;
4829 if (cond_first_p)
4831 true_rhs = false_rhs = &arg;
4832 true_lhs = &true_value;
4833 false_lhs = &false_value;
4835 else
4837 true_lhs = false_lhs = &arg;
4838 true_rhs = &true_value;
4839 false_rhs = &false_value;
4842 if (TREE_CODE (cond) == COND_EXPR)
4844 test = TREE_OPERAND (cond, 0);
4845 true_value = TREE_OPERAND (cond, 1);
4846 false_value = TREE_OPERAND (cond, 2);
4847 /* If this operand throws an expression, then it does not make
4848 sense to try to perform a logical or arithmetic operation
4849 involving it. Instead of building `a + throw 3' for example,
4850 we simply build `a, throw 3'. */
4851 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4853 if (! cond_first_p)
4855 lhs_code = COMPOUND_EXPR;
4856 lhs_type = void_type_node;
4858 else
4859 lhs = true_value;
4861 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4863 if (! cond_first_p)
4865 rhs_code = COMPOUND_EXPR;
4866 rhs_type = void_type_node;
4868 else
4869 rhs = false_value;
4872 else
4874 tree testtype = TREE_TYPE (cond);
4875 test = cond;
4876 true_value = fold_convert (testtype, integer_one_node);
4877 false_value = fold_convert (testtype, integer_zero_node);
4880 /* If ARG is complex we want to make sure we only evaluate it once. Though
4881 this is only required if it is volatile, it might be more efficient even
4882 if it is not. However, if we succeed in folding one part to a constant,
4883 we do not need to make this SAVE_EXPR. Since we do this optimization
4884 primarily to see if we do end up with constant and this SAVE_EXPR
4885 interferes with later optimizations, suppressing it when we can is
4886 important.
4888 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4889 do so. Don't try to see if the result is a constant if an arm is a
4890 COND_EXPR since we get exponential behavior in that case. */
4892 if (saved_expr_p (arg))
4893 save = 1;
4894 else if (lhs == 0 && rhs == 0
4895 && !TREE_CONSTANT (arg)
4896 && (*lang_hooks.decls.global_bindings_p) () == 0
4897 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4898 || TREE_SIDE_EFFECTS (arg)))
4900 if (TREE_CODE (true_value) != COND_EXPR)
4901 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4903 if (TREE_CODE (false_value) != COND_EXPR)
4904 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4906 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4907 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4909 arg = save_expr (arg);
4910 lhs = rhs = 0;
4911 save = saved_expr_p (arg);
4915 if (lhs == 0)
4916 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4917 if (rhs == 0)
4918 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4920 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4922 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4923 ahead of the COND_EXPR we made. Otherwise we would have it only
4924 evaluated in one branch, with the other branch using the result
4925 but missing the evaluation code. Beware that the save_expr call
4926 above might not return a SAVE_EXPR, so testing the TREE_CODE
4927 of ARG is not enough to decide here.  */
4928 if (save)
4929 return build (COMPOUND_EXPR, type,
4930 fold_convert (void_type_node, arg),
4931 strip_compound_expr (test, arg));
4932 else
4933 return fold_convert (type, test);
4937 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4939 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4940 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4941 ADDEND is the same as X.
4943 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4944 and finite. The problematic cases are when X is zero, and its mode
4945 has signed zeros. In the case of rounding towards -infinity,
4946 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4947 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4949 static bool
4950 fold_real_zero_addition_p (tree type, tree addend, int negate)
4952 if (!real_zerop (addend))
4953 return false;
4955 /* Don't allow the fold with -fsignaling-nans. */
4956 if (HONOR_SNANS (TYPE_MODE (type)))
4957 return false;
4959 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4960 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4961 return true;
4963 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4964 if (TREE_CODE (addend) == REAL_CST
4965 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4966 negate = !negate;
4968 /* The mode has signed zeros, and we have to honor their sign.
4969 In this situation, there is only one case we can return true for.
4970 X - 0 is the same as X unless rounding towards -infinity is
4971 supported. */
4972 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4975 /* Subroutine of fold() that checks comparisons of built-in math
4976 functions against real constants.
4978 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4979 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4980 is the type of the result and ARG0 and ARG1 are the operands of the
4981 comparison. ARG1 must be a TREE_REAL_CST.
4983 The function returns the constant folded tree if a simplification
4984 can be made, and NULL_TREE otherwise. */
4986 static tree
4987 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4988 tree type, tree arg0, tree arg1)
4990 REAL_VALUE_TYPE c;
4992 if (fcode == BUILT_IN_SQRT
4993 || fcode == BUILT_IN_SQRTF
4994 || fcode == BUILT_IN_SQRTL)
4996 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4997 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4999 c = TREE_REAL_CST (arg1);
5000 if (REAL_VALUE_NEGATIVE (c))
5002 /* sqrt(x) < y is always false, if y is negative. */
5003 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5004 return omit_one_operand (type,
5005 fold_convert (type, integer_zero_node),
5006 arg);
5008 /* sqrt(x) > y is always true, if y is negative and we
5009 don't care about NaNs, i.e. negative values of x. */
5010 if (code == NE_EXPR || !HONOR_NANS (mode))
5011 return omit_one_operand (type,
5012 fold_convert (type, integer_one_node),
5013 arg);
5015 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5016 return fold (build (GE_EXPR, type, arg,
5017 build_real (TREE_TYPE (arg), dconst0)));
5019 else if (code == GT_EXPR || code == GE_EXPR)
5021 REAL_VALUE_TYPE c2;
5023 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5024 real_convert (&c2, mode, &c2);
5026 if (REAL_VALUE_ISINF (c2))
5028 /* sqrt(x) > y is x == +Inf, when y is very large. */
5029 if (HONOR_INFINITIES (mode))
5030 return fold (build (EQ_EXPR, type, arg,
5031 build_real (TREE_TYPE (arg), c2)));
5033 /* sqrt(x) > y is always false, when y is very large
5034 and we don't care about infinities. */
5035 return omit_one_operand (type,
5036 fold_convert (type, integer_zero_node),
5037 arg);
5040 /* sqrt(x) > c is the same as x > c*c. */
5041 return fold (build (code, type, arg,
5042 build_real (TREE_TYPE (arg), c2)));
5044 else if (code == LT_EXPR || code == LE_EXPR)
5046 REAL_VALUE_TYPE c2;
5048 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5049 real_convert (&c2, mode, &c2);
5051 if (REAL_VALUE_ISINF (c2))
5053 /* sqrt(x) < y is always true, when y is a very large
5054 value and we don't care about NaNs or Infinities. */
5055 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5056 return omit_one_operand (type,
5057 fold_convert (type, integer_one_node),
5058 arg);
5060 /* sqrt(x) < y is x != +Inf when y is very large and we
5061 don't care about NaNs. */
5062 if (! HONOR_NANS (mode))
5063 return fold (build (NE_EXPR, type, arg,
5064 build_real (TREE_TYPE (arg), c2)));
5066 /* sqrt(x) < y is x >= 0 when y is very large and we
5067 don't care about Infinities. */
5068 if (! HONOR_INFINITIES (mode))
5069 return fold (build (GE_EXPR, type, arg,
5070 build_real (TREE_TYPE (arg), dconst0)));
5072 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5073 if ((*lang_hooks.decls.global_bindings_p) () != 0
5074 || CONTAINS_PLACEHOLDER_P (arg))
5075 return NULL_TREE;
5077 arg = save_expr (arg);
5078 return fold (build (TRUTH_ANDIF_EXPR, type,
5079 fold (build (GE_EXPR, type, arg,
5080 build_real (TREE_TYPE (arg),
5081 dconst0))),
5082 fold (build (NE_EXPR, type, arg,
5083 build_real (TREE_TYPE (arg),
5084 c2)))));
5087 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5088 if (! HONOR_NANS (mode))
5089 return fold (build (code, type, arg,
5090 build_real (TREE_TYPE (arg), c2)));
5092 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5093 if ((*lang_hooks.decls.global_bindings_p) () == 0
5094 && ! CONTAINS_PLACEHOLDER_P (arg))
5096 arg = save_expr (arg);
5097 return fold (build (TRUTH_ANDIF_EXPR, type,
5098 fold (build (GE_EXPR, type, arg,
5099 build_real (TREE_TYPE (arg),
5100 dconst0))),
5101 fold (build (code, type, arg,
5102 build_real (TREE_TYPE (arg),
5103 c2)))));
5108 return NULL_TREE;
5111 /* Subroutine of fold() that optimizes comparisons against Infinities,
5112 either +Inf or -Inf.
5114 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5115 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5116 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5118 The function returns the constant folded tree if a simplification
5119 can be made, and NULL_TREE otherwise. */
5121 static tree
5122 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5124 enum machine_mode mode;
5125 REAL_VALUE_TYPE max;
5126 tree temp;
5127 bool neg;
5129 mode = TYPE_MODE (TREE_TYPE (arg0));
5131 /* For negative infinity swap the sense of the comparison. */
5132 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5133 if (neg)
5134 code = swap_tree_comparison (code);
5136 switch (code)
5138 case GT_EXPR:
5139 /* x > +Inf is always false, if with ignore sNANs. */
5140 if (HONOR_SNANS (mode))
5141 return NULL_TREE;
5142 return omit_one_operand (type,
5143 fold_convert (type, integer_zero_node),
5144 arg0);
5146 case LE_EXPR:
5147 /* x <= +Inf is always true, if we don't case about NaNs. */
5148 if (! HONOR_NANS (mode))
5149 return omit_one_operand (type,
5150 fold_convert (type, integer_one_node),
5151 arg0);
5153 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5154 if ((*lang_hooks.decls.global_bindings_p) () == 0
5155 && ! CONTAINS_PLACEHOLDER_P (arg0))
5157 arg0 = save_expr (arg0);
5158 return fold (build (EQ_EXPR, type, arg0, arg0));
5160 break;
5162 case EQ_EXPR:
5163 case GE_EXPR:
5164 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5165 real_maxval (&max, neg, mode);
5166 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5167 arg0, build_real (TREE_TYPE (arg0), max)));
5169 case LT_EXPR:
5170 /* x < +Inf is always equal to x <= DBL_MAX. */
5171 real_maxval (&max, neg, mode);
5172 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5173 arg0, build_real (TREE_TYPE (arg0), max)));
5175 case NE_EXPR:
5176 /* x != +Inf is always equal to !(x > DBL_MAX). */
5177 real_maxval (&max, neg, mode);
5178 if (! HONOR_NANS (mode))
5179 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5180 arg0, build_real (TREE_TYPE (arg0), max)));
5181 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5182 arg0, build_real (TREE_TYPE (arg0), max)));
5183 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5185 default:
5186 break;
5189 return NULL_TREE;
5192 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5193 equality/inequality test, then return a simplified form of
5194 the test using shifts and logical operations. Otherwise return
5195 NULL. TYPE is the desired result type. */
5197 tree
5198 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5199 tree result_type)
5201 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5202 operand 0. */
5203 if (code == TRUTH_NOT_EXPR)
5205 code = TREE_CODE (arg0);
5206 if (code != NE_EXPR && code != EQ_EXPR)
5207 return NULL_TREE;
5209 /* Extract the arguments of the EQ/NE. */
5210 arg1 = TREE_OPERAND (arg0, 1);
5211 arg0 = TREE_OPERAND (arg0, 0);
5213 /* This requires us to invert the code. */
5214 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5217 /* If this is testing a single bit, we can optimize the test. */
5218 if ((code == NE_EXPR || code == EQ_EXPR)
5219 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5220 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5222 tree inner = TREE_OPERAND (arg0, 0);
5223 tree type = TREE_TYPE (arg0);
5224 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5225 enum machine_mode operand_mode = TYPE_MODE (type);
5226 int ops_unsigned;
5227 tree signed_type, unsigned_type, intermediate_type;
5228 tree arg00;
5230 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5231 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5232 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5233 if (arg00 != NULL_TREE)
5235 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5236 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5237 fold_convert (stype, arg00),
5238 fold_convert (stype, integer_zero_node)));
5241 /* At this point, we know that arg0 is not testing the sign bit. */
5242 if (TYPE_PRECISION (type) - 1 == bitnum)
5243 abort ();
5245 /* Otherwise we have (A & C) != 0 where C is a single bit,
5246 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5247 Similarly for (A & C) == 0. */
5249 /* If INNER is a right shift of a constant and it plus BITNUM does
5250 not overflow, adjust BITNUM and INNER. */
5251 if (TREE_CODE (inner) == RSHIFT_EXPR
5252 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5253 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5254 && bitnum < TYPE_PRECISION (type)
5255 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5256 bitnum - TYPE_PRECISION (type)))
5258 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5259 inner = TREE_OPERAND (inner, 0);
5262 /* If we are going to be able to omit the AND below, we must do our
5263 operations as unsigned. If we must use the AND, we have a choice.
5264 Normally unsigned is faster, but for some machines signed is. */
5265 #ifdef LOAD_EXTEND_OP
5266 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5267 #else
5268 ops_unsigned = 1;
5269 #endif
5271 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5272 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5273 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5274 inner = fold_convert (intermediate_type, inner);
5276 if (bitnum != 0)
5277 inner = build (RSHIFT_EXPR, intermediate_type,
5278 inner, size_int (bitnum));
5280 if (code == EQ_EXPR)
5281 inner = build (BIT_XOR_EXPR, intermediate_type,
5282 inner, integer_one_node);
5284 /* Put the AND last so it can combine with more things. */
5285 inner = build (BIT_AND_EXPR, intermediate_type,
5286 inner, integer_one_node);
5288 /* Make sure to return the proper type. */
5289 inner = fold_convert (result_type, inner);
5291 return inner;
5293 return NULL_TREE;
5296 /* Check whether we are allowed to reorder operands arg0 and arg1,
5297 such that the evaluation of arg1 occurs before arg0. */
5299 static bool
5300 reorder_operands_p (tree arg0, tree arg1)
5302 if (! flag_evaluation_order)
5303 return true;
5304 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5305 return true;
5306 return ! TREE_SIDE_EFFECTS (arg0)
5307 && ! TREE_SIDE_EFFECTS (arg1);
5310 /* Test whether it is preferable two swap two operands, ARG0 and
5311 ARG1, for example because ARG0 is an integer constant and ARG1
5312 isn't. If REORDER is true, only recommend swapping if we can
5313 evaluate the operands in reverse order. */
5315 static bool
5316 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5318 STRIP_SIGN_NOPS (arg0);
5319 STRIP_SIGN_NOPS (arg1);
5321 if (TREE_CODE (arg1) == INTEGER_CST)
5322 return 0;
5323 if (TREE_CODE (arg0) == INTEGER_CST)
5324 return 1;
5326 if (TREE_CODE (arg1) == REAL_CST)
5327 return 0;
5328 if (TREE_CODE (arg0) == REAL_CST)
5329 return 1;
5331 if (TREE_CODE (arg1) == COMPLEX_CST)
5332 return 0;
5333 if (TREE_CODE (arg0) == COMPLEX_CST)
5334 return 1;
5336 if (TREE_CONSTANT (arg1))
5337 return 0;
5338 if (TREE_CONSTANT (arg0))
5339 return 1;
5341 if (optimize_size)
5342 return 0;
5344 if (reorder && flag_evaluation_order
5345 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5346 return 0;
5348 if (DECL_P (arg1))
5349 return 0;
5350 if (DECL_P (arg0))
5351 return 1;
5353 return 0;
5356 /* Perform constant folding and related simplification of EXPR.
5357 The related simplifications include x*1 => x, x*0 => 0, etc.,
5358 and application of the associative law.
5359 NOP_EXPR conversions may be removed freely (as long as we
5360 are careful not to change the C type of the overall expression)
5361 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5362 but we can constant-fold them if they have constant operands. */
5364 #ifdef ENABLE_FOLD_CHECKING
5365 # define fold(x) fold_1 (x)
5366 static tree fold_1 (tree);
5367 static
5368 #endif
5369 tree
5370 fold (tree expr)
5372 tree t = expr, orig_t;
5373 tree t1 = NULL_TREE;
5374 tree tem;
5375 tree type = TREE_TYPE (expr);
5376 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5377 enum tree_code code = TREE_CODE (t);
5378 int kind = TREE_CODE_CLASS (code);
5379 int invert;
5380 /* WINS will be nonzero when the switch is done
5381 if all operands are constant. */
5382 int wins = 1;
5384 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5385 Likewise for a SAVE_EXPR that's already been evaluated. */
5386 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5387 return t;
5389 /* Return right away if a constant. */
5390 if (kind == 'c')
5391 return t;
5393 orig_t = t;
5395 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5397 tree subop;
5399 /* Special case for conversion ops that can have fixed point args. */
5400 arg0 = TREE_OPERAND (t, 0);
5402 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5403 if (arg0 != 0)
5404 STRIP_SIGN_NOPS (arg0);
5406 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5407 subop = TREE_REALPART (arg0);
5408 else
5409 subop = arg0;
5411 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5412 && TREE_CODE (subop) != REAL_CST)
5413 /* Note that TREE_CONSTANT isn't enough:
5414 static var addresses are constant but we can't
5415 do arithmetic on them. */
5416 wins = 0;
5418 else if (IS_EXPR_CODE_CLASS (kind))
5420 int len = first_rtl_op (code);
5421 int i;
5422 for (i = 0; i < len; i++)
5424 tree op = TREE_OPERAND (t, i);
5425 tree subop;
5427 if (op == 0)
5428 continue; /* Valid for CALL_EXPR, at least. */
5430 if (kind == '<' || code == RSHIFT_EXPR)
5432 /* Signedness matters here. Perhaps we can refine this
5433 later. */
5434 STRIP_SIGN_NOPS (op);
5436 else
5437 /* Strip any conversions that don't change the mode. */
5438 STRIP_NOPS (op);
5440 if (TREE_CODE (op) == COMPLEX_CST)
5441 subop = TREE_REALPART (op);
5442 else
5443 subop = op;
5445 if (TREE_CODE (subop) != INTEGER_CST
5446 && TREE_CODE (subop) != REAL_CST)
5447 /* Note that TREE_CONSTANT isn't enough:
5448 static var addresses are constant but we can't
5449 do arithmetic on them. */
5450 wins = 0;
5452 if (i == 0)
5453 arg0 = op;
5454 else if (i == 1)
5455 arg1 = op;
5459 /* If this is a commutative operation, and ARG0 is a constant, move it
5460 to ARG1 to reduce the number of tests below. */
5461 if (commutative_tree_code (code)
5462 && tree_swap_operands_p (arg0, arg1, true))
5463 return fold (build (code, type, arg1, arg0));
5465 /* Now WINS is set as described above,
5466 ARG0 is the first operand of EXPR,
5467 and ARG1 is the second operand (if it has more than one operand).
5469 First check for cases where an arithmetic operation is applied to a
5470 compound, conditional, or comparison operation. Push the arithmetic
5471 operation inside the compound or conditional to see if any folding
5472 can then be done. Convert comparison to conditional for this purpose.
5473 The also optimizes non-constant cases that used to be done in
5474 expand_expr.
5476 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5477 one of the operands is a comparison and the other is a comparison, a
5478 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5479 code below would make the expression more complex. Change it to a
5480 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5481 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5483 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5484 || code == EQ_EXPR || code == NE_EXPR)
5485 && ((truth_value_p (TREE_CODE (arg0))
5486 && (truth_value_p (TREE_CODE (arg1))
5487 || (TREE_CODE (arg1) == BIT_AND_EXPR
5488 && integer_onep (TREE_OPERAND (arg1, 1)))))
5489 || (truth_value_p (TREE_CODE (arg1))
5490 && (truth_value_p (TREE_CODE (arg0))
5491 || (TREE_CODE (arg0) == BIT_AND_EXPR
5492 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5494 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5495 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5496 : TRUTH_XOR_EXPR,
5497 type, arg0, arg1));
5499 if (code == EQ_EXPR)
5500 t = invert_truthvalue (t);
5502 return t;
5505 if (TREE_CODE_CLASS (code) == '1')
5507 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5508 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5509 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5510 else if (TREE_CODE (arg0) == COND_EXPR)
5512 tree arg01 = TREE_OPERAND (arg0, 1);
5513 tree arg02 = TREE_OPERAND (arg0, 2);
5514 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5515 arg01 = fold (build1 (code, type, arg01));
5516 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5517 arg02 = fold (build1 (code, type, arg02));
5518 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5519 arg01, arg02));
5521 /* If this was a conversion, and all we did was to move into
5522 inside the COND_EXPR, bring it back out. But leave it if
5523 it is a conversion from integer to integer and the
5524 result precision is no wider than a word since such a
5525 conversion is cheap and may be optimized away by combine,
5526 while it couldn't if it were outside the COND_EXPR. Then return
5527 so we don't get into an infinite recursion loop taking the
5528 conversion out and then back in. */
5530 if ((code == NOP_EXPR || code == CONVERT_EXPR
5531 || code == NON_LVALUE_EXPR)
5532 && TREE_CODE (t) == COND_EXPR
5533 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5534 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5535 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5536 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5537 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5538 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5539 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5540 && (INTEGRAL_TYPE_P
5541 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5542 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5543 t = build1 (code, type,
5544 build (COND_EXPR,
5545 TREE_TYPE (TREE_OPERAND
5546 (TREE_OPERAND (t, 1), 0)),
5547 TREE_OPERAND (t, 0),
5548 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5549 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5550 return t;
5552 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5553 return fold (build (COND_EXPR, type, arg0,
5554 fold (build1 (code, type, integer_one_node)),
5555 fold (build1 (code, type, integer_zero_node))));
5557 else if (TREE_CODE_CLASS (code) == '<'
5558 && TREE_CODE (arg0) == COMPOUND_EXPR)
5559 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5560 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5561 else if (TREE_CODE_CLASS (code) == '<'
5562 && TREE_CODE (arg1) == COMPOUND_EXPR)
5563 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5564 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5565 else if (TREE_CODE_CLASS (code) == '2'
5566 || TREE_CODE_CLASS (code) == '<')
5568 if (TREE_CODE (arg1) == COMPOUND_EXPR
5569 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5570 && ! TREE_SIDE_EFFECTS (arg0))
5571 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5572 fold (build (code, type,
5573 arg0, TREE_OPERAND (arg1, 1))));
5574 else if ((TREE_CODE (arg1) == COND_EXPR
5575 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5576 && TREE_CODE_CLASS (code) != '<'))
5577 && (TREE_CODE (arg0) != COND_EXPR
5578 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5579 && (! TREE_SIDE_EFFECTS (arg0)
5580 || ((*lang_hooks.decls.global_bindings_p) () == 0
5581 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5582 return
5583 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5584 /*cond_first_p=*/0);
5585 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5586 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5587 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5588 else if ((TREE_CODE (arg0) == COND_EXPR
5589 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5590 && TREE_CODE_CLASS (code) != '<'))
5591 && (TREE_CODE (arg1) != COND_EXPR
5592 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5593 && (! TREE_SIDE_EFFECTS (arg1)
5594 || ((*lang_hooks.decls.global_bindings_p) () == 0
5595 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5596 return
5597 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5598 /*cond_first_p=*/1);
5601 switch (code)
5603 case INTEGER_CST:
5604 case REAL_CST:
5605 case VECTOR_CST:
5606 case STRING_CST:
5607 case COMPLEX_CST:
5608 case CONSTRUCTOR:
5609 return t;
5611 case CONST_DECL:
5612 return fold (DECL_INITIAL (t));
5614 case NOP_EXPR:
5615 case FLOAT_EXPR:
5616 case CONVERT_EXPR:
5617 case FIX_TRUNC_EXPR:
5618 case FIX_CEIL_EXPR:
5619 case FIX_FLOOR_EXPR:
5620 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5621 return TREE_OPERAND (t, 0);
5623 /* Handle cases of two conversions in a row. */
5624 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5625 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5627 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5628 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5629 tree final_type = TREE_TYPE (t);
5630 int inside_int = INTEGRAL_TYPE_P (inside_type);
5631 int inside_ptr = POINTER_TYPE_P (inside_type);
5632 int inside_float = FLOAT_TYPE_P (inside_type);
5633 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5634 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5635 int inter_int = INTEGRAL_TYPE_P (inter_type);
5636 int inter_ptr = POINTER_TYPE_P (inter_type);
5637 int inter_float = FLOAT_TYPE_P (inter_type);
5638 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5639 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5640 int final_int = INTEGRAL_TYPE_P (final_type);
5641 int final_ptr = POINTER_TYPE_P (final_type);
5642 int final_float = FLOAT_TYPE_P (final_type);
5643 unsigned int final_prec = TYPE_PRECISION (final_type);
5644 int final_unsignedp = TREE_UNSIGNED (final_type);
5646 /* In addition to the cases of two conversions in a row
5647 handled below, if we are converting something to its own
5648 type via an object of identical or wider precision, neither
5649 conversion is needed. */
5650 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5651 && ((inter_int && final_int) || (inter_float && final_float))
5652 && inter_prec >= final_prec)
5653 return fold (build1 (code, final_type,
5654 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5656 /* Likewise, if the intermediate and final types are either both
5657 float or both integer, we don't need the middle conversion if
5658 it is wider than the final type and doesn't change the signedness
5659 (for integers). Avoid this if the final type is a pointer
5660 since then we sometimes need the inner conversion. Likewise if
5661 the outer has a precision not equal to the size of its mode. */
5662 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5663 || (inter_float && inside_float))
5664 && inter_prec >= inside_prec
5665 && (inter_float || inter_unsignedp == inside_unsignedp)
5666 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5667 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5668 && ! final_ptr)
5669 return fold (build1 (code, final_type,
5670 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5672 /* If we have a sign-extension of a zero-extended value, we can
5673 replace that by a single zero-extension. */
5674 if (inside_int && inter_int && final_int
5675 && inside_prec < inter_prec && inter_prec < final_prec
5676 && inside_unsignedp && !inter_unsignedp)
5677 return fold (build1 (code, final_type,
5678 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5680 /* Two conversions in a row are not needed unless:
5681 - some conversion is floating-point (overstrict for now), or
5682 - the intermediate type is narrower than both initial and
5683 final, or
5684 - the intermediate type and innermost type differ in signedness,
5685 and the outermost type is wider than the intermediate, or
5686 - the initial type is a pointer type and the precisions of the
5687 intermediate and final types differ, or
5688 - the final type is a pointer type and the precisions of the
5689 initial and intermediate types differ. */
5690 if (! inside_float && ! inter_float && ! final_float
5691 && (inter_prec > inside_prec || inter_prec > final_prec)
5692 && ! (inside_int && inter_int
5693 && inter_unsignedp != inside_unsignedp
5694 && inter_prec < final_prec)
5695 && ((inter_unsignedp && inter_prec > inside_prec)
5696 == (final_unsignedp && final_prec > inter_prec))
5697 && ! (inside_ptr && inter_prec != final_prec)
5698 && ! (final_ptr && inside_prec != inter_prec)
5699 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5700 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5701 && ! final_ptr)
5702 return fold (build1 (code, final_type,
5703 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5706 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5707 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5708 /* Detect assigning a bitfield. */
5709 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5710 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5712 /* Don't leave an assignment inside a conversion
5713 unless assigning a bitfield. */
5714 tree prev = TREE_OPERAND (t, 0);
5715 if (t == orig_t)
5716 t = copy_node (t);
5717 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5718 /* First do the assignment, then return converted constant. */
5719 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5720 TREE_USED (t) = 1;
5721 return t;
5724 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5725 constants (if x has signed type, the sign bit cannot be set
5726 in c). This folds extension into the BIT_AND_EXPR. */
5727 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5728 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5729 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5730 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5732 tree and = TREE_OPERAND (t, 0);
5733 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5734 int change = 0;
5736 if (TREE_UNSIGNED (TREE_TYPE (and))
5737 || (TYPE_PRECISION (TREE_TYPE (t))
5738 <= TYPE_PRECISION (TREE_TYPE (and))))
5739 change = 1;
5740 else if (TYPE_PRECISION (TREE_TYPE (and1))
5741 <= HOST_BITS_PER_WIDE_INT
5742 && host_integerp (and1, 1))
5744 unsigned HOST_WIDE_INT cst;
5746 cst = tree_low_cst (and1, 1);
5747 cst &= (HOST_WIDE_INT) -1
5748 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5749 change = (cst == 0);
5750 #ifdef LOAD_EXTEND_OP
5751 if (change
5752 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5753 == ZERO_EXTEND))
5755 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5756 and0 = fold_convert (uns, and0);
5757 and1 = fold_convert (uns, and1);
5759 #endif
5761 if (change)
5762 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5763 fold_convert (TREE_TYPE (t), and0),
5764 fold_convert (TREE_TYPE (t), and1)));
5767 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5768 return tem ? tem : t;
5770 case VIEW_CONVERT_EXPR:
5771 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5772 return build1 (VIEW_CONVERT_EXPR, type,
5773 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5774 return t;
5776 case COMPONENT_REF:
5777 if (TREE_CODE (arg0) == CONSTRUCTOR
5778 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5780 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5781 if (m)
5782 t = TREE_VALUE (m);
5784 return t;
5786 case RANGE_EXPR:
5787 if (TREE_CONSTANT (t) != wins)
5789 if (t == orig_t)
5790 t = copy_node (t);
5791 TREE_CONSTANT (t) = wins;
5793 return t;
5795 case NEGATE_EXPR:
5796 if (negate_expr_p (arg0))
5797 return fold_convert (type, negate_expr (arg0));
5798 return t;
5800 case ABS_EXPR:
5801 if (wins)
5803 if (TREE_CODE (arg0) == INTEGER_CST)
5805 /* If the value is unsigned, then the absolute value is
5806 the same as the ordinary value. */
5807 if (TREE_UNSIGNED (type))
5808 return arg0;
5809 /* Similarly, if the value is non-negative. */
5810 else if (INT_CST_LT (integer_minus_one_node, arg0))
5811 return arg0;
5812 /* If the value is negative, then the absolute value is
5813 its negation. */
5814 else
5816 unsigned HOST_WIDE_INT low;
5817 HOST_WIDE_INT high;
5818 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5819 TREE_INT_CST_HIGH (arg0),
5820 &low, &high);
5821 t = build_int_2 (low, high);
5822 TREE_TYPE (t) = type;
5823 TREE_OVERFLOW (t)
5824 = (TREE_OVERFLOW (arg0)
5825 | force_fit_type (t, overflow));
5826 TREE_CONSTANT_OVERFLOW (t)
5827 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5830 else if (TREE_CODE (arg0) == REAL_CST)
5832 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5833 t = build_real (type,
5834 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5837 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5838 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5839 /* Convert fabs((double)float) into (double)fabsf(float). */
5840 else if (TREE_CODE (arg0) == NOP_EXPR
5841 && TREE_CODE (type) == REAL_TYPE)
5843 tree targ0 = strip_float_extensions (arg0);
5844 if (targ0 != arg0)
5845 return fold_convert (type, fold (build1 (ABS_EXPR,
5846 TREE_TYPE (targ0),
5847 targ0)));
5849 else if (tree_expr_nonnegative_p (arg0))
5850 return arg0;
5851 return t;
5853 case CONJ_EXPR:
5854 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5855 return fold_convert (type, arg0);
5856 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5857 return build (COMPLEX_EXPR, type,
5858 TREE_OPERAND (arg0, 0),
5859 negate_expr (TREE_OPERAND (arg0, 1)));
5860 else if (TREE_CODE (arg0) == COMPLEX_CST)
5861 return build_complex (type, TREE_REALPART (arg0),
5862 negate_expr (TREE_IMAGPART (arg0)));
5863 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5864 return fold (build (TREE_CODE (arg0), type,
5865 fold (build1 (CONJ_EXPR, type,
5866 TREE_OPERAND (arg0, 0))),
5867 fold (build1 (CONJ_EXPR,
5868 type, TREE_OPERAND (arg0, 1)))));
5869 else if (TREE_CODE (arg0) == CONJ_EXPR)
5870 return TREE_OPERAND (arg0, 0);
5871 return t;
5873 case BIT_NOT_EXPR:
5874 if (wins)
5876 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5877 ~ TREE_INT_CST_HIGH (arg0));
5878 TREE_TYPE (t) = type;
5879 force_fit_type (t, 0);
5880 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5881 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5883 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5884 return TREE_OPERAND (arg0, 0);
5885 return t;
5887 case PLUS_EXPR:
5888 /* A + (-B) -> A - B */
5889 if (TREE_CODE (arg1) == NEGATE_EXPR)
5890 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5891 /* (-A) + B -> B - A */
5892 if (TREE_CODE (arg0) == NEGATE_EXPR)
5893 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5894 else if (! FLOAT_TYPE_P (type))
5896 if (integer_zerop (arg1))
5897 return non_lvalue (fold_convert (type, arg0));
5899 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5900 with a constant, and the two constants have no bits in common,
5901 we should treat this as a BIT_IOR_EXPR since this may produce more
5902 simplifications. */
5903 if (TREE_CODE (arg0) == BIT_AND_EXPR
5904 && TREE_CODE (arg1) == BIT_AND_EXPR
5905 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5906 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5907 && integer_zerop (const_binop (BIT_AND_EXPR,
5908 TREE_OPERAND (arg0, 1),
5909 TREE_OPERAND (arg1, 1), 0)))
5911 code = BIT_IOR_EXPR;
5912 goto bit_ior;
5915 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5916 (plus (plus (mult) (mult)) (foo)) so that we can
5917 take advantage of the factoring cases below. */
5918 if ((TREE_CODE (arg0) == PLUS_EXPR
5919 && TREE_CODE (arg1) == MULT_EXPR)
5920 || (TREE_CODE (arg1) == PLUS_EXPR
5921 && TREE_CODE (arg0) == MULT_EXPR))
5923 tree parg0, parg1, parg, marg;
5925 if (TREE_CODE (arg0) == PLUS_EXPR)
5926 parg = arg0, marg = arg1;
5927 else
5928 parg = arg1, marg = arg0;
5929 parg0 = TREE_OPERAND (parg, 0);
5930 parg1 = TREE_OPERAND (parg, 1);
5931 STRIP_NOPS (parg0);
5932 STRIP_NOPS (parg1);
5934 if (TREE_CODE (parg0) == MULT_EXPR
5935 && TREE_CODE (parg1) != MULT_EXPR)
5936 return fold (build (PLUS_EXPR, type,
5937 fold (build (PLUS_EXPR, type,
5938 fold_convert (type, parg0),
5939 fold_convert (type, marg))),
5940 fold_convert (type, parg1)));
5941 if (TREE_CODE (parg0) != MULT_EXPR
5942 && TREE_CODE (parg1) == MULT_EXPR)
5943 return fold (build (PLUS_EXPR, type,
5944 fold (build (PLUS_EXPR, type,
5945 fold_convert (type, parg1),
5946 fold_convert (type, marg))),
5947 fold_convert (type, parg0)));
5950 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5952 tree arg00, arg01, arg10, arg11;
5953 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5955 /* (A * C) + (B * C) -> (A+B) * C.
5956 We are most concerned about the case where C is a constant,
5957 but other combinations show up during loop reduction. Since
5958 it is not difficult, try all four possibilities. */
5960 arg00 = TREE_OPERAND (arg0, 0);
5961 arg01 = TREE_OPERAND (arg0, 1);
5962 arg10 = TREE_OPERAND (arg1, 0);
5963 arg11 = TREE_OPERAND (arg1, 1);
5964 same = NULL_TREE;
5966 if (operand_equal_p (arg01, arg11, 0))
5967 same = arg01, alt0 = arg00, alt1 = arg10;
5968 else if (operand_equal_p (arg00, arg10, 0))
5969 same = arg00, alt0 = arg01, alt1 = arg11;
5970 else if (operand_equal_p (arg00, arg11, 0))
5971 same = arg00, alt0 = arg01, alt1 = arg10;
5972 else if (operand_equal_p (arg01, arg10, 0))
5973 same = arg01, alt0 = arg00, alt1 = arg11;
5975 /* No identical multiplicands; see if we can find a common
5976 power-of-two factor in non-power-of-two multiplies. This
5977 can help in multi-dimensional array access. */
5978 else if (TREE_CODE (arg01) == INTEGER_CST
5979 && TREE_CODE (arg11) == INTEGER_CST
5980 && TREE_INT_CST_HIGH (arg01) == 0
5981 && TREE_INT_CST_HIGH (arg11) == 0)
5983 HOST_WIDE_INT int01, int11, tmp;
5984 int01 = TREE_INT_CST_LOW (arg01);
5985 int11 = TREE_INT_CST_LOW (arg11);
5987 /* Move min of absolute values to int11. */
5988 if ((int01 >= 0 ? int01 : -int01)
5989 < (int11 >= 0 ? int11 : -int11))
5991 tmp = int01, int01 = int11, int11 = tmp;
5992 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5993 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5996 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5998 alt0 = fold (build (MULT_EXPR, type, arg00,
5999 build_int_2 (int01 / int11, 0)));
6000 alt1 = arg10;
6001 same = arg11;
6005 if (same)
6006 return fold (build (MULT_EXPR, type,
6007 fold (build (PLUS_EXPR, type, alt0, alt1)),
6008 same));
6011 else
6013 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6014 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6015 return non_lvalue (fold_convert (type, arg0));
6017 /* Likewise if the operands are reversed. */
6018 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6019 return non_lvalue (fold_convert (type, arg1));
6021 /* Convert x+x into x*2.0. */
6022 if (operand_equal_p (arg0, arg1, 0)
6023 && SCALAR_FLOAT_TYPE_P (type))
6024 return fold (build (MULT_EXPR, type, arg0,
6025 build_real (type, dconst2)));
6027 /* Convert x*c+x into x*(c+1). */
6028 if (flag_unsafe_math_optimizations
6029 && TREE_CODE (arg0) == MULT_EXPR
6030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6031 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6032 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6034 REAL_VALUE_TYPE c;
6036 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6037 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6038 return fold (build (MULT_EXPR, type, arg1,
6039 build_real (type, c)));
6042 /* Convert x+x*c into x*(c+1). */
6043 if (flag_unsafe_math_optimizations
6044 && TREE_CODE (arg1) == MULT_EXPR
6045 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6046 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6047 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6049 REAL_VALUE_TYPE c;
6051 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6052 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6053 return fold (build (MULT_EXPR, type, arg0,
6054 build_real (type, c)));
6057 /* Convert x*c1+x*c2 into x*(c1+c2). */
6058 if (flag_unsafe_math_optimizations
6059 && TREE_CODE (arg0) == MULT_EXPR
6060 && TREE_CODE (arg1) == MULT_EXPR
6061 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6062 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6063 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6064 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6065 && operand_equal_p (TREE_OPERAND (arg0, 0),
6066 TREE_OPERAND (arg1, 0), 0))
6068 REAL_VALUE_TYPE c1, c2;
6070 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6071 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6072 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6073 return fold (build (MULT_EXPR, type,
6074 TREE_OPERAND (arg0, 0),
6075 build_real (type, c1)));
6079 bit_rotate:
6080 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6081 is a rotate of A by C1 bits. */
6082 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6083 is a rotate of A by B bits. */
6085 enum tree_code code0, code1;
6086 code0 = TREE_CODE (arg0);
6087 code1 = TREE_CODE (arg1);
6088 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6089 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6090 && operand_equal_p (TREE_OPERAND (arg0, 0),
6091 TREE_OPERAND (arg1, 0), 0)
6092 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6094 tree tree01, tree11;
6095 enum tree_code code01, code11;
6097 tree01 = TREE_OPERAND (arg0, 1);
6098 tree11 = TREE_OPERAND (arg1, 1);
6099 STRIP_NOPS (tree01);
6100 STRIP_NOPS (tree11);
6101 code01 = TREE_CODE (tree01);
6102 code11 = TREE_CODE (tree11);
6103 if (code01 == INTEGER_CST
6104 && code11 == INTEGER_CST
6105 && TREE_INT_CST_HIGH (tree01) == 0
6106 && TREE_INT_CST_HIGH (tree11) == 0
6107 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6108 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6109 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6110 code0 == LSHIFT_EXPR ? tree01 : tree11);
6111 else if (code11 == MINUS_EXPR)
6113 tree tree110, tree111;
6114 tree110 = TREE_OPERAND (tree11, 0);
6115 tree111 = TREE_OPERAND (tree11, 1);
6116 STRIP_NOPS (tree110);
6117 STRIP_NOPS (tree111);
6118 if (TREE_CODE (tree110) == INTEGER_CST
6119 && 0 == compare_tree_int (tree110,
6120 TYPE_PRECISION
6121 (TREE_TYPE (TREE_OPERAND
6122 (arg0, 0))))
6123 && operand_equal_p (tree01, tree111, 0))
6124 return build ((code0 == LSHIFT_EXPR
6125 ? LROTATE_EXPR
6126 : RROTATE_EXPR),
6127 type, TREE_OPERAND (arg0, 0), tree01);
6129 else if (code01 == MINUS_EXPR)
6131 tree tree010, tree011;
6132 tree010 = TREE_OPERAND (tree01, 0);
6133 tree011 = TREE_OPERAND (tree01, 1);
6134 STRIP_NOPS (tree010);
6135 STRIP_NOPS (tree011);
6136 if (TREE_CODE (tree010) == INTEGER_CST
6137 && 0 == compare_tree_int (tree010,
6138 TYPE_PRECISION
6139 (TREE_TYPE (TREE_OPERAND
6140 (arg0, 0))))
6141 && operand_equal_p (tree11, tree011, 0))
6142 return build ((code0 != LSHIFT_EXPR
6143 ? LROTATE_EXPR
6144 : RROTATE_EXPR),
6145 type, TREE_OPERAND (arg0, 0), tree11);
6150 associate:
6151 /* In most languages, can't associate operations on floats through
6152 parentheses. Rather than remember where the parentheses were, we
6153 don't associate floats at all, unless the user has specified
6154 -funsafe-math-optimizations. */
6156 if (! wins
6157 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6159 tree var0, con0, lit0, minus_lit0;
6160 tree var1, con1, lit1, minus_lit1;
6162 /* Split both trees into variables, constants, and literals. Then
6163 associate each group together, the constants with literals,
6164 then the result with variables. This increases the chances of
6165 literals being recombined later and of generating relocatable
6166 expressions for the sum of a constant and literal. */
6167 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6168 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6169 code == MINUS_EXPR);
6171 /* Only do something if we found more than two objects. Otherwise,
6172 nothing has changed and we risk infinite recursion. */
6173 if (2 < ((var0 != 0) + (var1 != 0)
6174 + (con0 != 0) + (con1 != 0)
6175 + (lit0 != 0) + (lit1 != 0)
6176 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6178 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6179 if (code == MINUS_EXPR)
6180 code = PLUS_EXPR;
6182 var0 = associate_trees (var0, var1, code, type);
6183 con0 = associate_trees (con0, con1, code, type);
6184 lit0 = associate_trees (lit0, lit1, code, type);
6185 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6187 /* Preserve the MINUS_EXPR if the negative part of the literal is
6188 greater than the positive part. Otherwise, the multiplicative
6189 folding code (i.e extract_muldiv) may be fooled in case
6190 unsigned constants are subtracted, like in the following
6191 example: ((X*2 + 4) - 8U)/2. */
6192 if (minus_lit0 && lit0)
6194 if (TREE_CODE (lit0) == INTEGER_CST
6195 && TREE_CODE (minus_lit0) == INTEGER_CST
6196 && tree_int_cst_lt (lit0, minus_lit0))
6198 minus_lit0 = associate_trees (minus_lit0, lit0,
6199 MINUS_EXPR, type);
6200 lit0 = 0;
6202 else
6204 lit0 = associate_trees (lit0, minus_lit0,
6205 MINUS_EXPR, type);
6206 minus_lit0 = 0;
6209 if (minus_lit0)
6211 if (con0 == 0)
6212 return fold_convert (type,
6213 associate_trees (var0, minus_lit0,
6214 MINUS_EXPR, type));
6215 else
6217 con0 = associate_trees (con0, minus_lit0,
6218 MINUS_EXPR, type);
6219 return fold_convert (type,
6220 associate_trees (var0, con0,
6221 PLUS_EXPR, type));
6225 con0 = associate_trees (con0, lit0, code, type);
6226 return fold_convert (type, associate_trees (var0, con0,
6227 code, type));
6231 binary:
6232 if (wins)
6233 t1 = const_binop (code, arg0, arg1, 0);
6234 if (t1 != NULL_TREE)
6236 /* The return value should always have
6237 the same type as the original expression. */
6238 if (TREE_TYPE (t1) != TREE_TYPE (t))
6239 t1 = fold_convert (TREE_TYPE (t), t1);
6241 return t1;
6243 return t;
6245 case MINUS_EXPR:
6246 /* A - (-B) -> A + B */
6247 if (TREE_CODE (arg1) == NEGATE_EXPR)
6248 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6249 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6250 if (TREE_CODE (arg0) == NEGATE_EXPR
6251 && (FLOAT_TYPE_P (type)
6252 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6253 && negate_expr_p (arg1)
6254 && reorder_operands_p (arg0, arg1))
6255 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6256 TREE_OPERAND (arg0, 0)));
6258 if (! FLOAT_TYPE_P (type))
6260 if (! wins && integer_zerop (arg0))
6261 return negate_expr (fold_convert (type, arg1));
6262 if (integer_zerop (arg1))
6263 return non_lvalue (fold_convert (type, arg0));
6265 /* Fold A - (A & B) into ~B & A. */
6266 if (!TREE_SIDE_EFFECTS (arg0)
6267 && TREE_CODE (arg1) == BIT_AND_EXPR)
6269 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6270 return fold (build (BIT_AND_EXPR, type,
6271 fold (build1 (BIT_NOT_EXPR, type,
6272 TREE_OPERAND (arg1, 0))),
6273 arg0));
6274 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6275 return fold (build (BIT_AND_EXPR, type,
6276 fold (build1 (BIT_NOT_EXPR, type,
6277 TREE_OPERAND (arg1, 1))),
6278 arg0));
6281 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6282 any power of 2 minus 1. */
6283 if (TREE_CODE (arg0) == BIT_AND_EXPR
6284 && TREE_CODE (arg1) == BIT_AND_EXPR
6285 && operand_equal_p (TREE_OPERAND (arg0, 0),
6286 TREE_OPERAND (arg1, 0), 0))
6288 tree mask0 = TREE_OPERAND (arg0, 1);
6289 tree mask1 = TREE_OPERAND (arg1, 1);
6290 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6292 if (operand_equal_p (tem, mask1, 0))
6294 tem = fold (build (BIT_XOR_EXPR, type,
6295 TREE_OPERAND (arg0, 0), mask1));
6296 return fold (build (MINUS_EXPR, type, tem, mask1));
6301 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6302 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6303 return non_lvalue (fold_convert (type, arg0));
6305 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6306 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6307 (-ARG1 + ARG0) reduces to -ARG1. */
6308 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6309 return negate_expr (fold_convert (type, arg1));
6311 /* Fold &x - &x. This can happen from &x.foo - &x.
6312 This is unsafe for certain floats even in non-IEEE formats.
6313 In IEEE, it is unsafe because it does wrong for NaNs.
6314 Also note that operand_equal_p is always false if an operand
6315 is volatile. */
6317 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6318 && operand_equal_p (arg0, arg1, 0))
6319 return fold_convert (type, integer_zero_node);
6321 /* A - B -> A + (-B) if B is easily negatable. */
6322 if (!wins && negate_expr_p (arg1)
6323 && (FLOAT_TYPE_P (type)
6324 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6325 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6327 if (TREE_CODE (arg0) == MULT_EXPR
6328 && TREE_CODE (arg1) == MULT_EXPR
6329 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6331 /* (A * C) - (B * C) -> (A-B) * C. */
6332 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6333 TREE_OPERAND (arg1, 1), 0))
6334 return fold (build (MULT_EXPR, type,
6335 fold (build (MINUS_EXPR, type,
6336 TREE_OPERAND (arg0, 0),
6337 TREE_OPERAND (arg1, 0))),
6338 TREE_OPERAND (arg0, 1)));
6339 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6340 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6341 TREE_OPERAND (arg1, 0), 0))
6342 return fold (build (MULT_EXPR, type,
6343 TREE_OPERAND (arg0, 0),
6344 fold (build (MINUS_EXPR, type,
6345 TREE_OPERAND (arg0, 1),
6346 TREE_OPERAND (arg1, 1)))));
6349 goto associate;
6351 case MULT_EXPR:
6352 /* (-A) * (-B) -> A * B */
6353 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6354 return fold (build (MULT_EXPR, type,
6355 TREE_OPERAND (arg0, 0),
6356 negate_expr (arg1)));
6357 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6358 return fold (build (MULT_EXPR, type,
6359 negate_expr (arg0),
6360 TREE_OPERAND (arg1, 0)));
6362 if (! FLOAT_TYPE_P (type))
6364 if (integer_zerop (arg1))
6365 return omit_one_operand (type, arg1, arg0);
6366 if (integer_onep (arg1))
6367 return non_lvalue (fold_convert (type, arg0));
6369 /* (a * (1 << b)) is (a << b) */
6370 if (TREE_CODE (arg1) == LSHIFT_EXPR
6371 && integer_onep (TREE_OPERAND (arg1, 0)))
6372 return fold (build (LSHIFT_EXPR, type, arg0,
6373 TREE_OPERAND (arg1, 1)));
6374 if (TREE_CODE (arg0) == LSHIFT_EXPR
6375 && integer_onep (TREE_OPERAND (arg0, 0)))
6376 return fold (build (LSHIFT_EXPR, type, arg1,
6377 TREE_OPERAND (arg0, 1)));
6379 if (TREE_CODE (arg1) == INTEGER_CST
6380 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6381 fold_convert (type, arg1),
6382 code, NULL_TREE)))
6383 return fold_convert (type, tem);
6386 else
6388 /* Maybe fold x * 0 to 0. The expressions aren't the same
6389 when x is NaN, since x * 0 is also NaN. Nor are they the
6390 same in modes with signed zeros, since multiplying a
6391 negative value by 0 gives -0, not +0. */
6392 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6393 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6394 && real_zerop (arg1))
6395 return omit_one_operand (type, arg1, arg0);
6396 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6397 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6398 && real_onep (arg1))
6399 return non_lvalue (fold_convert (type, arg0));
6401 /* Transform x * -1.0 into -x. */
6402 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6403 && real_minus_onep (arg1))
6404 return fold (build1 (NEGATE_EXPR, type, arg0));
6406 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6407 if (flag_unsafe_math_optimizations
6408 && TREE_CODE (arg0) == RDIV_EXPR
6409 && TREE_CODE (arg1) == REAL_CST
6410 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6412 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6413 arg1, 0);
6414 if (tem)
6415 return fold (build (RDIV_EXPR, type, tem,
6416 TREE_OPERAND (arg0, 1)));
6419 if (flag_unsafe_math_optimizations)
6421 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6422 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6424 /* Optimizations of sqrt(...)*sqrt(...). */
6425 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6426 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6427 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6429 tree sqrtfn, arg, arglist;
6430 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6431 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6433 /* Optimize sqrt(x)*sqrt(x) as x. */
6434 if (operand_equal_p (arg00, arg10, 0)
6435 && ! HONOR_SNANS (TYPE_MODE (type)))
6436 return arg00;
6438 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6439 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6440 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6441 arglist = build_tree_list (NULL_TREE, arg);
6442 return build_function_call_expr (sqrtfn, arglist);
6445 /* Optimize expN(x)*expN(y) as expN(x+y). */
6446 if (fcode0 == fcode1
6447 && (fcode0 == BUILT_IN_EXP
6448 || fcode0 == BUILT_IN_EXPF
6449 || fcode0 == BUILT_IN_EXPL
6450 || fcode0 == BUILT_IN_EXP2
6451 || fcode0 == BUILT_IN_EXP2F
6452 || fcode0 == BUILT_IN_EXP2L
6453 || fcode0 == BUILT_IN_EXP10
6454 || fcode0 == BUILT_IN_EXP10F
6455 || fcode0 == BUILT_IN_EXP10L
6456 || fcode0 == BUILT_IN_POW10
6457 || fcode0 == BUILT_IN_POW10F
6458 || fcode0 == BUILT_IN_POW10L))
6460 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6461 tree arg = build (PLUS_EXPR, type,
6462 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6463 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6464 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6465 return build_function_call_expr (expfn, arglist);
6468 /* Optimizations of pow(...)*pow(...). */
6469 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6470 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6471 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6473 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6474 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6475 1)));
6476 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6477 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6478 1)));
6480 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6481 if (operand_equal_p (arg01, arg11, 0))
6483 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6484 tree arg = build (MULT_EXPR, type, arg00, arg10);
6485 tree arglist = tree_cons (NULL_TREE, fold (arg),
6486 build_tree_list (NULL_TREE,
6487 arg01));
6488 return build_function_call_expr (powfn, arglist);
6491 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6492 if (operand_equal_p (arg00, arg10, 0))
6494 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6495 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6496 tree arglist = tree_cons (NULL_TREE, arg00,
6497 build_tree_list (NULL_TREE,
6498 arg));
6499 return build_function_call_expr (powfn, arglist);
6503 /* Optimize tan(x)*cos(x) as sin(x). */
6504 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6505 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6506 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6507 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6508 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6509 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6510 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6511 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6513 tree sinfn;
6515 switch (fcode0)
6517 case BUILT_IN_TAN:
6518 case BUILT_IN_COS:
6519 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6520 break;
6521 case BUILT_IN_TANF:
6522 case BUILT_IN_COSF:
6523 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6524 break;
6525 case BUILT_IN_TANL:
6526 case BUILT_IN_COSL:
6527 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6528 break;
6529 default:
6530 sinfn = NULL_TREE;
6533 if (sinfn != NULL_TREE)
6534 return build_function_call_expr (sinfn,
6535 TREE_OPERAND (arg0, 1));
6538 /* Optimize x*pow(x,c) as pow(x,c+1). */
6539 if (fcode1 == BUILT_IN_POW
6540 || fcode1 == BUILT_IN_POWF
6541 || fcode1 == BUILT_IN_POWL)
6543 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6544 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6545 1)));
6546 if (TREE_CODE (arg11) == REAL_CST
6547 && ! TREE_CONSTANT_OVERFLOW (arg11)
6548 && operand_equal_p (arg0, arg10, 0))
6550 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6551 REAL_VALUE_TYPE c;
6552 tree arg, arglist;
6554 c = TREE_REAL_CST (arg11);
6555 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6556 arg = build_real (type, c);
6557 arglist = build_tree_list (NULL_TREE, arg);
6558 arglist = tree_cons (NULL_TREE, arg0, arglist);
6559 return build_function_call_expr (powfn, arglist);
6563 /* Optimize pow(x,c)*x as pow(x,c+1). */
6564 if (fcode0 == BUILT_IN_POW
6565 || fcode0 == BUILT_IN_POWF
6566 || fcode0 == BUILT_IN_POWL)
6568 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6569 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6570 1)));
6571 if (TREE_CODE (arg01) == REAL_CST
6572 && ! TREE_CONSTANT_OVERFLOW (arg01)
6573 && operand_equal_p (arg1, arg00, 0))
6575 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6576 REAL_VALUE_TYPE c;
6577 tree arg, arglist;
6579 c = TREE_REAL_CST (arg01);
6580 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6581 arg = build_real (type, c);
6582 arglist = build_tree_list (NULL_TREE, arg);
6583 arglist = tree_cons (NULL_TREE, arg1, arglist);
6584 return build_function_call_expr (powfn, arglist);
6588 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6589 if (! optimize_size
6590 && operand_equal_p (arg0, arg1, 0))
6592 tree powfn;
6594 if (type == double_type_node)
6595 powfn = implicit_built_in_decls[BUILT_IN_POW];
6596 else if (type == float_type_node)
6597 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6598 else if (type == long_double_type_node)
6599 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6600 else
6601 powfn = NULL_TREE;
6603 if (powfn)
6605 tree arg = build_real (type, dconst2);
6606 tree arglist = build_tree_list (NULL_TREE, arg);
6607 arglist = tree_cons (NULL_TREE, arg0, arglist);
6608 return build_function_call_expr (powfn, arglist);
6613 goto associate;
6615 case BIT_IOR_EXPR:
6616 bit_ior:
6617 if (integer_all_onesp (arg1))
6618 return omit_one_operand (type, arg1, arg0);
6619 if (integer_zerop (arg1))
6620 return non_lvalue (fold_convert (type, arg0));
6621 t1 = distribute_bit_expr (code, type, arg0, arg1);
6622 if (t1 != NULL_TREE)
6623 return t1;
6625 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6627 This results in more efficient code for machines without a NAND
6628 instruction. Combine will canonicalize to the first form
6629 which will allow use of NAND instructions provided by the
6630 backend if they exist. */
6631 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6632 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6634 return fold (build1 (BIT_NOT_EXPR, type,
6635 build (BIT_AND_EXPR, type,
6636 TREE_OPERAND (arg0, 0),
6637 TREE_OPERAND (arg1, 0))));
6640 /* See if this can be simplified into a rotate first. If that
6641 is unsuccessful continue in the association code. */
6642 goto bit_rotate;
6644 case BIT_XOR_EXPR:
6645 if (integer_zerop (arg1))
6646 return non_lvalue (fold_convert (type, arg0));
6647 if (integer_all_onesp (arg1))
6648 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6650 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6651 with a constant, and the two constants have no bits in common,
6652 we should treat this as a BIT_IOR_EXPR since this may produce more
6653 simplifications. */
6654 if (TREE_CODE (arg0) == BIT_AND_EXPR
6655 && TREE_CODE (arg1) == BIT_AND_EXPR
6656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6657 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6658 && integer_zerop (const_binop (BIT_AND_EXPR,
6659 TREE_OPERAND (arg0, 1),
6660 TREE_OPERAND (arg1, 1), 0)))
6662 code = BIT_IOR_EXPR;
6663 goto bit_ior;
6666 /* See if this can be simplified into a rotate first. If that
6667 is unsuccessful continue in the association code. */
6668 goto bit_rotate;
6670 case BIT_AND_EXPR:
6671 if (integer_all_onesp (arg1))
6672 return non_lvalue (fold_convert (type, arg0));
6673 if (integer_zerop (arg1))
6674 return omit_one_operand (type, arg1, arg0);
6675 t1 = distribute_bit_expr (code, type, arg0, arg1);
6676 if (t1 != NULL_TREE)
6677 return t1;
6678 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6679 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6680 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6682 unsigned int prec
6683 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6685 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6686 && (~TREE_INT_CST_LOW (arg1)
6687 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6688 return fold_convert (type, TREE_OPERAND (arg0, 0));
6691 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6693 This results in more efficient code for machines without a NOR
6694 instruction. Combine will canonicalize to the first form
6695 which will allow use of NOR instructions provided by the
6696 backend if they exist. */
6697 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6698 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6700 return fold (build1 (BIT_NOT_EXPR, type,
6701 build (BIT_IOR_EXPR, type,
6702 TREE_OPERAND (arg0, 0),
6703 TREE_OPERAND (arg1, 0))));
6706 goto associate;
6708 case RDIV_EXPR:
6709 /* Don't touch a floating-point divide by zero unless the mode
6710 of the constant can represent infinity. */
6711 if (TREE_CODE (arg1) == REAL_CST
6712 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6713 && real_zerop (arg1))
6714 return t;
6716 /* (-A) / (-B) -> A / B */
6717 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6718 return fold (build (RDIV_EXPR, type,
6719 TREE_OPERAND (arg0, 0),
6720 negate_expr (arg1)));
6721 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6722 return fold (build (RDIV_EXPR, type,
6723 negate_expr (arg0),
6724 TREE_OPERAND (arg1, 0)));
6726 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6727 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6728 && real_onep (arg1))
6729 return non_lvalue (fold_convert (type, arg0));
6731 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6732 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6733 && real_minus_onep (arg1))
6734 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6736 /* If ARG1 is a constant, we can convert this to a multiply by the
6737 reciprocal. This does not have the same rounding properties,
6738 so only do this if -funsafe-math-optimizations. We can actually
6739 always safely do it if ARG1 is a power of two, but it's hard to
6740 tell if it is or not in a portable manner. */
6741 if (TREE_CODE (arg1) == REAL_CST)
6743 if (flag_unsafe_math_optimizations
6744 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6745 arg1, 0)))
6746 return fold (build (MULT_EXPR, type, arg0, tem));
6747 /* Find the reciprocal if optimizing and the result is exact. */
6748 if (optimize)
6750 REAL_VALUE_TYPE r;
6751 r = TREE_REAL_CST (arg1);
6752 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6754 tem = build_real (type, r);
6755 return fold (build (MULT_EXPR, type, arg0, tem));
6759 /* Convert A/B/C to A/(B*C). */
6760 if (flag_unsafe_math_optimizations
6761 && TREE_CODE (arg0) == RDIV_EXPR)
6762 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6763 fold (build (MULT_EXPR, type,
6764 TREE_OPERAND (arg0, 1), arg1))));
6766 /* Convert A/(B/C) to (A/B)*C. */
6767 if (flag_unsafe_math_optimizations
6768 && TREE_CODE (arg1) == RDIV_EXPR)
6769 return fold (build (MULT_EXPR, type,
6770 fold (build (RDIV_EXPR, type, arg0,
6771 TREE_OPERAND (arg1, 0))),
6772 TREE_OPERAND (arg1, 1)));
6774 /* Convert C1/(X*C2) into (C1/C2)/X. */
6775 if (flag_unsafe_math_optimizations
6776 && TREE_CODE (arg1) == MULT_EXPR
6777 && TREE_CODE (arg0) == REAL_CST
6778 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6780 tree tem = const_binop (RDIV_EXPR, arg0,
6781 TREE_OPERAND (arg1, 1), 0);
6782 if (tem)
6783 return fold (build (RDIV_EXPR, type, tem,
6784 TREE_OPERAND (arg1, 0)));
6787 if (flag_unsafe_math_optimizations)
6789 enum built_in_function fcode = builtin_mathfn_code (arg1);
6790 /* Optimize x/expN(y) into x*expN(-y). */
6791 if (fcode == BUILT_IN_EXP
6792 || fcode == BUILT_IN_EXPF
6793 || fcode == BUILT_IN_EXPL
6794 || fcode == BUILT_IN_EXP2
6795 || fcode == BUILT_IN_EXP2F
6796 || fcode == BUILT_IN_EXP2L
6797 || fcode == BUILT_IN_EXP10
6798 || fcode == BUILT_IN_EXP10F
6799 || fcode == BUILT_IN_EXP10L
6800 || fcode == BUILT_IN_POW10
6801 || fcode == BUILT_IN_POW10F
6802 || fcode == BUILT_IN_POW10L)
6804 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6805 tree arg = build1 (NEGATE_EXPR, type,
6806 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6807 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6808 arg1 = build_function_call_expr (expfn, arglist);
6809 return fold (build (MULT_EXPR, type, arg0, arg1));
6812 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6813 if (fcode == BUILT_IN_POW
6814 || fcode == BUILT_IN_POWF
6815 || fcode == BUILT_IN_POWL)
6817 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6818 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6819 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6820 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6821 tree arglist = tree_cons(NULL_TREE, arg10,
6822 build_tree_list (NULL_TREE, neg11));
6823 arg1 = build_function_call_expr (powfn, arglist);
6824 return fold (build (MULT_EXPR, type, arg0, arg1));
6828 if (flag_unsafe_math_optimizations)
6830 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6831 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6833 /* Optimize sin(x)/cos(x) as tan(x). */
6834 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6835 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6836 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6837 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6838 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6840 tree tanfn;
6842 if (fcode0 == BUILT_IN_SIN)
6843 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6844 else if (fcode0 == BUILT_IN_SINF)
6845 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6846 else if (fcode0 == BUILT_IN_SINL)
6847 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6848 else
6849 tanfn = NULL_TREE;
6851 if (tanfn != NULL_TREE)
6852 return build_function_call_expr (tanfn,
6853 TREE_OPERAND (arg0, 1));
6856 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6857 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6858 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6859 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6860 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6861 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6863 tree tanfn;
6865 if (fcode0 == BUILT_IN_COS)
6866 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6867 else if (fcode0 == BUILT_IN_COSF)
6868 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6869 else if (fcode0 == BUILT_IN_COSL)
6870 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6871 else
6872 tanfn = NULL_TREE;
6874 if (tanfn != NULL_TREE)
6876 tree tmp = TREE_OPERAND (arg0, 1);
6877 tmp = build_function_call_expr (tanfn, tmp);
6878 return fold (build (RDIV_EXPR, type,
6879 build_real (type, dconst1),
6880 tmp));
6884 /* Optimize pow(x,c)/x as pow(x,c-1). */
6885 if (fcode0 == BUILT_IN_POW
6886 || fcode0 == BUILT_IN_POWF
6887 || fcode0 == BUILT_IN_POWL)
6889 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6890 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6891 if (TREE_CODE (arg01) == REAL_CST
6892 && ! TREE_CONSTANT_OVERFLOW (arg01)
6893 && operand_equal_p (arg1, arg00, 0))
6895 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6896 REAL_VALUE_TYPE c;
6897 tree arg, arglist;
6899 c = TREE_REAL_CST (arg01);
6900 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6901 arg = build_real (type, c);
6902 arglist = build_tree_list (NULL_TREE, arg);
6903 arglist = tree_cons (NULL_TREE, arg1, arglist);
6904 return build_function_call_expr (powfn, arglist);
6908 goto binary;
6910 case TRUNC_DIV_EXPR:
6911 case ROUND_DIV_EXPR:
6912 case FLOOR_DIV_EXPR:
6913 case CEIL_DIV_EXPR:
6914 case EXACT_DIV_EXPR:
6915 if (integer_onep (arg1))
6916 return non_lvalue (fold_convert (type, arg0));
6917 if (integer_zerop (arg1))
6918 return t;
6920 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6921 operation, EXACT_DIV_EXPR.
6923 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6924 At one time others generated faster code, it's not clear if they do
6925 after the last round to changes to the DIV code in expmed.c. */
6926 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6927 && multiple_of_p (type, arg0, arg1))
6928 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6930 if (TREE_CODE (arg1) == INTEGER_CST
6931 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6932 code, NULL_TREE)))
6933 return fold_convert (type, tem);
6935 goto binary;
6937 case CEIL_MOD_EXPR:
6938 case FLOOR_MOD_EXPR:
6939 case ROUND_MOD_EXPR:
6940 case TRUNC_MOD_EXPR:
6941 if (integer_onep (arg1))
6942 return omit_one_operand (type, integer_zero_node, arg0);
6943 if (integer_zerop (arg1))
6944 return t;
6946 if (TREE_CODE (arg1) == INTEGER_CST
6947 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6948 code, NULL_TREE)))
6949 return fold_convert (type, tem);
6951 goto binary;
6953 case LROTATE_EXPR:
6954 case RROTATE_EXPR:
6955 if (integer_all_onesp (arg0))
6956 return omit_one_operand (type, arg0, arg1);
6957 goto shift;
6959 case RSHIFT_EXPR:
6960 /* Optimize -1 >> x for arithmetic right shifts. */
6961 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6962 return omit_one_operand (type, arg0, arg1);
6963 /* ... fall through ... */
6965 case LSHIFT_EXPR:
6966 shift:
6967 if (integer_zerop (arg1))
6968 return non_lvalue (fold_convert (type, arg0));
6969 if (integer_zerop (arg0))
6970 return omit_one_operand (type, arg0, arg1);
6972 /* Since negative shift count is not well-defined,
6973 don't try to compute it in the compiler. */
6974 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6975 return t;
6976 /* Rewrite an LROTATE_EXPR by a constant into an
6977 RROTATE_EXPR by a new constant. */
6978 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6980 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6981 tem = fold_convert (TREE_TYPE (arg1), tem);
6982 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6983 return fold (build (RROTATE_EXPR, type, arg0, tem));
6986 /* If we have a rotate of a bit operation with the rotate count and
6987 the second operand of the bit operation both constant,
6988 permute the two operations. */
6989 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6990 && (TREE_CODE (arg0) == BIT_AND_EXPR
6991 || TREE_CODE (arg0) == BIT_IOR_EXPR
6992 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6993 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6994 return fold (build (TREE_CODE (arg0), type,
6995 fold (build (code, type,
6996 TREE_OPERAND (arg0, 0), arg1)),
6997 fold (build (code, type,
6998 TREE_OPERAND (arg0, 1), arg1))));
7000 /* Two consecutive rotates adding up to the width of the mode can
7001 be ignored. */
7002 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7003 && TREE_CODE (arg0) == RROTATE_EXPR
7004 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7005 && TREE_INT_CST_HIGH (arg1) == 0
7006 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7007 && ((TREE_INT_CST_LOW (arg1)
7008 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7009 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7010 return TREE_OPERAND (arg0, 0);
7012 goto binary;
7014 case MIN_EXPR:
7015 if (operand_equal_p (arg0, arg1, 0))
7016 return omit_one_operand (type, arg0, arg1);
7017 if (INTEGRAL_TYPE_P (type)
7018 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
7019 return omit_one_operand (type, arg1, arg0);
7020 goto associate;
7022 case MAX_EXPR:
7023 if (operand_equal_p (arg0, arg1, 0))
7024 return omit_one_operand (type, arg0, arg1);
7025 if (INTEGRAL_TYPE_P (type)
7026 && TYPE_MAX_VALUE (type)
7027 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
7028 return omit_one_operand (type, arg1, arg0);
7029 goto associate;
7031 case TRUTH_NOT_EXPR:
7032 /* Note that the operand of this must be an int
7033 and its values must be 0 or 1.
7034 ("true" is a fixed value perhaps depending on the language,
7035 but we don't handle values other than 1 correctly yet.) */
7036 tem = invert_truthvalue (arg0);
7037 /* Avoid infinite recursion. */
7038 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7040 tem = fold_single_bit_test (code, arg0, arg1, type);
7041 if (tem)
7042 return tem;
7043 return t;
7045 return fold_convert (type, tem);
7047 case TRUTH_ANDIF_EXPR:
7048 /* Note that the operands of this must be ints
7049 and their values must be 0 or 1.
7050 ("true" is a fixed value perhaps depending on the language.) */
7051 /* If first arg is constant zero, return it. */
7052 if (integer_zerop (arg0))
7053 return fold_convert (type, arg0);
7054 case TRUTH_AND_EXPR:
7055 /* If either arg is constant true, drop it. */
7056 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7057 return non_lvalue (fold_convert (type, arg1));
7058 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7059 /* Preserve sequence points. */
7060 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7061 return non_lvalue (fold_convert (type, arg0));
7062 /* If second arg is constant zero, result is zero, but first arg
7063 must be evaluated. */
7064 if (integer_zerop (arg1))
7065 return omit_one_operand (type, arg1, arg0);
7066 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7067 case will be handled here. */
7068 if (integer_zerop (arg0))
7069 return omit_one_operand (type, arg0, arg1);
7071 truth_andor:
7072 /* We only do these simplifications if we are optimizing. */
7073 if (!optimize)
7074 return t;
7076 /* Check for things like (A || B) && (A || C). We can convert this
7077 to A || (B && C). Note that either operator can be any of the four
7078 truth and/or operations and the transformation will still be
7079 valid. Also note that we only care about order for the
7080 ANDIF and ORIF operators. If B contains side effects, this
7081 might change the truth-value of A. */
7082 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7083 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7084 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7085 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7086 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7087 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7089 tree a00 = TREE_OPERAND (arg0, 0);
7090 tree a01 = TREE_OPERAND (arg0, 1);
7091 tree a10 = TREE_OPERAND (arg1, 0);
7092 tree a11 = TREE_OPERAND (arg1, 1);
7093 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7094 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7095 && (code == TRUTH_AND_EXPR
7096 || code == TRUTH_OR_EXPR));
7098 if (operand_equal_p (a00, a10, 0))
7099 return fold (build (TREE_CODE (arg0), type, a00,
7100 fold (build (code, type, a01, a11))));
7101 else if (commutative && operand_equal_p (a00, a11, 0))
7102 return fold (build (TREE_CODE (arg0), type, a00,
7103 fold (build (code, type, a01, a10))));
7104 else if (commutative && operand_equal_p (a01, a10, 0))
7105 return fold (build (TREE_CODE (arg0), type, a01,
7106 fold (build (code, type, a00, a11))));
7108 /* This case if tricky because we must either have commutative
7109 operators or else A10 must not have side-effects. */
7111 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7112 && operand_equal_p (a01, a11, 0))
7113 return fold (build (TREE_CODE (arg0), type,
7114 fold (build (code, type, a00, a10)),
7115 a01));
7118 /* See if we can build a range comparison. */
7119 if (0 != (tem = fold_range_test (t)))
7120 return tem;
7122 /* Check for the possibility of merging component references. If our
7123 lhs is another similar operation, try to merge its rhs with our
7124 rhs. Then try to merge our lhs and rhs. */
7125 if (TREE_CODE (arg0) == code
7126 && 0 != (tem = fold_truthop (code, type,
7127 TREE_OPERAND (arg0, 1), arg1)))
7128 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7130 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7131 return tem;
7133 return t;
7135 case TRUTH_ORIF_EXPR:
7136 /* Note that the operands of this must be ints
7137 and their values must be 0 or true.
7138 ("true" is a fixed value perhaps depending on the language.) */
7139 /* If first arg is constant true, return it. */
7140 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7141 return fold_convert (type, arg0);
7142 case TRUTH_OR_EXPR:
7143 /* If either arg is constant zero, drop it. */
7144 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7145 return non_lvalue (fold_convert (type, arg1));
7146 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7147 /* Preserve sequence points. */
7148 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7149 return non_lvalue (fold_convert (type, arg0));
7150 /* If second arg is constant true, result is true, but we must
7151 evaluate first arg. */
7152 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7153 return omit_one_operand (type, arg1, arg0);
7154 /* Likewise for first arg, but note this only occurs here for
7155 TRUTH_OR_EXPR. */
7156 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7157 return omit_one_operand (type, arg0, arg1);
7158 goto truth_andor;
7160 case TRUTH_XOR_EXPR:
7161 /* If either arg is constant zero, drop it. */
7162 if (integer_zerop (arg0))
7163 return non_lvalue (fold_convert (type, arg1));
7164 if (integer_zerop (arg1))
7165 return non_lvalue (fold_convert (type, arg0));
7166 /* If either arg is constant true, this is a logical inversion. */
7167 if (integer_onep (arg0))
7168 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7169 if (integer_onep (arg1))
7170 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7171 return t;
7173 case EQ_EXPR:
7174 case NE_EXPR:
7175 case LT_EXPR:
7176 case GT_EXPR:
7177 case LE_EXPR:
7178 case GE_EXPR:
7179 /* If one arg is a real or integer constant, put it last. */
7180 if (tree_swap_operands_p (arg0, arg1, true))
7181 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7183 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7185 tree targ0 = strip_float_extensions (arg0);
7186 tree targ1 = strip_float_extensions (arg1);
7187 tree newtype = TREE_TYPE (targ0);
7189 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7190 newtype = TREE_TYPE (targ1);
7192 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7193 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7194 return fold (build (code, type, fold_convert (newtype, targ0),
7195 fold_convert (newtype, targ1)));
7197 /* (-a) CMP (-b) -> b CMP a */
7198 if (TREE_CODE (arg0) == NEGATE_EXPR
7199 && TREE_CODE (arg1) == NEGATE_EXPR)
7200 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7201 TREE_OPERAND (arg0, 0)));
7203 if (TREE_CODE (arg1) == REAL_CST)
7205 REAL_VALUE_TYPE cst;
7206 cst = TREE_REAL_CST (arg1);
7208 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7209 if (TREE_CODE (arg0) == NEGATE_EXPR)
7210 return
7211 fold (build (swap_tree_comparison (code), type,
7212 TREE_OPERAND (arg0, 0),
7213 build_real (TREE_TYPE (arg1),
7214 REAL_VALUE_NEGATE (cst))));
7216 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7217 /* a CMP (-0) -> a CMP 0 */
7218 if (REAL_VALUE_MINUS_ZERO (cst))
7219 return fold (build (code, type, arg0,
7220 build_real (TREE_TYPE (arg1), dconst0)));
7222 /* x != NaN is always true, other ops are always false. */
7223 if (REAL_VALUE_ISNAN (cst)
7224 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7226 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7227 return omit_one_operand (type, fold_convert (type, t), arg0);
7230 /* Fold comparisons against infinity. */
7231 if (REAL_VALUE_ISINF (cst))
7233 tem = fold_inf_compare (code, type, arg0, arg1);
7234 if (tem != NULL_TREE)
7235 return tem;
7239 /* If this is a comparison of a real constant with a PLUS_EXPR
7240 or a MINUS_EXPR of a real constant, we can convert it into a
7241 comparison with a revised real constant as long as no overflow
7242 occurs when unsafe_math_optimizations are enabled. */
7243 if (flag_unsafe_math_optimizations
7244 && TREE_CODE (arg1) == REAL_CST
7245 && (TREE_CODE (arg0) == PLUS_EXPR
7246 || TREE_CODE (arg0) == MINUS_EXPR)
7247 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7248 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7249 ? MINUS_EXPR : PLUS_EXPR,
7250 arg1, TREE_OPERAND (arg0, 1), 0))
7251 && ! TREE_CONSTANT_OVERFLOW (tem))
7252 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7254 /* Likewise, we can simplify a comparison of a real constant with
7255 a MINUS_EXPR whose first operand is also a real constant, i.e.
7256 (c1 - x) < c2 becomes x > c1-c2. */
7257 if (flag_unsafe_math_optimizations
7258 && TREE_CODE (arg1) == REAL_CST
7259 && TREE_CODE (arg0) == MINUS_EXPR
7260 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7261 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7262 arg1, 0))
7263 && ! TREE_CONSTANT_OVERFLOW (tem))
7264 return fold (build (swap_tree_comparison (code), type,
7265 TREE_OPERAND (arg0, 1), tem));
7267 /* Fold comparisons against built-in math functions. */
7268 if (TREE_CODE (arg1) == REAL_CST
7269 && flag_unsafe_math_optimizations
7270 && ! flag_errno_math)
7272 enum built_in_function fcode = builtin_mathfn_code (arg0);
7274 if (fcode != END_BUILTINS)
7276 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7277 if (tem != NULL_TREE)
7278 return tem;
7283 /* Convert foo++ == CONST into ++foo == CONST + INCR.
7284 First, see if one arg is constant; find the constant arg
7285 and the other one. */
7287 tree constop = 0, varop = NULL_TREE;
7288 int constopnum = -1;
7290 if (TREE_CONSTANT (arg1))
7291 constopnum = 1, constop = arg1, varop = arg0;
7292 if (TREE_CONSTANT (arg0))
7293 constopnum = 0, constop = arg0, varop = arg1;
7295 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
7297 /* This optimization is invalid for ordered comparisons
7298 if CONST+INCR overflows or if foo+incr might overflow.
7299 This optimization is invalid for floating point due to rounding.
7300 For pointer types we assume overflow doesn't happen. */
7301 if (POINTER_TYPE_P (TREE_TYPE (varop))
7302 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7303 && (code == EQ_EXPR || code == NE_EXPR)))
7305 tree newconst
7306 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
7307 constop, TREE_OPERAND (varop, 1)));
7309 /* Do not overwrite the current varop to be a preincrement,
7310 create a new node so that we won't confuse our caller who
7311 might create trees and throw them away, reusing the
7312 arguments that they passed to build. This shows up in
7313 the THEN or ELSE parts of ?: being postincrements. */
7314 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7315 TREE_OPERAND (varop, 0),
7316 TREE_OPERAND (varop, 1));
7318 /* If VAROP is a reference to a bitfield, we must mask
7319 the constant by the width of the field. */
7320 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7321 && DECL_BIT_FIELD(TREE_OPERAND
7322 (TREE_OPERAND (varop, 0), 1)))
7324 int size
7325 = TREE_INT_CST_LOW (DECL_SIZE
7326 (TREE_OPERAND
7327 (TREE_OPERAND (varop, 0), 1)));
7328 tree mask, unsigned_type;
7329 unsigned int precision;
7330 tree folded_compare;
7332 /* First check whether the comparison would come out
7333 always the same. If we don't do that we would
7334 change the meaning with the masking. */
7335 if (constopnum == 0)
7336 folded_compare = fold (build (code, type, constop,
7337 TREE_OPERAND (varop, 0)));
7338 else
7339 folded_compare = fold (build (code, type,
7340 TREE_OPERAND (varop, 0),
7341 constop));
7342 if (integer_zerop (folded_compare)
7343 || integer_onep (folded_compare))
7344 return omit_one_operand (type, folded_compare, varop);
7346 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7347 precision = TYPE_PRECISION (unsigned_type);
7348 mask = build_int_2 (~0, ~0);
7349 TREE_TYPE (mask) = unsigned_type;
7350 force_fit_type (mask, 0);
7351 mask = const_binop (RSHIFT_EXPR, mask,
7352 size_int (precision - size), 0);
7353 newconst = fold (build (BIT_AND_EXPR,
7354 TREE_TYPE (varop), newconst,
7355 fold_convert (TREE_TYPE (varop),
7356 mask)));
7359 t = build (code, type,
7360 (constopnum == 0) ? newconst : varop,
7361 (constopnum == 1) ? newconst : varop);
7362 return t;
7365 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7367 if (POINTER_TYPE_P (TREE_TYPE (varop))
7368 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7369 && (code == EQ_EXPR || code == NE_EXPR)))
7371 tree newconst
7372 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7373 constop, TREE_OPERAND (varop, 1)));
7375 /* Do not overwrite the current varop to be a predecrement,
7376 create a new node so that we won't confuse our caller who
7377 might create trees and throw them away, reusing the
7378 arguments that they passed to build. This shows up in
7379 the THEN or ELSE parts of ?: being postdecrements. */
7380 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7381 TREE_OPERAND (varop, 0),
7382 TREE_OPERAND (varop, 1));
7384 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7385 && DECL_BIT_FIELD(TREE_OPERAND
7386 (TREE_OPERAND (varop, 0), 1)))
7388 int size
7389 = TREE_INT_CST_LOW (DECL_SIZE
7390 (TREE_OPERAND
7391 (TREE_OPERAND (varop, 0), 1)));
7392 tree mask, unsigned_type;
7393 unsigned int precision;
7394 tree folded_compare;
7396 if (constopnum == 0)
7397 folded_compare = fold (build (code, type, constop,
7398 TREE_OPERAND (varop, 0)));
7399 else
7400 folded_compare = fold (build (code, type,
7401 TREE_OPERAND (varop, 0),
7402 constop));
7403 if (integer_zerop (folded_compare)
7404 || integer_onep (folded_compare))
7405 return omit_one_operand (type, folded_compare, varop);
7407 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7408 precision = TYPE_PRECISION (unsigned_type);
7409 mask = build_int_2 (~0, ~0);
7410 TREE_TYPE (mask) = TREE_TYPE (varop);
7411 force_fit_type (mask, 0);
7412 mask = const_binop (RSHIFT_EXPR, mask,
7413 size_int (precision - size), 0);
7414 newconst = fold (build (BIT_AND_EXPR,
7415 TREE_TYPE (varop), newconst,
7416 fold_convert (TREE_TYPE (varop),
7417 mask)));
7420 t = build (code, type,
7421 (constopnum == 0) ? newconst : varop,
7422 (constopnum == 1) ? newconst : varop);
7423 return t;
7428 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7429 This transformation affects the cases which are handled in later
7430 optimizations involving comparisons with non-negative constants. */
7431 if (TREE_CODE (arg1) == INTEGER_CST
7432 && TREE_CODE (arg0) != INTEGER_CST
7433 && tree_int_cst_sgn (arg1) > 0)
7435 switch (code)
7437 case GE_EXPR:
7438 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7439 return fold (build (GT_EXPR, type, arg0, arg1));
7441 case LT_EXPR:
7442 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7443 return fold (build (LE_EXPR, type, arg0, arg1));
7445 default:
7446 break;
7450 /* Comparisons with the highest or lowest possible integer of
7451 the specified size will have known values. */
7453 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7455 if (TREE_CODE (arg1) == INTEGER_CST
7456 && ! TREE_CONSTANT_OVERFLOW (arg1)
7457 && width <= HOST_BITS_PER_WIDE_INT
7458 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7459 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7461 unsigned HOST_WIDE_INT signed_max;
7462 unsigned HOST_WIDE_INT max, min;
7464 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7466 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7468 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7469 min = 0;
7471 else
7473 max = signed_max;
7474 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7477 if (TREE_INT_CST_HIGH (arg1) == 0
7478 && TREE_INT_CST_LOW (arg1) == max)
7479 switch (code)
7481 case GT_EXPR:
7482 return omit_one_operand (type,
7483 fold_convert (type,
7484 integer_zero_node),
7485 arg0);
7486 case GE_EXPR:
7487 return fold (build (EQ_EXPR, type, arg0, arg1));
7489 case LE_EXPR:
7490 return omit_one_operand (type,
7491 fold_convert (type,
7492 integer_one_node),
7493 arg0);
7494 case LT_EXPR:
7495 return fold (build (NE_EXPR, type, arg0, arg1));
7497 /* The GE_EXPR and LT_EXPR cases above are not normally
7498 reached because of previous transformations. */
7500 default:
7501 break;
7503 else if (TREE_INT_CST_HIGH (arg1) == 0
7504 && TREE_INT_CST_LOW (arg1) == max - 1)
7505 switch (code)
7507 case GT_EXPR:
7508 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7509 return fold (build (EQ_EXPR, type, arg0, arg1));
7510 case LE_EXPR:
7511 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7512 return fold (build (NE_EXPR, type, arg0, arg1));
7513 default:
7514 break;
7516 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7517 && TREE_INT_CST_LOW (arg1) == min)
7518 switch (code)
7520 case LT_EXPR:
7521 return omit_one_operand (type,
7522 fold_convert (type,
7523 integer_zero_node),
7524 arg0);
7525 case LE_EXPR:
7526 return fold (build (EQ_EXPR, type, arg0, arg1));
7528 case GE_EXPR:
7529 return omit_one_operand (type,
7530 fold_convert (type,
7531 integer_one_node),
7532 arg0);
7533 case GT_EXPR:
7534 return fold (build (NE_EXPR, type, arg0, arg1));
7536 default:
7537 break;
7539 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7540 && TREE_INT_CST_LOW (arg1) == min + 1)
7541 switch (code)
7543 case GE_EXPR:
7544 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7545 return fold (build (NE_EXPR, type, arg0, arg1));
7546 case LT_EXPR:
7547 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7548 return fold (build (EQ_EXPR, type, arg0, arg1));
7549 default:
7550 break;
7553 else if (TREE_INT_CST_HIGH (arg1) == 0
7554 && TREE_INT_CST_LOW (arg1) == signed_max
7555 && TREE_UNSIGNED (TREE_TYPE (arg1))
7556 /* signed_type does not work on pointer types. */
7557 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7559 /* The following case also applies to X < signed_max+1
7560 and X >= signed_max+1 because previous transformations. */
7561 if (code == LE_EXPR || code == GT_EXPR)
7563 tree st0, st1;
7564 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7565 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7566 return fold
7567 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7568 type, fold_convert (st0, arg0),
7569 fold_convert (st1, integer_zero_node)));
7575 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7576 a MINUS_EXPR of a constant, we can convert it into a comparison with
7577 a revised constant as long as no overflow occurs. */
7578 if ((code == EQ_EXPR || code == NE_EXPR)
7579 && TREE_CODE (arg1) == INTEGER_CST
7580 && (TREE_CODE (arg0) == PLUS_EXPR
7581 || TREE_CODE (arg0) == MINUS_EXPR)
7582 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7583 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7584 ? MINUS_EXPR : PLUS_EXPR,
7585 arg1, TREE_OPERAND (arg0, 1), 0))
7586 && ! TREE_CONSTANT_OVERFLOW (tem))
7587 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7589 /* Similarly for a NEGATE_EXPR. */
7590 else if ((code == EQ_EXPR || code == NE_EXPR)
7591 && TREE_CODE (arg0) == NEGATE_EXPR
7592 && TREE_CODE (arg1) == INTEGER_CST
7593 && 0 != (tem = negate_expr (arg1))
7594 && TREE_CODE (tem) == INTEGER_CST
7595 && ! TREE_CONSTANT_OVERFLOW (tem))
7596 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7598 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7599 for !=. Don't do this for ordered comparisons due to overflow. */
7600 else if ((code == NE_EXPR || code == EQ_EXPR)
7601 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7602 return fold (build (code, type,
7603 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7605 /* If we are widening one operand of an integer comparison,
7606 see if the other operand is similarly being widened. Perhaps we
7607 can do the comparison in the narrower type. */
7608 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7609 && TREE_CODE (arg0) == NOP_EXPR
7610 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7611 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7612 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7613 || (TREE_CODE (t1) == INTEGER_CST
7614 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7615 return fold (build (code, type, tem,
7616 fold_convert (TREE_TYPE (tem), t1)));
7618 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7619 constant, we can simplify it. */
7620 else if (TREE_CODE (arg1) == INTEGER_CST
7621 && (TREE_CODE (arg0) == MIN_EXPR
7622 || TREE_CODE (arg0) == MAX_EXPR)
7623 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7624 return optimize_minmax_comparison (t);
7626 /* If we are comparing an ABS_EXPR with a constant, we can
7627 convert all the cases into explicit comparisons, but they may
7628 well not be faster than doing the ABS and one comparison.
7629 But ABS (X) <= C is a range comparison, which becomes a subtraction
7630 and a comparison, and is probably faster. */
7631 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7632 && TREE_CODE (arg0) == ABS_EXPR
7633 && ! TREE_SIDE_EFFECTS (arg0)
7634 && (0 != (tem = negate_expr (arg1)))
7635 && TREE_CODE (tem) == INTEGER_CST
7636 && ! TREE_CONSTANT_OVERFLOW (tem))
7637 return fold (build (TRUTH_ANDIF_EXPR, type,
7638 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7639 build (LE_EXPR, type,
7640 TREE_OPERAND (arg0, 0), arg1)));
7642 /* If this is an EQ or NE comparison with zero and ARG0 is
7643 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7644 two operations, but the latter can be done in one less insn
7645 on machines that have only two-operand insns or on which a
7646 constant cannot be the first operand. */
7647 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7648 && TREE_CODE (arg0) == BIT_AND_EXPR)
7650 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7651 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7652 return
7653 fold (build (code, type,
7654 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7655 build (RSHIFT_EXPR,
7656 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7657 TREE_OPERAND (arg0, 1),
7658 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7659 fold_convert (TREE_TYPE (arg0),
7660 integer_one_node)),
7661 arg1));
7662 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7663 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7664 return
7665 fold (build (code, type,
7666 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7667 build (RSHIFT_EXPR,
7668 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7669 TREE_OPERAND (arg0, 0),
7670 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7671 fold_convert (TREE_TYPE (arg0),
7672 integer_one_node)),
7673 arg1));
7676 /* If this is an NE or EQ comparison of zero against the result of a
7677 signed MOD operation whose second operand is a power of 2, make
7678 the MOD operation unsigned since it is simpler and equivalent. */
7679 if ((code == NE_EXPR || code == EQ_EXPR)
7680 && integer_zerop (arg1)
7681 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7682 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7683 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7684 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7685 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7686 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7688 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7689 tree newmod = build (TREE_CODE (arg0), newtype,
7690 fold_convert (newtype,
7691 TREE_OPERAND (arg0, 0)),
7692 fold_convert (newtype,
7693 TREE_OPERAND (arg0, 1)));
7695 return build (code, type, newmod, fold_convert (newtype, arg1));
7698 /* If this is an NE comparison of zero with an AND of one, remove the
7699 comparison since the AND will give the correct value. */
7700 if (code == NE_EXPR && integer_zerop (arg1)
7701 && TREE_CODE (arg0) == BIT_AND_EXPR
7702 && integer_onep (TREE_OPERAND (arg0, 1)))
7703 return fold_convert (type, arg0);
7705 /* If we have (A & C) == C where C is a power of 2, convert this into
7706 (A & C) != 0. Similarly for NE_EXPR. */
7707 if ((code == EQ_EXPR || code == NE_EXPR)
7708 && TREE_CODE (arg0) == BIT_AND_EXPR
7709 && integer_pow2p (TREE_OPERAND (arg0, 1))
7710 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7711 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7712 arg0, integer_zero_node));
7714 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7715 2, then fold the expression into shifts and logical operations. */
7716 tem = fold_single_bit_test (code, arg0, arg1, type);
7717 if (tem)
7718 return tem;
7720 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7721 Similarly for NE_EXPR. */
7722 if ((code == EQ_EXPR || code == NE_EXPR)
7723 && TREE_CODE (arg0) == BIT_AND_EXPR
7724 && TREE_CODE (arg1) == INTEGER_CST
7725 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7727 tree dandnotc
7728 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7729 arg1, build1 (BIT_NOT_EXPR,
7730 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7731 TREE_OPERAND (arg0, 1))));
7732 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7733 if (integer_nonzerop (dandnotc))
7734 return omit_one_operand (type, rslt, arg0);
7737 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7738 Similarly for NE_EXPR. */
7739 if ((code == EQ_EXPR || code == NE_EXPR)
7740 && TREE_CODE (arg0) == BIT_IOR_EXPR
7741 && TREE_CODE (arg1) == INTEGER_CST
7742 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7744 tree candnotd
7745 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7746 TREE_OPERAND (arg0, 1),
7747 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7748 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7749 if (integer_nonzerop (candnotd))
7750 return omit_one_operand (type, rslt, arg0);
7753 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7754 and similarly for >= into !=. */
7755 if ((code == LT_EXPR || code == GE_EXPR)
7756 && TREE_UNSIGNED (TREE_TYPE (arg0))
7757 && TREE_CODE (arg1) == LSHIFT_EXPR
7758 && integer_onep (TREE_OPERAND (arg1, 0)))
7759 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7760 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7761 TREE_OPERAND (arg1, 1)),
7762 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7764 else if ((code == LT_EXPR || code == GE_EXPR)
7765 && TREE_UNSIGNED (TREE_TYPE (arg0))
7766 && (TREE_CODE (arg1) == NOP_EXPR
7767 || TREE_CODE (arg1) == CONVERT_EXPR)
7768 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7769 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7770 return
7771 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7772 fold_convert (TREE_TYPE (arg0),
7773 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7774 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7775 1))),
7776 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7778 /* Simplify comparison of something with itself. (For IEEE
7779 floating-point, we can only do some of these simplifications.) */
7780 if (operand_equal_p (arg0, arg1, 0))
7782 switch (code)
7784 case EQ_EXPR:
7785 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7786 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7787 return constant_boolean_node (1, type);
7788 break;
7790 case GE_EXPR:
7791 case LE_EXPR:
7792 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7793 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7794 return constant_boolean_node (1, type);
7795 return fold (build (EQ_EXPR, type, arg0, arg1));
7797 case NE_EXPR:
7798 /* For NE, we can only do this simplification if integer
7799 or we don't honor IEEE floating point NaNs. */
7800 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7801 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7802 break;
7803 /* ... fall through ... */
7804 case GT_EXPR:
7805 case LT_EXPR:
7806 return constant_boolean_node (0, type);
7807 default:
7808 abort ();
7812 /* If we are comparing an expression that just has comparisons
7813 of two integer values, arithmetic expressions of those comparisons,
7814 and constants, we can simplify it. There are only three cases
7815 to check: the two values can either be equal, the first can be
7816 greater, or the second can be greater. Fold the expression for
7817 those three values. Since each value must be 0 or 1, we have
7818 eight possibilities, each of which corresponds to the constant 0
7819 or 1 or one of the six possible comparisons.
7821 This handles common cases like (a > b) == 0 but also handles
7822 expressions like ((x > y) - (y > x)) > 0, which supposedly
7823 occur in macroized code. */
7825 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7827 tree cval1 = 0, cval2 = 0;
7828 int save_p = 0;
7830 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7831 /* Don't handle degenerate cases here; they should already
7832 have been handled anyway. */
7833 && cval1 != 0 && cval2 != 0
7834 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7835 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7836 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7837 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7838 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7839 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7840 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7842 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7843 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7845 /* We can't just pass T to eval_subst in case cval1 or cval2
7846 was the same as ARG1. */
7848 tree high_result
7849 = fold (build (code, type,
7850 eval_subst (arg0, cval1, maxval, cval2, minval),
7851 arg1));
7852 tree equal_result
7853 = fold (build (code, type,
7854 eval_subst (arg0, cval1, maxval, cval2, maxval),
7855 arg1));
7856 tree low_result
7857 = fold (build (code, type,
7858 eval_subst (arg0, cval1, minval, cval2, maxval),
7859 arg1));
7861 /* All three of these results should be 0 or 1. Confirm they
7862 are. Then use those values to select the proper code
7863 to use. */
7865 if ((integer_zerop (high_result)
7866 || integer_onep (high_result))
7867 && (integer_zerop (equal_result)
7868 || integer_onep (equal_result))
7869 && (integer_zerop (low_result)
7870 || integer_onep (low_result)))
7872 /* Make a 3-bit mask with the high-order bit being the
7873 value for `>', the next for '=', and the low for '<'. */
7874 switch ((integer_onep (high_result) * 4)
7875 + (integer_onep (equal_result) * 2)
7876 + integer_onep (low_result))
7878 case 0:
7879 /* Always false. */
7880 return omit_one_operand (type, integer_zero_node, arg0);
7881 case 1:
7882 code = LT_EXPR;
7883 break;
7884 case 2:
7885 code = EQ_EXPR;
7886 break;
7887 case 3:
7888 code = LE_EXPR;
7889 break;
7890 case 4:
7891 code = GT_EXPR;
7892 break;
7893 case 5:
7894 code = NE_EXPR;
7895 break;
7896 case 6:
7897 code = GE_EXPR;
7898 break;
7899 case 7:
7900 /* Always true. */
7901 return omit_one_operand (type, integer_one_node, arg0);
7904 t = build (code, type, cval1, cval2);
7905 if (save_p)
7906 return save_expr (t);
7907 else
7908 return fold (t);
7913 /* If this is a comparison of a field, we may be able to simplify it. */
7914 if (((TREE_CODE (arg0) == COMPONENT_REF
7915 && (*lang_hooks.can_use_bit_fields_p) ())
7916 || TREE_CODE (arg0) == BIT_FIELD_REF)
7917 && (code == EQ_EXPR || code == NE_EXPR)
7918 /* Handle the constant case even without -O
7919 to make sure the warnings are given. */
7920 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7922 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7923 if (t1)
7924 return t1;
7927 /* If this is a comparison of complex values and either or both sides
7928 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7929 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7930 This may prevent needless evaluations. */
7931 if ((code == EQ_EXPR || code == NE_EXPR)
7932 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7933 && (TREE_CODE (arg0) == COMPLEX_EXPR
7934 || TREE_CODE (arg1) == COMPLEX_EXPR
7935 || TREE_CODE (arg0) == COMPLEX_CST
7936 || TREE_CODE (arg1) == COMPLEX_CST))
7938 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7939 tree real0, imag0, real1, imag1;
7941 arg0 = save_expr (arg0);
7942 arg1 = save_expr (arg1);
7943 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7944 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7945 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7946 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7948 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7949 : TRUTH_ORIF_EXPR),
7950 type,
7951 fold (build (code, type, real0, real1)),
7952 fold (build (code, type, imag0, imag1))));
7955 /* Optimize comparisons of strlen vs zero to a compare of the
7956 first character of the string vs zero. To wit,
7957 strlen(ptr) == 0 => *ptr == 0
7958 strlen(ptr) != 0 => *ptr != 0
7959 Other cases should reduce to one of these two (or a constant)
7960 due to the return value of strlen being unsigned. */
7961 if ((code == EQ_EXPR || code == NE_EXPR)
7962 && integer_zerop (arg1)
7963 && TREE_CODE (arg0) == CALL_EXPR)
7965 tree fndecl = get_callee_fndecl (arg0);
7966 tree arglist;
7968 if (fndecl
7969 && DECL_BUILT_IN (fndecl)
7970 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7971 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7972 && (arglist = TREE_OPERAND (arg0, 1))
7973 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7974 && ! TREE_CHAIN (arglist))
7975 return fold (build (code, type,
7976 build1 (INDIRECT_REF, char_type_node,
7977 TREE_VALUE(arglist)),
7978 integer_zero_node));
7981 /* From here on, the only cases we handle are when the result is
7982 known to be a constant.
7984 To compute GT, swap the arguments and do LT.
7985 To compute GE, do LT and invert the result.
7986 To compute LE, swap the arguments, do LT and invert the result.
7987 To compute NE, do EQ and invert the result.
7989 Therefore, the code below must handle only EQ and LT. */
7991 if (code == LE_EXPR || code == GT_EXPR)
7993 tem = arg0, arg0 = arg1, arg1 = tem;
7994 code = swap_tree_comparison (code);
7997 /* Note that it is safe to invert for real values here because we
7998 will check below in the one case that it matters. */
8000 t1 = NULL_TREE;
8001 invert = 0;
8002 if (code == NE_EXPR || code == GE_EXPR)
8004 invert = 1;
8005 code = invert_tree_comparison (code);
8008 /* Compute a result for LT or EQ if args permit;
8009 otherwise return T. */
8010 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8012 if (code == EQ_EXPR)
8013 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
8014 else
8015 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
8016 ? INT_CST_LT_UNSIGNED (arg0, arg1)
8017 : INT_CST_LT (arg0, arg1)),
8021 #if 0 /* This is no longer useful, but breaks some real code. */
8022 /* Assume a nonexplicit constant cannot equal an explicit one,
8023 since such code would be undefined anyway.
8024 Exception: on sysvr4, using #pragma weak,
8025 a label can come out as 0. */
8026 else if (TREE_CODE (arg1) == INTEGER_CST
8027 && !integer_zerop (arg1)
8028 && TREE_CONSTANT (arg0)
8029 && TREE_CODE (arg0) == ADDR_EXPR
8030 && code == EQ_EXPR)
8031 t1 = build_int_2 (0, 0);
8032 #endif
8033 /* Two real constants can be compared explicitly. */
8034 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8036 /* If either operand is a NaN, the result is false with two
8037 exceptions: First, an NE_EXPR is true on NaNs, but that case
8038 is already handled correctly since we will be inverting the
8039 result for NE_EXPR. Second, if we had inverted a LE_EXPR
8040 or a GE_EXPR into a LT_EXPR, we must return true so that it
8041 will be inverted into false. */
8043 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8044 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
8045 t1 = build_int_2 (invert && code == LT_EXPR, 0);
8047 else if (code == EQ_EXPR)
8048 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
8049 TREE_REAL_CST (arg1)),
8051 else
8052 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
8053 TREE_REAL_CST (arg1)),
8057 if (t1 == NULL_TREE)
8058 return t;
8060 if (invert)
8061 TREE_INT_CST_LOW (t1) ^= 1;
8063 TREE_TYPE (t1) = type;
8064 if (TREE_CODE (type) == BOOLEAN_TYPE)
8065 return (*lang_hooks.truthvalue_conversion) (t1);
8066 return t1;
8068 case COND_EXPR:
8069 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8070 so all simple results must be passed through pedantic_non_lvalue. */
8071 if (TREE_CODE (arg0) == INTEGER_CST)
8073 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8074 /* Only optimize constant conditions when the selected branch
8075 has the same type as the COND_EXPR. This avoids optimizing
8076 away "c ? x : throw", where the throw has a void type. */
8077 if (! VOID_TYPE_P (TREE_TYPE (tem))
8078 || VOID_TYPE_P (TREE_TYPE (t)))
8079 return pedantic_non_lvalue (tem);
8080 return t;
8082 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
8083 return pedantic_omit_one_operand (type, arg1, arg0);
8085 /* If we have A op B ? A : C, we may be able to convert this to a
8086 simpler expression, depending on the operation and the values
8087 of B and C. Signed zeros prevent all of these transformations,
8088 for reasons given above each one. */
8090 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8091 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8092 arg1, TREE_OPERAND (arg0, 1))
8093 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8095 tree arg2 = TREE_OPERAND (t, 2);
8096 enum tree_code comp_code = TREE_CODE (arg0);
8098 STRIP_NOPS (arg2);
8100 /* If we have A op 0 ? A : -A, consider applying the following
8101 transformations:
8103 A == 0? A : -A same as -A
8104 A != 0? A : -A same as A
8105 A >= 0? A : -A same as abs (A)
8106 A > 0? A : -A same as abs (A)
8107 A <= 0? A : -A same as -abs (A)
8108 A < 0? A : -A same as -abs (A)
8110 None of these transformations work for modes with signed
8111 zeros. If A is +/-0, the first two transformations will
8112 change the sign of the result (from +0 to -0, or vice
8113 versa). The last four will fix the sign of the result,
8114 even though the original expressions could be positive or
8115 negative, depending on the sign of A.
8117 Note that all these transformations are correct if A is
8118 NaN, since the two alternatives (A and -A) are also NaNs. */
8119 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8120 ? real_zerop (TREE_OPERAND (arg0, 1))
8121 : integer_zerop (TREE_OPERAND (arg0, 1)))
8122 && TREE_CODE (arg2) == NEGATE_EXPR
8123 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8124 switch (comp_code)
8126 case EQ_EXPR:
8127 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8128 tem = fold_convert (type, negate_expr (tem));
8129 return pedantic_non_lvalue (tem);
8130 case NE_EXPR:
8131 return pedantic_non_lvalue (fold_convert (type, arg1));
8132 case GE_EXPR:
8133 case GT_EXPR:
8134 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8135 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8136 (TREE_TYPE (arg1)), arg1);
8137 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8138 return pedantic_non_lvalue (fold_convert (type, arg1));
8139 case LE_EXPR:
8140 case LT_EXPR:
8141 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8142 arg1 = fold_convert ((lang_hooks.types.signed_type)
8143 (TREE_TYPE (arg1)), arg1);
8144 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8145 arg1 = negate_expr (fold_convert (type, arg1));
8146 return pedantic_non_lvalue (arg1);
8147 default:
8148 abort ();
8151 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8152 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8153 both transformations are correct when A is NaN: A != 0
8154 is then true, and A == 0 is false. */
8156 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8158 if (comp_code == NE_EXPR)
8159 return pedantic_non_lvalue (fold_convert (type, arg1));
8160 else if (comp_code == EQ_EXPR)
8161 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8164 /* Try some transformations of A op B ? A : B.
8166 A == B? A : B same as B
8167 A != B? A : B same as A
8168 A >= B? A : B same as max (A, B)
8169 A > B? A : B same as max (B, A)
8170 A <= B? A : B same as min (A, B)
8171 A < B? A : B same as min (B, A)
8173 As above, these transformations don't work in the presence
8174 of signed zeros. For example, if A and B are zeros of
8175 opposite sign, the first two transformations will change
8176 the sign of the result. In the last four, the original
8177 expressions give different results for (A=+0, B=-0) and
8178 (A=-0, B=+0), but the transformed expressions do not.
8180 The first two transformations are correct if either A or B
8181 is a NaN. In the first transformation, the condition will
8182 be false, and B will indeed be chosen. In the case of the
8183 second transformation, the condition A != B will be true,
8184 and A will be chosen.
8186 The conversions to max() and min() are not correct if B is
8187 a number and A is not. The conditions in the original
8188 expressions will be false, so all four give B. The min()
8189 and max() versions would give a NaN instead. */
8190 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8191 arg2, TREE_OPERAND (arg0, 0)))
8193 tree comp_op0 = TREE_OPERAND (arg0, 0);
8194 tree comp_op1 = TREE_OPERAND (arg0, 1);
8195 tree comp_type = TREE_TYPE (comp_op0);
8197 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8198 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8200 comp_type = type;
8201 comp_op0 = arg1;
8202 comp_op1 = arg2;
8205 switch (comp_code)
8207 case EQ_EXPR:
8208 return pedantic_non_lvalue (fold_convert (type, arg2));
8209 case NE_EXPR:
8210 return pedantic_non_lvalue (fold_convert (type, arg1));
8211 case LE_EXPR:
8212 case LT_EXPR:
8213 /* In C++ a ?: expression can be an lvalue, so put the
8214 operand which will be used if they are equal first
8215 so that we can convert this back to the
8216 corresponding COND_EXPR. */
8217 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8218 return pedantic_non_lvalue (fold_convert
8219 (type, fold (build (MIN_EXPR, comp_type,
8220 (comp_code == LE_EXPR
8221 ? comp_op0 : comp_op1),
8222 (comp_code == LE_EXPR
8223 ? comp_op1 : comp_op0)))));
8224 break;
8225 case GE_EXPR:
8226 case GT_EXPR:
8227 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8228 return pedantic_non_lvalue (fold_convert
8229 (type, fold (build (MAX_EXPR, comp_type,
8230 (comp_code == GE_EXPR
8231 ? comp_op0 : comp_op1),
8232 (comp_code == GE_EXPR
8233 ? comp_op1 : comp_op0)))));
8234 break;
8235 default:
8236 abort ();
8240 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8241 we might still be able to simplify this. For example,
8242 if C1 is one less or one more than C2, this might have started
8243 out as a MIN or MAX and been transformed by this function.
8244 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8246 if (INTEGRAL_TYPE_P (type)
8247 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8248 && TREE_CODE (arg2) == INTEGER_CST)
8249 switch (comp_code)
8251 case EQ_EXPR:
8252 /* We can replace A with C1 in this case. */
8253 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8254 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8255 TREE_OPERAND (t, 2)));
8257 case LT_EXPR:
8258 /* If C1 is C2 + 1, this is min(A, C2). */
8259 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8260 && operand_equal_p (TREE_OPERAND (arg0, 1),
8261 const_binop (PLUS_EXPR, arg2,
8262 integer_one_node, 0), 1))
8263 return pedantic_non_lvalue
8264 (fold (build (MIN_EXPR, type, arg1, arg2)));
8265 break;
8267 case LE_EXPR:
8268 /* If C1 is C2 - 1, this is min(A, C2). */
8269 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8270 && operand_equal_p (TREE_OPERAND (arg0, 1),
8271 const_binop (MINUS_EXPR, arg2,
8272 integer_one_node, 0), 1))
8273 return pedantic_non_lvalue
8274 (fold (build (MIN_EXPR, type, arg1, arg2)));
8275 break;
8277 case GT_EXPR:
8278 /* If C1 is C2 - 1, this is max(A, C2). */
8279 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8280 && operand_equal_p (TREE_OPERAND (arg0, 1),
8281 const_binop (MINUS_EXPR, arg2,
8282 integer_one_node, 0), 1))
8283 return pedantic_non_lvalue
8284 (fold (build (MAX_EXPR, type, arg1, arg2)));
8285 break;
8287 case GE_EXPR:
8288 /* If C1 is C2 + 1, this is max(A, C2). */
8289 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8290 && operand_equal_p (TREE_OPERAND (arg0, 1),
8291 const_binop (PLUS_EXPR, arg2,
8292 integer_one_node, 0), 1))
8293 return pedantic_non_lvalue
8294 (fold (build (MAX_EXPR, type, arg1, arg2)));
8295 break;
8296 case NE_EXPR:
8297 break;
8298 default:
8299 abort ();
8303 /* If the second operand is simpler than the third, swap them
8304 since that produces better jump optimization results. */
8305 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8306 TREE_OPERAND (t, 2), false))
8308 /* See if this can be inverted. If it can't, possibly because
8309 it was a floating-point inequality comparison, don't do
8310 anything. */
8311 tem = invert_truthvalue (arg0);
8313 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8314 return fold (build (code, type, tem,
8315 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8318 /* Convert A ? 1 : 0 to simply A. */
8319 if (integer_onep (TREE_OPERAND (t, 1))
8320 && integer_zerop (TREE_OPERAND (t, 2))
8321 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8322 call to fold will try to move the conversion inside
8323 a COND, which will recurse. In that case, the COND_EXPR
8324 is probably the best choice, so leave it alone. */
8325 && type == TREE_TYPE (arg0))
8326 return pedantic_non_lvalue (arg0);
8328 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8329 over COND_EXPR in cases such as floating point comparisons. */
8330 if (integer_zerop (TREE_OPERAND (t, 1))
8331 && integer_onep (TREE_OPERAND (t, 2))
8332 && truth_value_p (TREE_CODE (arg0)))
8333 return pedantic_non_lvalue (fold_convert (type,
8334 invert_truthvalue (arg0)));
8336 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8337 operation is simply A & 2. */
8339 if (integer_zerop (TREE_OPERAND (t, 2))
8340 && TREE_CODE (arg0) == NE_EXPR
8341 && integer_zerop (TREE_OPERAND (arg0, 1))
8342 && integer_pow2p (arg1)
8343 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8344 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8345 arg1, 1))
8346 return pedantic_non_lvalue (fold_convert (type,
8347 TREE_OPERAND (arg0, 0)));
8349 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8350 if (integer_zerop (TREE_OPERAND (t, 2))
8351 && truth_value_p (TREE_CODE (arg0))
8352 && truth_value_p (TREE_CODE (arg1)))
8353 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8354 arg0, arg1)));
8356 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8357 if (integer_onep (TREE_OPERAND (t, 2))
8358 && truth_value_p (TREE_CODE (arg0))
8359 && truth_value_p (TREE_CODE (arg1)))
8361 /* Only perform transformation if ARG0 is easily inverted. */
8362 tem = invert_truthvalue (arg0);
8363 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8364 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8365 tem, arg1)));
8368 return t;
8370 case COMPOUND_EXPR:
8371 /* When pedantic, a compound expression can be neither an lvalue
8372 nor an integer constant expression. */
8373 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8374 return t;
8375 /* Don't let (0, 0) be null pointer constant. */
8376 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8377 : fold_convert (type, arg1);
8378 return pedantic_non_lvalue (tem);
8380 case COMPLEX_EXPR:
8381 if (wins)
8382 return build_complex (type, arg0, arg1);
8383 return t;
8385 case REALPART_EXPR:
8386 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8387 return t;
8388 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8389 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8390 TREE_OPERAND (arg0, 1));
8391 else if (TREE_CODE (arg0) == COMPLEX_CST)
8392 return TREE_REALPART (arg0);
8393 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8394 return fold (build (TREE_CODE (arg0), type,
8395 fold (build1 (REALPART_EXPR, type,
8396 TREE_OPERAND (arg0, 0))),
8397 fold (build1 (REALPART_EXPR,
8398 type, TREE_OPERAND (arg0, 1)))));
8399 return t;
8401 case IMAGPART_EXPR:
8402 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8403 return fold_convert (type, integer_zero_node);
8404 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8405 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8406 TREE_OPERAND (arg0, 0));
8407 else if (TREE_CODE (arg0) == COMPLEX_CST)
8408 return TREE_IMAGPART (arg0);
8409 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8410 return fold (build (TREE_CODE (arg0), type,
8411 fold (build1 (IMAGPART_EXPR, type,
8412 TREE_OPERAND (arg0, 0))),
8413 fold (build1 (IMAGPART_EXPR, type,
8414 TREE_OPERAND (arg0, 1)))));
8415 return t;
8417 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8418 appropriate. */
8419 case CLEANUP_POINT_EXPR:
8420 if (! has_cleanups (arg0))
8421 return TREE_OPERAND (t, 0);
8424 enum tree_code code0 = TREE_CODE (arg0);
8425 int kind0 = TREE_CODE_CLASS (code0);
8426 tree arg00 = TREE_OPERAND (arg0, 0);
8427 tree arg01;
8429 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8430 return fold (build1 (code0, type,
8431 fold (build1 (CLEANUP_POINT_EXPR,
8432 TREE_TYPE (arg00), arg00))));
8434 if (kind0 == '<' || kind0 == '2'
8435 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8436 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8437 || code0 == TRUTH_XOR_EXPR)
8439 arg01 = TREE_OPERAND (arg0, 1);
8441 if (TREE_CONSTANT (arg00)
8442 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8443 && ! has_cleanups (arg00)))
8444 return fold (build (code0, type, arg00,
8445 fold (build1 (CLEANUP_POINT_EXPR,
8446 TREE_TYPE (arg01), arg01))));
8448 if (TREE_CONSTANT (arg01))
8449 return fold (build (code0, type,
8450 fold (build1 (CLEANUP_POINT_EXPR,
8451 TREE_TYPE (arg00), arg00)),
8452 arg01));
8455 return t;
8458 case CALL_EXPR:
8459 /* Check for a built-in function. */
8460 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8461 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8462 == FUNCTION_DECL)
8463 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8465 tree tmp = fold_builtin (expr);
8466 if (tmp)
8467 return tmp;
8469 return t;
8471 default:
8472 return t;
8473 } /* switch (code) */
8476 #ifdef ENABLE_FOLD_CHECKING
8477 #undef fold
8479 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8480 static void fold_check_failed (tree, tree);
8481 void print_fold_checksum (tree);
8483 /* When --enable-checking=fold, compute a digest of expr before
8484 and after actual fold call to see if fold did not accidentally
8485 change original expr. */
8487 tree
8488 fold (tree expr)
8490 tree ret;
8491 struct md5_ctx ctx;
8492 unsigned char checksum_before[16], checksum_after[16];
8493 htab_t ht;
8495 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8496 md5_init_ctx (&ctx);
8497 fold_checksum_tree (expr, &ctx, ht);
8498 md5_finish_ctx (&ctx, checksum_before);
8499 htab_empty (ht);
8501 ret = fold_1 (expr);
8503 md5_init_ctx (&ctx);
8504 fold_checksum_tree (expr, &ctx, ht);
8505 md5_finish_ctx (&ctx, checksum_after);
8506 htab_delete (ht);
8508 if (memcmp (checksum_before, checksum_after, 16))
8509 fold_check_failed (expr, ret);
8511 return ret;
8514 void
8515 print_fold_checksum (tree expr)
8517 struct md5_ctx ctx;
8518 unsigned char checksum[16], cnt;
8519 htab_t ht;
8521 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8522 md5_init_ctx (&ctx);
8523 fold_checksum_tree (expr, &ctx, ht);
8524 md5_finish_ctx (&ctx, checksum);
8525 htab_delete (ht);
8526 for (cnt = 0; cnt < 16; ++cnt)
8527 fprintf (stderr, "%02x", checksum[cnt]);
8528 putc ('\n', stderr);
8531 static void
8532 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8534 internal_error ("fold check: original tree changed by fold");
8537 static void
8538 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8540 void **slot;
8541 enum tree_code code;
8542 char buf[sizeof (struct tree_decl)];
8543 int i, len;
8545 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8546 > sizeof (struct tree_decl)
8547 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8548 abort ();
8549 if (expr == NULL)
8550 return;
8551 slot = htab_find_slot (ht, expr, INSERT);
8552 if (*slot != NULL)
8553 return;
8554 *slot = expr;
8555 code = TREE_CODE (expr);
8556 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8558 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8559 memcpy (buf, expr, tree_size (expr));
8560 expr = (tree) buf;
8561 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8563 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8565 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8566 memcpy (buf, expr, tree_size (expr));
8567 expr = (tree) buf;
8568 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8570 else if (TREE_CODE_CLASS (code) == 't'
8571 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8573 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8574 memcpy (buf, expr, tree_size (expr));
8575 expr = (tree) buf;
8576 TYPE_POINTER_TO (expr) = NULL;
8577 TYPE_REFERENCE_TO (expr) = NULL;
8579 md5_process_bytes (expr, tree_size (expr), ctx);
8580 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8581 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8582 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8583 len = TREE_CODE_LENGTH (code);
8584 switch (TREE_CODE_CLASS (code))
8586 case 'c':
8587 switch (code)
8589 case STRING_CST:
8590 md5_process_bytes (TREE_STRING_POINTER (expr),
8591 TREE_STRING_LENGTH (expr), ctx);
8592 break;
8593 case COMPLEX_CST:
8594 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8595 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8596 break;
8597 case VECTOR_CST:
8598 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8599 break;
8600 default:
8601 break;
8603 break;
8604 case 'x':
8605 switch (code)
8607 case TREE_LIST:
8608 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8609 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8610 break;
8611 case TREE_VEC:
8612 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8613 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8614 break;
8615 default:
8616 break;
8618 break;
8619 case 'e':
8620 switch (code)
8622 case SAVE_EXPR: len = 2; break;
8623 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8624 case RTL_EXPR: len = 0; break;
8625 case WITH_CLEANUP_EXPR: len = 2; break;
8626 default: break;
8628 /* Fall through. */
8629 case 'r':
8630 case '<':
8631 case '1':
8632 case '2':
8633 case 's':
8634 for (i = 0; i < len; ++i)
8635 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8636 break;
8637 case 'd':
8638 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8639 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8640 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8641 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8642 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8643 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8644 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8645 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8646 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8647 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8648 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8649 break;
8650 case 't':
8651 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8652 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8653 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8654 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8655 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8656 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8657 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8658 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8659 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8660 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8661 break;
8662 default:
8663 break;
8667 #endif
8669 /* Perform constant folding and related simplification of initializer
8670 expression EXPR. This behaves identically to "fold" but ignores
8671 potential run-time traps and exceptions that fold must preserve. */
8673 tree
8674 fold_initializer (tree expr)
8676 int saved_signaling_nans = flag_signaling_nans;
8677 int saved_trapping_math = flag_trapping_math;
8678 int saved_trapv = flag_trapv;
8679 tree result;
8681 flag_signaling_nans = 0;
8682 flag_trapping_math = 0;
8683 flag_trapv = 0;
8685 result = fold (expr);
8687 flag_signaling_nans = saved_signaling_nans;
8688 flag_trapping_math = saved_trapping_math;
8689 flag_trapv = saved_trapv;
8691 return result;
8694 /* Determine if first argument is a multiple of second argument. Return 0 if
8695 it is not, or we cannot easily determined it to be.
8697 An example of the sort of thing we care about (at this point; this routine
8698 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8699 fold cases do now) is discovering that
8701 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8703 is a multiple of
8705 SAVE_EXPR (J * 8)
8707 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8709 This code also handles discovering that
8711 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8713 is a multiple of 8 so we don't have to worry about dealing with a
8714 possible remainder.
8716 Note that we *look* inside a SAVE_EXPR only to determine how it was
8717 calculated; it is not safe for fold to do much of anything else with the
8718 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8719 at run time. For example, the latter example above *cannot* be implemented
8720 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8721 evaluation time of the original SAVE_EXPR is not necessarily the same at
8722 the time the new expression is evaluated. The only optimization of this
8723 sort that would be valid is changing
8725 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8727 divided by 8 to
8729 SAVE_EXPR (I) * SAVE_EXPR (J)
8731 (where the same SAVE_EXPR (J) is used in the original and the
8732 transformed version). */
8734 static int
8735 multiple_of_p (tree type, tree top, tree bottom)
8737 if (operand_equal_p (top, bottom, 0))
8738 return 1;
8740 if (TREE_CODE (type) != INTEGER_TYPE)
8741 return 0;
8743 switch (TREE_CODE (top))
8745 case MULT_EXPR:
8746 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8747 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8749 case PLUS_EXPR:
8750 case MINUS_EXPR:
8751 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8752 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8754 case LSHIFT_EXPR:
8755 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8757 tree op1, t1;
8759 op1 = TREE_OPERAND (top, 1);
8760 /* const_binop may not detect overflow correctly,
8761 so check for it explicitly here. */
8762 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8763 > TREE_INT_CST_LOW (op1)
8764 && TREE_INT_CST_HIGH (op1) == 0
8765 && 0 != (t1 = fold_convert (type,
8766 const_binop (LSHIFT_EXPR,
8767 size_one_node,
8768 op1, 0)))
8769 && ! TREE_OVERFLOW (t1))
8770 return multiple_of_p (type, t1, bottom);
8772 return 0;
8774 case NOP_EXPR:
8775 /* Can't handle conversions from non-integral or wider integral type. */
8776 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8777 || (TYPE_PRECISION (type)
8778 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8779 return 0;
8781 /* .. fall through ... */
8783 case SAVE_EXPR:
8784 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8786 case INTEGER_CST:
8787 if (TREE_CODE (bottom) != INTEGER_CST
8788 || (TREE_UNSIGNED (type)
8789 && (tree_int_cst_sgn (top) < 0
8790 || tree_int_cst_sgn (bottom) < 0)))
8791 return 0;
8792 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8793 top, bottom, 0));
8795 default:
8796 return 0;
8800 /* Return true if `t' is known to be non-negative. */
8803 tree_expr_nonnegative_p (tree t)
8805 switch (TREE_CODE (t))
8807 case ABS_EXPR:
8808 return 1;
8810 case INTEGER_CST:
8811 return tree_int_cst_sgn (t) >= 0;
8813 case REAL_CST:
8814 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8816 case PLUS_EXPR:
8817 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8818 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8819 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8821 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8822 both unsigned and at least 2 bits shorter than the result. */
8823 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8824 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8825 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8827 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8828 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8829 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8830 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8832 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8833 TYPE_PRECISION (inner2)) + 1;
8834 return prec < TYPE_PRECISION (TREE_TYPE (t));
8837 break;
8839 case MULT_EXPR:
8840 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8842 /* x * x for floating point x is always non-negative. */
8843 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8844 return 1;
8845 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8846 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8849 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8850 both unsigned and their total bits is shorter than the result. */
8851 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8852 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8853 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8855 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8856 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8857 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8858 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8859 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8860 < TYPE_PRECISION (TREE_TYPE (t));
8862 return 0;
8864 case TRUNC_DIV_EXPR:
8865 case CEIL_DIV_EXPR:
8866 case FLOOR_DIV_EXPR:
8867 case ROUND_DIV_EXPR:
8868 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8869 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8871 case TRUNC_MOD_EXPR:
8872 case CEIL_MOD_EXPR:
8873 case FLOOR_MOD_EXPR:
8874 case ROUND_MOD_EXPR:
8875 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8877 case RDIV_EXPR:
8878 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8879 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8881 case NOP_EXPR:
8883 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8884 tree outer_type = TREE_TYPE (t);
8886 if (TREE_CODE (outer_type) == REAL_TYPE)
8888 if (TREE_CODE (inner_type) == REAL_TYPE)
8889 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8890 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8892 if (TREE_UNSIGNED (inner_type))
8893 return 1;
8894 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8897 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8899 if (TREE_CODE (inner_type) == REAL_TYPE)
8900 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8901 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8902 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8903 && TREE_UNSIGNED (inner_type);
8906 break;
8908 case COND_EXPR:
8909 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8910 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8911 case COMPOUND_EXPR:
8912 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8913 case MIN_EXPR:
8914 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8915 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8916 case MAX_EXPR:
8917 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8918 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8919 case MODIFY_EXPR:
8920 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8921 case BIND_EXPR:
8922 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8923 case SAVE_EXPR:
8924 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8925 case NON_LVALUE_EXPR:
8926 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8927 case FLOAT_EXPR:
8928 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8929 case RTL_EXPR:
8930 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8932 case CALL_EXPR:
8934 tree fndecl = get_callee_fndecl (t);
8935 tree arglist = TREE_OPERAND (t, 1);
8936 if (fndecl
8937 && DECL_BUILT_IN (fndecl)
8938 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8939 switch (DECL_FUNCTION_CODE (fndecl))
8941 case BUILT_IN_CABS:
8942 case BUILT_IN_CABSL:
8943 case BUILT_IN_CABSF:
8944 case BUILT_IN_EXP:
8945 case BUILT_IN_EXPF:
8946 case BUILT_IN_EXPL:
8947 case BUILT_IN_EXP2:
8948 case BUILT_IN_EXP2F:
8949 case BUILT_IN_EXP2L:
8950 case BUILT_IN_EXP10:
8951 case BUILT_IN_EXP10F:
8952 case BUILT_IN_EXP10L:
8953 case BUILT_IN_FABS:
8954 case BUILT_IN_FABSF:
8955 case BUILT_IN_FABSL:
8956 case BUILT_IN_FFS:
8957 case BUILT_IN_FFSL:
8958 case BUILT_IN_FFSLL:
8959 case BUILT_IN_PARITY:
8960 case BUILT_IN_PARITYL:
8961 case BUILT_IN_PARITYLL:
8962 case BUILT_IN_POPCOUNT:
8963 case BUILT_IN_POPCOUNTL:
8964 case BUILT_IN_POPCOUNTLL:
8965 case BUILT_IN_POW10:
8966 case BUILT_IN_POW10F:
8967 case BUILT_IN_POW10L:
8968 case BUILT_IN_SQRT:
8969 case BUILT_IN_SQRTF:
8970 case BUILT_IN_SQRTL:
8971 return 1;
8973 case BUILT_IN_ATAN:
8974 case BUILT_IN_ATANF:
8975 case BUILT_IN_ATANL:
8976 case BUILT_IN_CEIL:
8977 case BUILT_IN_CEILF:
8978 case BUILT_IN_CEILL:
8979 case BUILT_IN_FLOOR:
8980 case BUILT_IN_FLOORF:
8981 case BUILT_IN_FLOORL:
8982 case BUILT_IN_NEARBYINT:
8983 case BUILT_IN_NEARBYINTF:
8984 case BUILT_IN_NEARBYINTL:
8985 case BUILT_IN_ROUND:
8986 case BUILT_IN_ROUNDF:
8987 case BUILT_IN_ROUNDL:
8988 case BUILT_IN_TRUNC:
8989 case BUILT_IN_TRUNCF:
8990 case BUILT_IN_TRUNCL:
8991 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8993 case BUILT_IN_POW:
8994 case BUILT_IN_POWF:
8995 case BUILT_IN_POWL:
8996 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8998 default:
8999 break;
9003 /* ... fall through ... */
9005 default:
9006 if (truth_value_p (TREE_CODE (t)))
9007 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9008 return 1;
9011 /* We don't know sign of `t', so be conservative and return false. */
9012 return 0;
9015 /* Return true if `r' is known to be non-negative.
9016 Only handles constants at the moment. */
9019 rtl_expr_nonnegative_p (rtx r)
9021 switch (GET_CODE (r))
9023 case CONST_INT:
9024 return INTVAL (r) >= 0;
9026 case CONST_DOUBLE:
9027 if (GET_MODE (r) == VOIDmode)
9028 return CONST_DOUBLE_HIGH (r) >= 0;
9029 return 0;
9031 case CONST_VECTOR:
9033 int units, i;
9034 rtx elt;
9036 units = CONST_VECTOR_NUNITS (r);
9038 for (i = 0; i < units; ++i)
9040 elt = CONST_VECTOR_ELT (r, i);
9041 if (!rtl_expr_nonnegative_p (elt))
9042 return 0;
9045 return 1;
9048 case SYMBOL_REF:
9049 case LABEL_REF:
9050 /* These are always nonnegative. */
9051 return 1;
9053 default:
9054 return 0;
9058 #include "gt-fold-const.h"