/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

static tree fold_negate_const (tree, tree);
static tree fold_abs_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
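
/* For illustration (assuming, hypothetically, a 32-bit HOST_WIDE_INT):
   0x7fffffff + 1 wraps to 0x80000000, so the operands agree in sign but
   the sum does not, and OVERFLOW_SUM_SIGN (0x7fffffff, 1, 0x80000000)
   is nonzero.  When the operands differ in sign, as in 1 + -1 = 0,
   ~((a) ^ (b)) has a clear sign bit, so the macro correctly reports no
   overflow regardless of the sum.  */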
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
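
/* For illustration (again assuming a hypothetical 32-bit HOST_WIDE_INT):
   BASE is 0x10000, LOWPART (0x12345678) is 0x5678 and
   HIGHPART (0x12345678) is 0x1234, and 0x5678 + 0x1234 * 0x10000
   recovers 0x12345678.  Keeping each word below BASE guarantees that the
   product of any two words fits in a single HOST_WIDE_INT, which is what
   makes the schoolbook multiplication in mul_double below carry-safe.  */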
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
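
/* Note that decode is the exact inverse of encode: every word produced
   by encode is less than BASE, so words[0] + words[1] * BASE cannot
   overflow the low piece, and a round trip through encode and decode
   reproduces the original (LOW, HI) pair bit for bit.  */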
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
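
/* The carry out of the low-word addition is recovered without any wider
   type: unsigned addition wraps modulo 2**HOST_BITS_PER_WIDE_INT, and it
   wraps exactly when the truncated sum is smaller than an operand, so
   (l < l1) above is precisely the carry into the high word.  */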
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
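
/* This is two's complement negation, -X == ~X + 1, split across two
   words.  When L1 is nonzero, -L1 == ~L1 + 1 produces no carry out of
   the low word, so the high word is simply ~H1 and overflow is
   impossible.  When L1 is zero the +1 carries all the way into the high
   word, giving -H1; the only overflow case is H1 being the most
   negative value, which (*hv & h1) < 0 detects because only then are
   both H1 and -H1 negative.  */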
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
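
/* The odd-looking double shift above,
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1, computes
   l1 >> (HOST_BITS_PER_WIDE_INT - count) without ever shifting by the
   full word width: when COUNT is zero the naive form would shift by
   HOST_BITS_PER_WIDE_INT, which C leaves undefined, whereas the split
   form shifts by at most HOST_BITS_PER_WIDE_INT - 1 and then by 1.  */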
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
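
/* A rotation within PREC bits is the OR of two shifts: the bits pushed
   out of the top by COUNT reappear at the bottom via the complementary
   right shift by PREC - COUNT.  For an 8-bit value, rotating 0xAB left
   by 4 gives ((0xAB << 4) | (0xAB >> 4)) & 0xFF == 0xBA.  */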
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order divisor digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
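
/* The rounding modes differ only in how a nonzero remainder adjusts the
   truncated quotient.  For 7 / 2 (quotient 3, remainder 1): TRUNC gives
   3, FLOOR gives 3, CEIL gives 4, and ROUND gives 4 since
   2 * |rem| >= |den|.  For -7 / 2 (quotient -3, remainder -1): TRUNC
   gives -3, FLOOR gives -4, CEIL gives -3, and ROUND gives -4; that is,
   ROUND resolves halfway-or-more cases away from zero.  */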
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }

  return false;
}
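
/* These are exactly the odd functions among the listed built-ins:
   sin (-x) == -sin (x), and likewise for tan, asin and atan.  Even
   functions such as cos, where cos (-x) == cos (x), are deliberately
   absent, since negating their argument does not negate the result.  */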
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
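
/* The INTEGER_CST check above exists because, with -ftrapv, negating
   the most negative value of a signed type traps: e.g. for a 32-bit
   int, -(-2147483648) is not representable.  The value compared
   against, 1 << (prec - 1), is that minimum value's bit pattern, so
   every other constant is reported as cheaply negatable.  */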
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 1)),
                                              TREE_OPERAND (t, 0))));
          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 0)),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
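
/* The RSHIFT_EXPR transformation works because shifting a 32-bit value
   right by 31 can only produce two results: the arithmetic shift yields
   0 or -1 and the logical shift yields 0 or 1, and those pairs are each
   other's negations.  Flipping the signedness of the shift therefore
   negates the expression without emitting an explicit NEGATE_EXPR.  */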
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
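
/* For example, splitting A + 5 with CODE == PLUS_EXPR yields VAR == A,
   *LITP == 5 and a null *CONP, while splitting A - 5 yields VAR == A
   with the 5 stored in *MINUS_LITP, recording that the literal was
   subtracted rather than building a negated literal node.  */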
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
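
/* The complex cases implement the usual textbook identities:
   (r1 + i1*i) * (r2 + i2*i) == (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i,
   and division multiplies through by the conjugate of the divisor, so
   (r1 + i1*i) / (r2 + i2*i)
     == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),
   with MAGSQUARED holding the denominator r2*r2 + i2*i2.  */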
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TYPE_UNSIGNED (type)
                                     < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
1875 /* Convert expression ARG to type TYPE. Used by the middle-end for
1876 simple conversions in preference to calling the front-end's convert. */
1878 static tree
1879 fold_convert (tree type, tree arg)
1881 tree orig = TREE_TYPE (arg);
1882 tree tem;
1884 if (type == orig)
1885 return arg;
1887 if (TREE_CODE (arg) == ERROR_MARK
1888 || TREE_CODE (type) == ERROR_MARK
1889 || TREE_CODE (orig) == ERROR_MARK)
1890 return error_mark_node;
1892 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1893 return fold (build1 (NOP_EXPR, type, arg));
1895 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1897 if (TREE_CODE (arg) == INTEGER_CST)
1899 tem = fold_convert_const (NOP_EXPR, type, arg);
1900 if (tem != NULL_TREE)
1901 return tem;
1903 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1904 return fold (build1 (NOP_EXPR, type, arg));
1905 if (TREE_CODE (orig) == COMPLEX_TYPE)
1907 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1908 return fold_convert (type, tem);
1910 if (TREE_CODE (orig) == VECTOR_TYPE
1911 && GET_MODE_SIZE (TYPE_MODE (type))
1912 == GET_MODE_SIZE (TYPE_MODE (orig)))
1913 return fold (build1 (NOP_EXPR, type, arg));
1915 else if (TREE_CODE (type) == REAL_TYPE)
1917 if (TREE_CODE (arg) == INTEGER_CST)
1919 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1920 if (tem != NULL_TREE)
1921 return tem;
1923 else if (TREE_CODE (arg) == REAL_CST)
1925 tem = fold_convert_const (NOP_EXPR, type, arg);
1926 if (tem != NULL_TREE)
1927 return tem;
1930 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1931 return fold (build1 (FLOAT_EXPR, type, arg));
1932 if (TREE_CODE (orig) == REAL_TYPE)
1933 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1934 type, arg));
1935 if (TREE_CODE (orig) == COMPLEX_TYPE)
1937 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1938 return fold_convert (type, tem);
1941 else if (TREE_CODE (type) == COMPLEX_TYPE)
1943 if (INTEGRAL_TYPE_P (orig)
1944 || POINTER_TYPE_P (orig)
1945 || TREE_CODE (orig) == REAL_TYPE)
1946 return build (COMPLEX_EXPR, type,
1947 fold_convert (TREE_TYPE (type), arg),
1948 fold_convert (TREE_TYPE (type), integer_zero_node));
1949 if (TREE_CODE (orig) == COMPLEX_TYPE)
1951 tree rpart, ipart;
1953 if (TREE_CODE (arg) == COMPLEX_EXPR)
1955 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1956 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1957 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1960 arg = save_expr (arg);
1961 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1962 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1963 rpart = fold_convert (TREE_TYPE (type), rpart);
1964 ipart = fold_convert (TREE_TYPE (type), ipart);
1965 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1968 else if (TREE_CODE (type) == VECTOR_TYPE)
1970 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1971 && GET_MODE_SIZE (TYPE_MODE (type))
1972 == GET_MODE_SIZE (TYPE_MODE (orig)))
1973 return fold (build1 (NOP_EXPR, type, arg));
1974 if (TREE_CODE (orig) == VECTOR_TYPE
1975 && GET_MODE_SIZE (TYPE_MODE (type))
1976 == GET_MODE_SIZE (TYPE_MODE (orig)))
1977 return fold (build1 (NOP_EXPR, type, arg));
1979 else if (VOID_TYPE_P (type))
1980 return fold (build1 (CONVERT_EXPR, type, arg));
1981 abort ();
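/* Illustrative sketch, not part of the original source: the COMPLEX_TYPE
   source cases above mirror C99 semantics, where converting a complex
   value to a scalar type keeps only the real part; that is why
   fold_convert builds a REALPART_EXPR and converts the result.  */

static double
fold_convert_complex_sketch (double _Complex z)
{
  double via_cast = (double) z;  /* Discards the imaginary part.  */
  double via_real = __real__ z;  /* GCC's explicit real-part operator.  */
  return via_cast - via_real;    /* 0.0 whenever the real part is finite.  */
}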
1984 /* Return an expr equal to X but certainly not valid as an lvalue. */
1986 tree
1987 non_lvalue (tree x)
1989 tree result;
1991 /* These things are certainly not lvalues. */
1992 if (TREE_CODE (x) == NON_LVALUE_EXPR
1993 || TREE_CODE (x) == INTEGER_CST
1994 || TREE_CODE (x) == REAL_CST
1995 || TREE_CODE (x) == STRING_CST
1996 || TREE_CODE (x) == ADDR_EXPR)
1997 return x;
1999 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2000 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2001 return result;
2004 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2005 Zero means allow extended lvalues. */
2007 int pedantic_lvalues;
2009 /* When pedantic, return an expr equal to X but certainly not valid as a
2010 pedantic lvalue. Otherwise, return X. */
2012 tree
2013 pedantic_non_lvalue (tree x)
2015 if (pedantic_lvalues)
2016 return non_lvalue (x);
2017 else
2018 return x;
2021 /* Given a tree comparison code, return the code that is the logical inverse
2022 of the given code. It is not safe to do this for floating-point
2023 comparisons, except for NE_EXPR and EQ_EXPR. */
2025 static enum tree_code
2026 invert_tree_comparison (enum tree_code code)
2028 switch (code)
2030 case EQ_EXPR:
2031 return NE_EXPR;
2032 case NE_EXPR:
2033 return EQ_EXPR;
2034 case GT_EXPR:
2035 return LE_EXPR;
2036 case GE_EXPR:
2037 return LT_EXPR;
2038 case LT_EXPR:
2039 return GE_EXPR;
2040 case LE_EXPR:
2041 return GT_EXPR;
2042 default:
2043 abort ();
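/* Illustrative sketch, not part of the original source: why the inversion
   above is unsafe for floating point.  With a NaN operand an ordered
   comparison and its would-be inverse are both false, so LT_EXPR must not
   be rewritten as GE_EXPR under IEEE semantics.  */

static int
invert_comparison_sketch (void)
{
  double nan = 0.0 / 0.0;   /* A quiet NaN, assuming IEEE arithmetic.  */
  int lt = nan < 1.0;       /* 0.  */
  int ge = nan >= 1.0;      /* Also 0, not !lt.  */
  return lt == !ge;         /* 0: the inversion does not hold for NaN.  */
}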
2047 /* Similar, but return the comparison that results if the operands are
2048 swapped. This is safe for floating-point. */
2050 static enum tree_code
2051 swap_tree_comparison (enum tree_code code)
2053 switch (code)
2055 case EQ_EXPR:
2056 case NE_EXPR:
2057 return code;
2058 case GT_EXPR:
2059 return LT_EXPR;
2060 case GE_EXPR:
2061 return LE_EXPR;
2062 case LT_EXPR:
2063 return GT_EXPR;
2064 case LE_EXPR:
2065 return GE_EXPR;
2066 default:
2067 abort ();
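/* Illustrative sketch, not part of the original source: swapping operands
   is safe even for IEEE floating point, because x < y and y > x agree for
   every pair of values, NaNs included (both are false).  */

static int
swap_comparison_sketch (double x, double y)
{
  return (x < y) == (y > x);  /* 1 for all inputs.  */
}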
2072 /* Convert a comparison tree code from an enum tree_code representation
2073 into a compcode bit-based encoding. This function is the inverse of
2074 compcode_to_comparison. */
2076 static int
2077 comparison_to_compcode (enum tree_code code)
2079 switch (code)
2081 case LT_EXPR:
2082 return COMPCODE_LT;
2083 case EQ_EXPR:
2084 return COMPCODE_EQ;
2085 case LE_EXPR:
2086 return COMPCODE_LE;
2087 case GT_EXPR:
2088 return COMPCODE_GT;
2089 case NE_EXPR:
2090 return COMPCODE_NE;
2091 case GE_EXPR:
2092 return COMPCODE_GE;
2093 default:
2094 abort ();
2098 /* Convert a compcode bit-based encoding of a comparison operator back
2099 to GCC's enum tree_code representation. This function is the
2100 inverse of comparison_to_compcode. */
2102 static enum tree_code
2103 compcode_to_comparison (int code)
2105 switch (code)
2107 case COMPCODE_LT:
2108 return LT_EXPR;
2109 case COMPCODE_EQ:
2110 return EQ_EXPR;
2111 case COMPCODE_LE:
2112 return LE_EXPR;
2113 case COMPCODE_GT:
2114 return GT_EXPR;
2115 case COMPCODE_NE:
2116 return NE_EXPR;
2117 case COMPCODE_GE:
2118 return GE_EXPR;
2119 default:
2120 abort ();
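/* Illustrative sketch, not part of the original source: the point of the
   compcode encoding is that each primitive outcome gets its own bit, so
   logical combinations of comparisons of the same operands reduce to bit
   operations on the codes.  The values below assume the conventional
   assignment LT = 1, EQ = 2, GT = 4 (hence LE = 3, NE = 5, GE = 6).  */

static int
compcode_combine_sketch (void)
{
  int lt = 1, eq = 2;
  int le = lt | eq;  /* (a < b) || (a == b) becomes a <= b.  */
  return le;         /* 3, matching COMPCODE_LE above.  */
}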
2124 /* Return nonzero if CODE is a tree code that represents a truth value. */
2126 static int
2127 truth_value_p (enum tree_code code)
2129 return (TREE_CODE_CLASS (code) == '<'
2130 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2131 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2132 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2135 /* Return nonzero if two operands (typically of the same tree node)
2136 are necessarily equal. If either argument has side-effects this
2137 function returns zero.
2139 If ONLY_CONST is nonzero, only return nonzero for constants.
2140 This function tests whether the operands are indistinguishable;
2141 it does not test whether they are equal using C's == operation.
2142 The distinction is important for IEEE floating point, because
2143 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2144 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2146 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2147 even though it may hold multiple values during a function.
2148 This is because a GCC tree node guarantees that nothing else is
2149 executed between the evaluation of its "operands" (which may often
2150 be evaluated in arbitrary order). Hence if the operands themselves
2151 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2152 same value in each operand/subexpression. Hence a zero value for
2153 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2154 If comparing arbitrary expression trees, such as from different
2155 statements, ONLY_CONST must usually be nonzero. */
2157 int
2158 operand_equal_p (tree arg0, tree arg1, int only_const)
2160 tree fndecl;
2162 /* If either is ERROR_MARK, they aren't equal. */
2163 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2164 return 0;
2166 /* If both types don't have the same signedness, then we can't consider
2167 them equal. We must check this before the STRIP_NOPS calls
2168 because they may change the signedness of the arguments. */
2169 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2170 return 0;
2172 STRIP_NOPS (arg0);
2173 STRIP_NOPS (arg1);
2175 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2176 /* This is needed for conversions and for COMPONENT_REF.
2177 Might as well play it safe and always test this. */
2178 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2179 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2180 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2181 return 0;
2183 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2184 We don't care about side effects in that case because the SAVE_EXPR
2185 takes care of that for us. In all other cases, two expressions are
2186 equal if they have no side effects. If we have two identical
2187 expressions with side effects that should be treated the same due
2188 to the only side effects being identical SAVE_EXPR's, that will
2189 be detected in the recursive calls below. */
2190 if (arg0 == arg1 && ! only_const
2191 && (TREE_CODE (arg0) == SAVE_EXPR
2192 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2193 return 1;
2195 /* Next handle constant cases, those for which we can return 1 even
2196 if ONLY_CONST is set. */
2197 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2198 switch (TREE_CODE (arg0))
2200 case INTEGER_CST:
2201 return (! TREE_CONSTANT_OVERFLOW (arg0)
2202 && ! TREE_CONSTANT_OVERFLOW (arg1)
2203 && tree_int_cst_equal (arg0, arg1));
2205 case REAL_CST:
2206 return (! TREE_CONSTANT_OVERFLOW (arg0)
2207 && ! TREE_CONSTANT_OVERFLOW (arg1)
2208 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2209 TREE_REAL_CST (arg1)));
2211 case VECTOR_CST:
2213 tree v1, v2;
2215 if (TREE_CONSTANT_OVERFLOW (arg0)
2216 || TREE_CONSTANT_OVERFLOW (arg1))
2217 return 0;
2219 v1 = TREE_VECTOR_CST_ELTS (arg0);
2220 v2 = TREE_VECTOR_CST_ELTS (arg1);
2221 while (v1 && v2)
2223 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2224 only_const))
2225 return 0;
2226 v1 = TREE_CHAIN (v1);
2227 v2 = TREE_CHAIN (v2);
2230 return 1;
2233 case COMPLEX_CST:
2234 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2235 only_const)
2236 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2237 only_const));
2239 case STRING_CST:
2240 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2241 && ! memcmp (TREE_STRING_POINTER (arg0),
2242 TREE_STRING_POINTER (arg1),
2243 TREE_STRING_LENGTH (arg0)));
2245 case ADDR_EXPR:
2246 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2247 0);
2248 default:
2249 break;
2252 if (only_const)
2253 return 0;
2255 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2257 case '1':
2258 /* Two conversions are equal only if signedness and modes match. */
2259 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2260 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2261 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2262 return 0;
2264 return operand_equal_p (TREE_OPERAND (arg0, 0),
2265 TREE_OPERAND (arg1, 0), 0);
2267 case '<':
2268 case '2':
2269 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2270 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2271 0))
2272 return 1;
2274 /* For commutative ops, allow the other order. */
2275 return (commutative_tree_code (TREE_CODE (arg0))
2276 && operand_equal_p (TREE_OPERAND (arg0, 0),
2277 TREE_OPERAND (arg1, 1), 0)
2278 && operand_equal_p (TREE_OPERAND (arg0, 1),
2279 TREE_OPERAND (arg1, 0), 0));
2281 case 'r':
2282 /* If either of the pointer (or reference) expressions we are
2283 dereferencing contain a side effect, these cannot be equal. */
2284 if (TREE_SIDE_EFFECTS (arg0)
2285 || TREE_SIDE_EFFECTS (arg1))
2286 return 0;
2288 switch (TREE_CODE (arg0))
2290 case INDIRECT_REF:
2291 return operand_equal_p (TREE_OPERAND (arg0, 0),
2292 TREE_OPERAND (arg1, 0), 0);
2294 case COMPONENT_REF:
2295 case ARRAY_REF:
2296 case ARRAY_RANGE_REF:
2297 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2298 TREE_OPERAND (arg1, 0), 0)
2299 && operand_equal_p (TREE_OPERAND (arg0, 1),
2300 TREE_OPERAND (arg1, 1), 0));
2302 case BIT_FIELD_REF:
2303 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2304 TREE_OPERAND (arg1, 0), 0)
2305 && operand_equal_p (TREE_OPERAND (arg0, 1),
2306 TREE_OPERAND (arg1, 1), 0)
2307 && operand_equal_p (TREE_OPERAND (arg0, 2),
2308 TREE_OPERAND (arg1, 2), 0));
2309 default:
2310 return 0;
2313 case 'e':
2314 switch (TREE_CODE (arg0))
2316 case ADDR_EXPR:
2317 case TRUTH_NOT_EXPR:
2318 return operand_equal_p (TREE_OPERAND (arg0, 0),
2319 TREE_OPERAND (arg1, 0), 0);
2321 case RTL_EXPR:
2322 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2324 case CALL_EXPR:
2325 /* If the CALL_EXPRs call different functions, then they
2326 clearly cannot be equal. */
2327 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2328 TREE_OPERAND (arg1, 0), 0))
2329 return 0;
2331 /* Only consider const functions equivalent. */
2332 fndecl = get_callee_fndecl (arg0);
2333 if (fndecl == NULL_TREE
2334 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2335 return 0;
2337 /* Now see if all the arguments are the same. operand_equal_p
2338 does not handle TREE_LIST, so we walk the operands here
2339 feeding them to operand_equal_p. */
2340 arg0 = TREE_OPERAND (arg0, 1);
2341 arg1 = TREE_OPERAND (arg1, 1);
2342 while (arg0 && arg1)
2344 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2345 return 0;
2347 arg0 = TREE_CHAIN (arg0);
2348 arg1 = TREE_CHAIN (arg1);
2351 /* If we get here and both argument lists are exhausted
2352 then the CALL_EXPRs are equal. */
2353 return ! (arg0 || arg1);
2355 default:
2356 return 0;
2359 case 'd':
2360 /* Consider __builtin_sqrt equal to sqrt. */
2361 return TREE_CODE (arg0) == FUNCTION_DECL
2362 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2363 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2364 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2366 default:
2367 return 0;
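/* Illustrative sketch, not part of the original source: the IEEE
   distinctions documented above operand_equal_p.  Two values may compare
   equal with == yet be distinguishable (-0.0 and 0.0), and a value may be
   indistinguishable from itself yet compare unequal (NaN).  */

static int
ieee_identity_sketch (void)
{
  double pz = 0.0, nz = -0.0;
  int eq = pz == nz;                /* 1: == cannot tell them apart.  */
  int same = 1.0 / pz == 1.0 / nz;  /* 0: +inf versus -inf.  */
  double nan = 0.0 / 0.0;
  int nan_eq = nan == nan;          /* 0: NaN never compares equal.  */
  return eq && !same && !nan_eq;    /* 1, assuming IEEE arithmetic.  */
}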
2371 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2372 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2374 When in doubt, return 0. */
2376 static int
2377 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2379 int unsignedp1, unsignedpo;
2380 tree primarg0, primarg1, primother;
2381 unsigned int correct_width;
2383 if (operand_equal_p (arg0, arg1, 0))
2384 return 1;
2386 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2387 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2388 return 0;
2390 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2391 and see if the inner values are the same. This removes any
2392 signedness comparison, which doesn't matter here. */
2393 primarg0 = arg0, primarg1 = arg1;
2394 STRIP_NOPS (primarg0);
2395 STRIP_NOPS (primarg1);
2396 if (operand_equal_p (primarg0, primarg1, 0))
2397 return 1;
2399 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2400 actual comparison operand, ARG0.
2402 First throw away any conversions to wider types
2403 already present in the operands. */
2405 primarg1 = get_narrower (arg1, &unsignedp1);
2406 primother = get_narrower (other, &unsignedpo);
2408 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2409 if (unsignedp1 == unsignedpo
2410 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2411 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2413 tree type = TREE_TYPE (arg0);
2415 /* Make sure shorter operand is extended the right way
2416 to match the longer operand. */
2417 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2418 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2420 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2421 return 1;
2424 return 0;
2427 /* See if ARG is an expression that is either a comparison or is performing
2428 arithmetic on comparisons. The comparisons must only be comparing
2429 two different values, which will be stored in *CVAL1 and *CVAL2; if
2430 they are nonzero it means that some operands have already been found.
2431 No variables may be used anywhere else in the expression except in the
2432 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2433 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2435 If this is true, return 1. Otherwise, return zero. */
2437 static int
2438 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2440 enum tree_code code = TREE_CODE (arg);
2441 char class = TREE_CODE_CLASS (code);
2443 /* We can handle some of the 'e' cases here. */
2444 if (class == 'e' && code == TRUTH_NOT_EXPR)
2445 class = '1';
2446 else if (class == 'e'
2447 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2448 || code == COMPOUND_EXPR))
2449 class = '2';
2451 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2452 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2454 /* If we've already found a CVAL1 or CVAL2, this expression is
2455 too complex to handle. */
2456 if (*cval1 || *cval2)
2457 return 0;
2459 class = '1';
2460 *save_p = 1;
2463 switch (class)
2465 case '1':
2466 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2468 case '2':
2469 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2470 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2471 cval1, cval2, save_p));
2473 case 'c':
2474 return 1;
2476 case 'e':
2477 if (code == COND_EXPR)
2478 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2479 cval1, cval2, save_p)
2480 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2481 cval1, cval2, save_p)
2482 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2483 cval1, cval2, save_p));
2484 return 0;
2486 case '<':
2487 /* First see if we can handle the first operand, then the second. For
2488 the second operand, we know *CVAL1 can't be zero. It must be that
2489 one side of the comparison is each of the values; test for the
2490 case where this isn't true by failing if the two operands
2491 are the same. */
2493 if (operand_equal_p (TREE_OPERAND (arg, 0),
2494 TREE_OPERAND (arg, 1), 0))
2495 return 0;
2497 if (*cval1 == 0)
2498 *cval1 = TREE_OPERAND (arg, 0);
2499 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2500 ;
2501 else if (*cval2 == 0)
2502 *cval2 = TREE_OPERAND (arg, 0);
2503 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2504 ;
2505 else
2506 return 0;
2508 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2509 ;
2510 else if (*cval2 == 0)
2511 *cval2 = TREE_OPERAND (arg, 1);
2512 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2513 ;
2514 else
2515 return 0;
2517 return 1;
2519 default:
2520 return 0;
2524 /* ARG is a tree that is known to contain just arithmetic operations and
2525 comparisons. Evaluate the operations in the tree substituting NEW0 for
2526 any occurrence of OLD0 as an operand of a comparison and likewise for
2527 NEW1 and OLD1. */
2529 static tree
2530 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2532 tree type = TREE_TYPE (arg);
2533 enum tree_code code = TREE_CODE (arg);
2534 char class = TREE_CODE_CLASS (code);
2536 /* We can handle some of the 'e' cases here. */
2537 if (class == 'e' && code == TRUTH_NOT_EXPR)
2538 class = '1';
2539 else if (class == 'e'
2540 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2541 class = '2';
2543 switch (class)
2545 case '1':
2546 return fold (build1 (code, type,
2547 eval_subst (TREE_OPERAND (arg, 0),
2548 old0, new0, old1, new1)));
2550 case '2':
2551 return fold (build (code, type,
2552 eval_subst (TREE_OPERAND (arg, 0),
2553 old0, new0, old1, new1),
2554 eval_subst (TREE_OPERAND (arg, 1),
2555 old0, new0, old1, new1)));
2557 case 'e':
2558 switch (code)
2560 case SAVE_EXPR:
2561 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2563 case COMPOUND_EXPR:
2564 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2566 case COND_EXPR:
2567 return fold (build (code, type,
2568 eval_subst (TREE_OPERAND (arg, 0),
2569 old0, new0, old1, new1),
2570 eval_subst (TREE_OPERAND (arg, 1),
2571 old0, new0, old1, new1),
2572 eval_subst (TREE_OPERAND (arg, 2),
2573 old0, new0, old1, new1)));
2574 default:
2575 break;
2577 /* Fall through - ??? */
2579 case '<':
2581 tree arg0 = TREE_OPERAND (arg, 0);
2582 tree arg1 = TREE_OPERAND (arg, 1);
2584 /* We need to check both for exact equality and tree equality. The
2585 former will be true if the operand has a side-effect. In that
2586 case, we know the operand occurred exactly once. */
2588 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2589 arg0 = new0;
2590 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2591 arg0 = new1;
2593 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2594 arg1 = new0;
2595 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2596 arg1 = new1;
2598 return fold (build (code, type, arg0, arg1));
2601 default:
2602 return arg;
2606 /* Return a tree for the case when the result of an expression is RESULT
2607 converted to TYPE and OMITTED was previously an operand of the expression
2608 but is now not needed (e.g., we folded OMITTED * 0).
2610 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2611 the conversion of RESULT to TYPE. */
2613 tree
2614 omit_one_operand (tree type, tree result, tree omitted)
2616 tree t = fold_convert (type, result);
2618 if (TREE_SIDE_EFFECTS (omitted))
2619 return build (COMPOUND_EXPR, type, omitted, t);
2621 return non_lvalue (t);
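/* Illustrative sketch, not part of the original source: why the
   COMPOUND_EXPR above is needed.  When folding, say, f () * 0, the call's
   side effects must survive even though its value is dead; the C comma
   operator is the source-level analogue of the COMPOUND_EXPR built here.
   G is a hypothetical function with side effects.  */

extern int g (void);

static int
omit_operand_sketch (void)
{
  return (g (), 0);  /* Evaluate g for its effects, yield the constant.  */
}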
2624 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2626 static tree
2627 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2629 tree t = fold_convert (type, result);
2631 if (TREE_SIDE_EFFECTS (omitted))
2632 return build (COMPOUND_EXPR, type, omitted, t);
2634 return pedantic_non_lvalue (t);
2637 /* Return a simplified tree node for the truth-negation of ARG. This
2638 never alters ARG itself. We assume that ARG is an operation that
2639 returns a truth value (0 or 1). */
2641 tree
2642 invert_truthvalue (tree arg)
2644 tree type = TREE_TYPE (arg);
2645 enum tree_code code = TREE_CODE (arg);
2647 if (code == ERROR_MARK)
2648 return arg;
2650 /* If this is a comparison, we can simply invert it, except for
2651 floating-point non-equality comparisons, in which case we just
2652 enclose a TRUTH_NOT_EXPR around what we have. */
2654 if (TREE_CODE_CLASS (code) == '<')
2656 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2657 && !flag_unsafe_math_optimizations
2658 && code != NE_EXPR
2659 && code != EQ_EXPR)
2660 return build1 (TRUTH_NOT_EXPR, type, arg);
2661 else if (code == UNORDERED_EXPR
2662 || code == ORDERED_EXPR
2663 || code == UNEQ_EXPR
2664 || code == UNLT_EXPR
2665 || code == UNLE_EXPR
2666 || code == UNGT_EXPR
2667 || code == UNGE_EXPR)
2668 return build1 (TRUTH_NOT_EXPR, type, arg);
2669 else
2670 return build (invert_tree_comparison (code), type,
2671 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2674 switch (code)
2676 case INTEGER_CST:
2677 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2679 case TRUTH_AND_EXPR:
2680 return build (TRUTH_OR_EXPR, type,
2681 invert_truthvalue (TREE_OPERAND (arg, 0)),
2682 invert_truthvalue (TREE_OPERAND (arg, 1)));
2684 case TRUTH_OR_EXPR:
2685 return build (TRUTH_AND_EXPR, type,
2686 invert_truthvalue (TREE_OPERAND (arg, 0)),
2687 invert_truthvalue (TREE_OPERAND (arg, 1)));
2689 case TRUTH_XOR_EXPR:
2690 /* Here we can invert either operand. We invert the first operand
2691 unless the second operand is a TRUTH_NOT_EXPR in which case our
2692 result is the XOR of the first operand with the inside of the
2693 negation of the second operand. */
2695 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2696 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2697 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2698 else
2699 return build (TRUTH_XOR_EXPR, type,
2700 invert_truthvalue (TREE_OPERAND (arg, 0)),
2701 TREE_OPERAND (arg, 1));
2703 case TRUTH_ANDIF_EXPR:
2704 return build (TRUTH_ORIF_EXPR, type,
2705 invert_truthvalue (TREE_OPERAND (arg, 0)),
2706 invert_truthvalue (TREE_OPERAND (arg, 1)));
2708 case TRUTH_ORIF_EXPR:
2709 return build (TRUTH_ANDIF_EXPR, type,
2710 invert_truthvalue (TREE_OPERAND (arg, 0)),
2711 invert_truthvalue (TREE_OPERAND (arg, 1)));
2713 case TRUTH_NOT_EXPR:
2714 return TREE_OPERAND (arg, 0);
2716 case COND_EXPR:
2717 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2718 invert_truthvalue (TREE_OPERAND (arg, 1)),
2719 invert_truthvalue (TREE_OPERAND (arg, 2)));
2721 case COMPOUND_EXPR:
2722 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2723 invert_truthvalue (TREE_OPERAND (arg, 1)));
2725 case NON_LVALUE_EXPR:
2726 return invert_truthvalue (TREE_OPERAND (arg, 0));
2728 case NOP_EXPR:
2729 case CONVERT_EXPR:
2730 case FLOAT_EXPR:
2731 return build1 (TREE_CODE (arg), type,
2732 invert_truthvalue (TREE_OPERAND (arg, 0)));
2734 case BIT_AND_EXPR:
2735 if (!integer_onep (TREE_OPERAND (arg, 1)))
2736 break;
2737 return build (EQ_EXPR, type, arg,
2738 fold_convert (type, integer_zero_node));
2740 case SAVE_EXPR:
2741 return build1 (TRUTH_NOT_EXPR, type, arg);
2743 case CLEANUP_POINT_EXPR:
2744 return build1 (CLEANUP_POINT_EXPR, type,
2745 invert_truthvalue (TREE_OPERAND (arg, 0)));
2747 default:
2748 break;
2750 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2751 abort ();
2752 return build1 (TRUTH_NOT_EXPR, type, arg);
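/* Illustrative sketch, not part of the original source: the
   TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's laws,
   which in C terms are the following identities on truth values.  */

static int
invert_truthvalue_sketch (int a, int b)
{
  int demorgan_and = (!(a && b)) == (!a || !b);  /* Always 1.  */
  int demorgan_or = (!(a || b)) == (!a && !b);   /* Always 1.  */
  return demorgan_and && demorgan_or;
}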
2755 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2756 operands are another bit-wise operation with a common input. If so,
2757 distribute the bit operations to save an operation and possibly two if
2758 constants are involved. For example, convert
2759 (A | B) & (A | C) into A | (B & C)
2760 Further simplification will occur if B and C are constants.
2762 If this optimization cannot be done, 0 will be returned. */
2764 static tree
2765 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2767 tree common;
2768 tree left, right;
2770 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2771 || TREE_CODE (arg0) == code
2772 || (TREE_CODE (arg0) != BIT_AND_EXPR
2773 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2774 return 0;
2776 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2778 common = TREE_OPERAND (arg0, 0);
2779 left = TREE_OPERAND (arg0, 1);
2780 right = TREE_OPERAND (arg1, 1);
2782 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2784 common = TREE_OPERAND (arg0, 0);
2785 left = TREE_OPERAND (arg0, 1);
2786 right = TREE_OPERAND (arg1, 0);
2788 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2790 common = TREE_OPERAND (arg0, 1);
2791 left = TREE_OPERAND (arg0, 0);
2792 right = TREE_OPERAND (arg1, 1);
2794 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2796 common = TREE_OPERAND (arg0, 1);
2797 left = TREE_OPERAND (arg0, 0);
2798 right = TREE_OPERAND (arg1, 0);
2800 else
2801 return 0;
2803 return fold (build (TREE_CODE (arg0), type, common,
2804 fold (build (code, type, left, right))));
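/* Illustrative sketch, not part of the original source: the distribution
   law applied above, in C terms.  One OR and one AND replace two ORs and
   an AND, and when b and c are constants the inner b & c folds away.  */

static int
distribute_bits_sketch (unsigned a, unsigned b, unsigned c)
{
  unsigned before = (a | b) & (a | c);
  unsigned after = a | (b & c);  /* Bitwise identical to BEFORE.  */
  return before == after;        /* Always 1.  */
}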
2807 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2808 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2810 static tree
2811 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2812 int unsignedp)
2814 tree result = build (BIT_FIELD_REF, type, inner,
2815 size_int (bitsize), bitsize_int (bitpos));
2817 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
2819 return result;
2822 /* Optimize a bit-field compare.
2824 There are two cases: First is a compare against a constant and the
2825 second is a comparison of two items where the fields are at the same
2826 bit position relative to the start of a chunk (byte, halfword, word)
2827 large enough to contain it. In these cases we can avoid the shift
2828 implicit in bitfield extractions.
2830 For constants, we emit a compare of the shifted constant with the
2831 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2832 compared. For two fields at the same position, we do the ANDs with the
2833 similar mask and compare the result of the ANDs.
2835 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2836 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2837 are the left and right operands of the comparison, respectively.
2839 If the optimization described above can be done, we return the resulting
2840 tree. Otherwise we return zero. */
2842 static tree
2843 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2844 tree lhs, tree rhs)
2846 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2847 tree type = TREE_TYPE (lhs);
2848 tree signed_type, unsigned_type;
2849 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2850 enum machine_mode lmode, rmode, nmode;
2851 int lunsignedp, runsignedp;
2852 int lvolatilep = 0, rvolatilep = 0;
2853 tree linner, rinner = NULL_TREE;
2854 tree mask;
2855 tree offset;
2857 /* Get all the information about the extractions being done. If the bit size
2858 is the same as the size of the underlying object, we aren't doing an
2859 extraction at all and so can do nothing. We also don't want to
2860 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2861 then will no longer be able to replace it. */
2862 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2863 &lunsignedp, &lvolatilep);
2864 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2865 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2866 return 0;
2868 if (!const_p)
2870 /* If this is not a constant, we can only do something if bit positions,
2871 sizes, and signedness are the same. */
2872 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2873 &runsignedp, &rvolatilep);
2875 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2876 || lunsignedp != runsignedp || offset != 0
2877 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2878 return 0;
2881 /* See if we can find a mode to refer to this field. We should be able to,
2882 but fail if we can't. */
2883 nmode = get_best_mode (lbitsize, lbitpos,
2884 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2885 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2886 TYPE_ALIGN (TREE_TYPE (rinner))),
2887 word_mode, lvolatilep || rvolatilep);
2888 if (nmode == VOIDmode)
2889 return 0;
2891 /* Set signed and unsigned types of the precision of this mode for the
2892 shifts below. */
2893 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
2894 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
2896 /* Compute the bit position and size for the new reference and our offset
2897 within it. If the new reference is the same size as the original, we
2898 won't optimize anything, so return zero. */
2899 nbitsize = GET_MODE_BITSIZE (nmode);
2900 nbitpos = lbitpos & ~ (nbitsize - 1);
2901 lbitpos -= nbitpos;
2902 if (nbitsize == lbitsize)
2903 return 0;
2905 if (BYTES_BIG_ENDIAN)
2906 lbitpos = nbitsize - lbitsize - lbitpos;
2908 /* Make the mask to be used against the extracted field. */
2909 mask = build_int_2 (~0, ~0);
2910 TREE_TYPE (mask) = unsigned_type;
2911 force_fit_type (mask, 0);
2912 mask = fold_convert (unsigned_type, mask);
2913 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2914 mask = const_binop (RSHIFT_EXPR, mask,
2915 size_int (nbitsize - lbitsize - lbitpos), 0);
2917 if (! const_p)
2918 /* If not comparing with constant, just rework the comparison
2919 and return. */
2920 return build (code, compare_type,
2921 build (BIT_AND_EXPR, unsigned_type,
2922 make_bit_field_ref (linner, unsigned_type,
2923 nbitsize, nbitpos, 1),
2924 mask),
2925 build (BIT_AND_EXPR, unsigned_type,
2926 make_bit_field_ref (rinner, unsigned_type,
2927 nbitsize, nbitpos, 1),
2928 mask));
2930 /* Otherwise, we are handling the constant case. See if the constant is too
2931 big for the field. Warn and return a tree for 0 (false) if so. We do
2932 this not only for its own sake, but to avoid having to test for this
2933 error case below. If we didn't, we might generate wrong code.
2935 For unsigned fields, the constant shifted right by the field length should
2936 be all zero. For signed fields, the high-order bits should agree with
2937 the sign bit. */
2939 if (lunsignedp)
2941 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2942 fold_convert (unsigned_type, rhs),
2943 size_int (lbitsize), 0)))
2945 warning ("comparison is always %d due to width of bit-field",
2946 code == NE_EXPR);
2947 return fold_convert (compare_type,
2948 (code == NE_EXPR
2949 ? integer_one_node : integer_zero_node));
2952 else
2954 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2955 size_int (lbitsize - 1), 0);
2956 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2958 warning ("comparison is always %d due to width of bit-field",
2959 code == NE_EXPR);
2960 return fold_convert (compare_type,
2961 (code == NE_EXPR
2962 ? integer_one_node : integer_zero_node));
2966 /* Single-bit compares should always be against zero. */
2967 if (lbitsize == 1 && ! integer_zerop (rhs))
2969 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2970 rhs = fold_convert (type, integer_zero_node);
2973 /* Make a new bitfield reference, shift the constant over the
2974 appropriate number of bits and mask it with the computed mask
2975 (in case this was a signed field). If we changed it, make a new one. */
2976 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2977 if (lvolatilep)
2979 TREE_SIDE_EFFECTS (lhs) = 1;
2980 TREE_THIS_VOLATILE (lhs) = 1;
2983 rhs = fold (const_binop (BIT_AND_EXPR,
2984 const_binop (LSHIFT_EXPR,
2985 fold_convert (unsigned_type, rhs),
2986 size_int (lbitpos), 0),
2987 mask, 0));
2989 return build (code, compare_type,
2990 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2991 rhs);
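/* Illustrative sketch, not part of the original source: the effect of the
   rewrite above.  Rather than extracting the bit-field (load, shift,
   mask) and comparing, we mask the containing word in place and compare
   against the constant shifted into position.  The layout below (width 4
   at bit position 3, little-endian bit numbering) is an assumption for
   illustration only.  */

static int
bitfield_compare_sketch (unsigned word)
{
  int naive = ((word >> 3) & 0xf) == 5;            /* Extract, then compare.  */
  int folded = (word & (0xfu << 3)) == (5u << 3);  /* Mask and compare in place.  */
  return naive == folded;                          /* Always 1.  */
}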
2994 /* Subroutine for fold_truthop: decode a field reference.
2996 If EXP is a comparison reference, we return the innermost reference.
2998 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2999 set to the starting bit number.
3001 If the innermost field can be completely contained in a mode-sized
3002 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3004 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3005 otherwise it is not changed.
3007 *PUNSIGNEDP is set to the signedness of the field.
3009 *PMASK is set to the mask used. This is either contained in a
3010 BIT_AND_EXPR or derived from the width of the field.
3012 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3014 Return 0 if this is not a component reference or is one that we can't
3015 do anything with. */
3017 static tree
3018 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3019 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3020 int *punsignedp, int *pvolatilep,
3021 tree *pmask, tree *pand_mask)
3023 tree outer_type = 0;
3024 tree and_mask = 0;
3025 tree mask, inner, offset;
3026 tree unsigned_type;
3027 unsigned int precision;
3029 /* All the optimizations using this function assume integer fields.
3030 There are problems with FP fields since the type_for_size call
3031 below can fail for, e.g., XFmode. */
3032 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3033 return 0;
3035 /* We are interested in the bare arrangement of bits, so strip everything
3036 that doesn't affect the machine mode. However, record the type of the
3037 outermost expression if it may matter below. */
3038 if (TREE_CODE (exp) == NOP_EXPR
3039 || TREE_CODE (exp) == CONVERT_EXPR
3040 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3041 outer_type = TREE_TYPE (exp);
3042 STRIP_NOPS (exp);
3044 if (TREE_CODE (exp) == BIT_AND_EXPR)
3046 and_mask = TREE_OPERAND (exp, 1);
3047 exp = TREE_OPERAND (exp, 0);
3048 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3049 if (TREE_CODE (and_mask) != INTEGER_CST)
3050 return 0;
3053 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3054 punsignedp, pvolatilep);
3055 if ((inner == exp && and_mask == 0)
3056 || *pbitsize < 0 || offset != 0
3057 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3058 return 0;
3060 /* If the number of bits in the reference is the same as the bitsize of
3061 the outer type, then the outer type gives the signedness. Otherwise
3062 (in case of a small bitfield) the signedness is unchanged. */
3063 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3064 *punsignedp = TYPE_UNSIGNED (outer_type);
3066 /* Compute the mask to access the bitfield. */
3067 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3068 precision = TYPE_PRECISION (unsigned_type);
3070 mask = build_int_2 (~0, ~0);
3071 TREE_TYPE (mask) = unsigned_type;
3072 force_fit_type (mask, 0);
3073 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3074 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3076 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3077 if (and_mask != 0)
3078 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3079 fold_convert (unsigned_type, and_mask), mask));
3081 *pmask = mask;
3082 *pand_mask = and_mask;
3083 return inner;
3086 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3087 bit positions. */
3089 static int
3090 all_ones_mask_p (tree mask, int size)
3092 tree type = TREE_TYPE (mask);
3093 unsigned int precision = TYPE_PRECISION (type);
3094 tree tmask;
3096 tmask = build_int_2 (~0, ~0);
3097 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3098 force_fit_type (tmask, 0);
3099 return
3100 tree_int_cst_equal (mask,
3101 const_binop (RSHIFT_EXPR,
3102 const_binop (LSHIFT_EXPR, tmask,
3103 size_int (precision - size),
3104 0),
3105 size_int (precision - size), 0));
3108 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3109 represents the sign bit of EXP's type. If EXP represents a sign
3110 or zero extension, also test VAL against the unextended type.
3111 The return value is the (sub)expression whose sign bit is VAL,
3112 or NULL_TREE otherwise. */
3114 static tree
3115 sign_bit_p (tree exp, tree val)
3117 unsigned HOST_WIDE_INT mask_lo, lo;
3118 HOST_WIDE_INT mask_hi, hi;
3119 int width;
3120 tree t;
3122 /* Tree EXP must have an integral type. */
3123 t = TREE_TYPE (exp);
3124 if (! INTEGRAL_TYPE_P (t))
3125 return NULL_TREE;
3127 /* Tree VAL must be an integer constant. */
3128 if (TREE_CODE (val) != INTEGER_CST
3129 || TREE_CONSTANT_OVERFLOW (val))
3130 return NULL_TREE;
3132 width = TYPE_PRECISION (t);
3133 if (width > HOST_BITS_PER_WIDE_INT)
3135 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3136 lo = 0;
3138 mask_hi = ((unsigned HOST_WIDE_INT) -1
3139 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3140 mask_lo = -1;
3142 else
3144 hi = 0;
3145 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3147 mask_hi = 0;
3148 mask_lo = ((unsigned HOST_WIDE_INT) -1
3149 >> (HOST_BITS_PER_WIDE_INT - width));
3152 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3153 treat VAL as if it were unsigned. */
3154 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3155 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3156 return exp;
3158 /* Handle extension from a narrower type. */
3159 if (TREE_CODE (exp) == NOP_EXPR
3160 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3161 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3163 return NULL_TREE;
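/* Illustrative sketch, not part of the original source: the constant that
   sign_bit_p tests VAL against, for a precision that fits in a single
   64-bit word (the two-word hi/lo logic above handles wider types).  */

static unsigned long long
sign_bit_constant_sketch (int width)
{
  /* Width 8 yields 0x80; width 32 yields 0x80000000.  */
  return 1ull << (width - 1);
}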
3166 /* Subroutine for fold_truthop: determine if an operand is simple enough
3167 to be evaluated unconditionally. */
3169 static int
3170 simple_operand_p (tree exp)
3172 /* Strip any conversions that don't change the machine mode. */
3173 while ((TREE_CODE (exp) == NOP_EXPR
3174 || TREE_CODE (exp) == CONVERT_EXPR)
3175 && (TYPE_MODE (TREE_TYPE (exp))
3176 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3177 exp = TREE_OPERAND (exp, 0);
3179 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3180 || (DECL_P (exp)
3181 && ! TREE_ADDRESSABLE (exp)
3182 && ! TREE_THIS_VOLATILE (exp)
3183 && ! DECL_NONLOCAL (exp)
3184 /* Don't regard global variables as simple. They may be
3185 allocated in ways unknown to the compiler (shared memory,
3186 #pragma weak, etc). */
3187 && ! TREE_PUBLIC (exp)
3188 && ! DECL_EXTERNAL (exp)
3189 /* Loading a static variable is unduly expensive, but global
3190 registers aren't expensive. */
3191 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3194 /* The following functions are subroutines to fold_range_test and allow it to
3195 try to change a logical combination of comparisons into a range test.
3197 For example, both
3198 X == 2 || X == 3 || X == 4 || X == 5
3200 X >= 2 && X <= 5
3201 are converted to
3202 (unsigned) (X - 2) <= 3
3204 We describe each set of comparisons as being either inside or outside
3205 a range, using a variable named like IN_P, and then describe the
3206 range with a lower and upper bound. If one of the bounds is omitted,
3207 it represents either the highest or lowest value of the type.
3209 In the comments below, we represent a range by two numbers in brackets
3210 preceded by a "+" to designate being inside that range, or a "-" to
3211 designate being outside that range, so the condition can be inverted by
3212 flipping the prefix. An omitted bound is represented by a "-". For
3213 example, "- [-, 10]" means being outside the range starting at the lowest
3214 possible value and ending at 10, in other words, being greater than 10.
3215 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3216 always false.
3218 We set up things so that the missing bounds are handled in a consistent
3219 manner so neither a missing bound nor "true" and "false" need to be
3220 handled using a special case. */
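/* Illustrative sketch, not part of the original source: the example from
   the comment above written out in C.  A chain of equality tests
   collapses to one subtraction and one unsigned comparison; the operand
   is converted to unsigned before subtracting so that values below the
   range wrap to large numbers.  */

static int
range_test_sketch (int x)
{
  int chain = x == 2 || x == 3 || x == 4 || x == 5;
  int range = (unsigned) x - 2 <= 3;  /* Same truth value for every x.  */
  return chain == range;              /* Always 1.  */
}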
3222 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3223 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3224 and UPPER1_P are nonzero if the respective argument is an upper bound
3225 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3226 must be specified for a comparison. ARG1 will be converted to ARG0's
3227 type if both are specified. */
3229 static tree
3230 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3231 tree arg1, int upper1_p)
3233 tree tem;
3234 int result;
3235 int sgn0, sgn1;
3237 /* If neither arg represents infinity, do the normal operation.
3238 Else, if not a comparison, return infinity. Else handle the special
3239 comparison rules. Note that most of the cases below won't occur, but
3240 are handled for consistency. */
3242 if (arg0 != 0 && arg1 != 0)
3244 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3245 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3246 STRIP_NOPS (tem);
3247 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3250 if (TREE_CODE_CLASS (code) != '<')
3251 return 0;
3253 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3254 for neither. In real mathematics, we cannot assume open-ended ranges are
3255 the same. But this is computer arithmetic, where numbers are finite.
3256 We can therefore represent any unbounded range by the value Z,
3257 Z being greater than any representable number. This permits
3258 us to treat unbounded ranges as equal. */
3259 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3260 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3261 switch (code)
3263 case EQ_EXPR:
3264 result = sgn0 == sgn1;
3265 break;
3266 case NE_EXPR:
3267 result = sgn0 != sgn1;
3268 break;
3269 case LT_EXPR:
3270 result = sgn0 < sgn1;
3271 break;
3272 case LE_EXPR:
3273 result = sgn0 <= sgn1;
3274 break;
3275 case GT_EXPR:
3276 result = sgn0 > sgn1;
3277 break;
3278 case GE_EXPR:
3279 result = sgn0 >= sgn1;
3280 break;
3281 default:
3282 abort ();
3285 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3288 /* Given EXP, a logical expression, set the range it is testing into
3289 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3290 actually being tested. *PLOW and *PHIGH will be made of the same type
3291 as the returned expression. If EXP is not a comparison, we will most
3292 likely not be returning a useful value and range. */
3294 static tree
3295 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3297 enum tree_code code;
3298 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3299 tree orig_type = NULL_TREE;
3300 int in_p, n_in_p;
3301 tree low, high, n_low, n_high;
3303 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3304 and see if we can refine the range. Some of the cases below may not
3305 happen, but it doesn't seem worth worrying about this. We "continue"
3306 the outer loop when we've changed something; otherwise we "break"
3307 the switch, which will "break" the while. */
3309 in_p = 0;
3310 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3312 while (1)
3314 code = TREE_CODE (exp);
3316 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3318 if (first_rtl_op (code) > 0)
3319 arg0 = TREE_OPERAND (exp, 0);
3320 if (TREE_CODE_CLASS (code) == '<'
3321 || TREE_CODE_CLASS (code) == '1'
3322 || TREE_CODE_CLASS (code) == '2')
3323 type = TREE_TYPE (arg0);
3324 if (TREE_CODE_CLASS (code) == '2'
3325 || TREE_CODE_CLASS (code) == '<'
3326 || (TREE_CODE_CLASS (code) == 'e'
3327 && TREE_CODE_LENGTH (code) > 1))
3328 arg1 = TREE_OPERAND (exp, 1);
3331 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3332 lose a cast by accident. */
3333 if (type != NULL_TREE && orig_type == NULL_TREE)
3334 orig_type = type;
3336 switch (code)
3338 case TRUTH_NOT_EXPR:
3339 in_p = ! in_p, exp = arg0;
3340 continue;
3342 case EQ_EXPR: case NE_EXPR:
3343 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3344 /* We can only do something if the range is testing for zero
3345 and if the second operand is an integer constant. Note that
3346 saying something is "in" the range we make is done by
3347 complementing IN_P since it will be set in the initial case of
3348 being not equal to zero; "out" is leaving it alone. */
3349 if (low == 0 || high == 0
3350 || ! integer_zerop (low) || ! integer_zerop (high)
3351 || TREE_CODE (arg1) != INTEGER_CST)
3352 break;
3354 switch (code)
3356 case NE_EXPR: /* - [c, c] */
3357 low = high = arg1;
3358 break;
3359 case EQ_EXPR: /* + [c, c] */
3360 in_p = ! in_p, low = high = arg1;
3361 break;
3362 case GT_EXPR: /* - [-, c] */
3363 low = 0, high = arg1;
3364 break;
3365 case GE_EXPR: /* + [c, -] */
3366 in_p = ! in_p, low = arg1, high = 0;
3367 break;
3368 case LT_EXPR: /* - [c, -] */
3369 low = arg1, high = 0;
3370 break;
3371 case LE_EXPR: /* + [-, c] */
3372 in_p = ! in_p, low = 0, high = arg1;
3373 break;
3374 default:
3375 abort ();
3378 exp = arg0;
3380 /* If this is an unsigned comparison, we also know that EXP is
3381 greater than or equal to zero. We base the range tests we make
3382 on that fact, so we record it here so we can parse existing
3383 range tests. */
3384 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3386 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3387 1, fold_convert (type, integer_zero_node),
3388 NULL_TREE))
3389 break;
3391 in_p = n_in_p, low = n_low, high = n_high;
3393 /* If the high bound is missing, but we have a nonzero low
3394 bound, reverse the range so it goes from zero to the low bound
3395 minus 1. */
3396 if (high == 0 && low && ! integer_zerop (low))
3398 in_p = ! in_p;
3399 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3400 integer_one_node, 0);
3401 low = fold_convert (type, integer_zero_node);
3404 continue;
3406 case NEGATE_EXPR:
3407 /* (-x) IN [a,b] -> x in [-b, -a] */
3408 n_low = range_binop (MINUS_EXPR, type,
3409 fold_convert (type, integer_zero_node),
3410 0, high, 1);
3411 n_high = range_binop (MINUS_EXPR, type,
3412 fold_convert (type, integer_zero_node),
3413 0, low, 0);
3414 low = n_low, high = n_high;
3415 exp = arg0;
3416 continue;
3418 case BIT_NOT_EXPR:
3419 /* ~ X -> -X - 1 */
3420 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3421 fold_convert (type, integer_one_node));
3422 continue;
3424 case PLUS_EXPR: case MINUS_EXPR:
3425 if (TREE_CODE (arg1) != INTEGER_CST)
3426 break;
3428 /* If EXP is signed, any overflow in the computation is undefined,
3429 so we don't worry about it so long as our computations on
3430 the bounds don't overflow. For unsigned, overflow is defined
3431 and this is exactly the right thing. */
3432 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3433 type, low, 0, arg1, 0);
3434 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3435 type, high, 1, arg1, 0);
3436 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3437 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3438 break;
3440 /* Check for an unsigned range which has wrapped around the maximum
3441 value thus making n_high < n_low, and normalize it. */
3442 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3444 low = range_binop (PLUS_EXPR, type, n_high, 0,
3445 integer_one_node, 0);
3446 high = range_binop (MINUS_EXPR, type, n_low, 0,
3447 integer_one_node, 0);
3449 /* If the range is of the form +/- [ x+1, x ], we won't
3450 be able to normalize it. But then, it represents the
3451 whole range or the empty set, so make it
3452 +/- [ -, - ]. */
3453 if (tree_int_cst_equal (n_low, low)
3454 && tree_int_cst_equal (n_high, high))
3455 low = high = 0;
3456 else
3457 in_p = ! in_p;
3459 else
3460 low = n_low, high = n_high;
3462 exp = arg0;
3463 continue;
3465 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3466 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3467 break;
3469 if (! INTEGRAL_TYPE_P (type)
3470 || (low != 0 && ! int_fits_type_p (low, type))
3471 || (high != 0 && ! int_fits_type_p (high, type)))
3472 break;
3474 n_low = low, n_high = high;
3476 if (n_low != 0)
3477 n_low = fold_convert (type, n_low);
3479 if (n_high != 0)
3480 n_high = fold_convert (type, n_high);
3482 /* If we're converting from an unsigned to a signed type,
3483 we will be doing the comparison as unsigned. The tests above
3484 have already verified that LOW and HIGH are both positive.
3486 So we have to make sure that the original unsigned value will
3487 be interpreted as positive. */
3488 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3490 tree equiv_type = lang_hooks.types.type_for_mode
3491 (TYPE_MODE (type), 1);
3492 tree high_positive;
3494 /* A range without an upper bound is, naturally, unbounded.
3495 Since convert would have cropped a very large value, use
3496 the max value for the destination type. */
3497 high_positive
3498 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3499 : TYPE_MAX_VALUE (type);
3501 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3502 high_positive = fold (build (RSHIFT_EXPR, type,
3503 fold_convert (type,
3504 high_positive),
3505 fold_convert (type,
3506 integer_one_node)));
3508 /* If the low bound is specified, "and" the range with the
3509 range for which the original unsigned value will be
3510 positive. */
3511 if (low != 0)
3513 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3514 1, n_low, n_high, 1,
3515 fold_convert (type, integer_zero_node),
3516 high_positive))
3517 break;
3519 in_p = (n_in_p == in_p);
3521 else
3523 /* Otherwise, "or" the range with the range of the input
3524 that will be interpreted as negative. */
3525 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3526 0, n_low, n_high, 1,
3527 fold_convert (type, integer_zero_node),
3528 high_positive))
3529 break;
3531 in_p = (in_p != n_in_p);
3535 exp = arg0;
3536 low = n_low, high = n_high;
3537 continue;
3539 default:
3540 break;
3543 break;
3546 /* If EXP is a constant, we can evaluate whether this is true or false. */
3547 if (TREE_CODE (exp) == INTEGER_CST)
3549 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3550 exp, 0, low, 0))
3551 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3552 exp, 1, high, 1)));
3553 low = high = 0;
3554 exp = 0;
3557 *pin_p = in_p, *plow = low, *phigh = high;
3558 return exp;
3561 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3562 type, TYPE, return an expression to test if EXP is in (or out of, depending
3563 on IN_P) the range. */
3565 static tree
3566 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3568 tree etype = TREE_TYPE (exp);
3569 tree value;
3571 if (! in_p
3572 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3573 return invert_truthvalue (value);
3575 if (low == 0 && high == 0)
3576 return fold_convert (type, integer_one_node);
3578 if (low == 0)
3579 return fold (build (LE_EXPR, type, exp, high));
3581 if (high == 0)
3582 return fold (build (GE_EXPR, type, exp, low));
3584 if (operand_equal_p (low, high, 0))
3585 return fold (build (EQ_EXPR, type, exp, low));
3587 if (integer_zerop (low))
3589 if (! TYPE_UNSIGNED (etype))
3591 etype = lang_hooks.types.unsigned_type (etype);
3592 high = fold_convert (etype, high);
3593 exp = fold_convert (etype, exp);
3595 return build_range_check (type, exp, 1, 0, high);
3598 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3599 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3601 unsigned HOST_WIDE_INT lo;
3602 HOST_WIDE_INT hi;
3603 int prec;
3605 prec = TYPE_PRECISION (etype);
3606 if (prec <= HOST_BITS_PER_WIDE_INT)
3608 hi = 0;
3609 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3611 else
3613 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3614 lo = (unsigned HOST_WIDE_INT) -1;
3617 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3619 if (TYPE_UNSIGNED (etype))
3621 etype = lang_hooks.types.signed_type (etype);
3622 exp = fold_convert (etype, exp);
3624 return fold (build (GT_EXPR, type, exp,
3625 fold_convert (etype, integer_zero_node)));
3629 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3630 && ! TREE_OVERFLOW (value))
3631 return build_range_check (type,
3632 fold (build (MINUS_EXPR, etype, exp, low)),
3633 1, fold_convert (etype, integer_zero_node),
3634 value);
3636 return 0;
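/* Illustrative sketch, not part of the original source: the
   (c >= 1) && (c <= 127) optimization noted above.  For an 8-bit unsigned
   value the pair of tests holds exactly when the value reinterpreted as
   signed is positive.  (The conversion below is implementation-defined in
   ISO C but wraps modulo 256 under GCC.)  */

static int
signed_reinterpret_sketch (unsigned char c)
{
  int pair = c >= 1 && c <= 127;
  int single = (signed char) c > 0;  /* Same truth value.  */
  return pair == single;             /* Always 1.  */
}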
3639 /* Given two ranges, see if we can merge them into one. Return 1 if we
3640 can, 0 if we can't. Set the output range into the specified parameters. */
3642 static int
3643 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3644 tree high0, int in1_p, tree low1, tree high1)
3646 int no_overlap;
3647 int subset;
3648 int temp;
3649 tree tem;
3650 int in_p;
3651 tree low, high;
3652 int lowequal = ((low0 == 0 && low1 == 0)
3653 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3654 low0, 0, low1, 0)));
3655 int highequal = ((high0 == 0 && high1 == 0)
3656 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3657 high0, 1, high1, 1)));
3659 /* Make range 0 be the range that starts first, or ends last if they
3660 start at the same value. Swap them if it isn't. */
3661 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3662 low0, 0, low1, 0))
3663 || (lowequal
3664 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3665 high1, 1, high0, 1))))
3667 temp = in0_p, in0_p = in1_p, in1_p = temp;
3668 tem = low0, low0 = low1, low1 = tem;
3669 tem = high0, high0 = high1, high1 = tem;
3672 /* Now flag two cases, whether the ranges are disjoint or whether the
3673 second range is totally subsumed in the first. Note that the tests
3674 below are simplified by the ones above. */
3675 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3676 high0, 1, low1, 0));
3677 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3678 high1, 1, high0, 1));
3680 /* We now have four cases, depending on whether we are including or
3681 excluding the two ranges. */
3682 if (in0_p && in1_p)
3684 /* If they don't overlap, the result is false. If the second range
3685 is a subset it is the result. Otherwise, the range is from the start
3686 of the second to the end of the first. */
3687 if (no_overlap)
3688 in_p = 0, low = high = 0;
3689 else if (subset)
3690 in_p = 1, low = low1, high = high1;
3691 else
3692 in_p = 1, low = low1, high = high0;
3695 else if (in0_p && ! in1_p)
3697 /* If they don't overlap, the result is the first range. If they are
3698 equal, the result is false. If the second range is a subset of the
3699 first, and the ranges begin at the same place, we go from just after
3700 the end of the first range to the end of the second. If the second
3701 range is not a subset of the first, or if it is a subset and both
3702 ranges end at the same place, the range starts at the start of the
3703 first range and ends just before the second range.
3704 Otherwise, we can't describe this as a single range. */
3705 if (no_overlap)
3706 in_p = 1, low = low0, high = high0;
3707 else if (lowequal && highequal)
3708 in_p = 0, low = high = 0;
3709 else if (subset && lowequal)
3711 in_p = 1, high = high0;
3712 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3713 integer_one_node, 0);
3715 else if (! subset || highequal)
3717 in_p = 1, low = low0;
3718 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3719 integer_one_node, 0);
3721 else
3722 return 0;
3725 else if (! in0_p && in1_p)
3727 /* If they don't overlap, the result is the second range. If the second
3728 is a subset of the first, the result is false. Otherwise,
3729 the range starts just after the first range and ends at the
3730 end of the second. */
3731 if (no_overlap)
3732 in_p = 1, low = low1, high = high1;
3733 else if (subset || highequal)
3734 in_p = 0, low = high = 0;
3735 else
3737 in_p = 1, high = high1;
3738 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3739 integer_one_node, 0);
3743 else
3745 /* The case where we are excluding both ranges. Here the complex case
3746 is if they don't overlap. In that case, the only time we have a
3747 range is if they are adjacent. If the second is a subset of the
3748 first, the result is the first. Otherwise, the range to exclude
3749 starts at the beginning of the first range and ends at the end of the
3750 second. */
3751 if (no_overlap)
3753 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3754 range_binop (PLUS_EXPR, NULL_TREE,
3755 high0, 1,
3756 integer_one_node, 1),
3757 1, low1, 0)))
3758 in_p = 0, low = low0, high = high1;
3759 else
3760 return 0;
3762 else if (subset)
3763 in_p = 0, low = low0, high = high0;
3764 else
3765 in_p = 0, low = low0, high = high1;
3768 *pin_p = in_p, *plow = low, *phigh = high;
3769 return 1;
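/* Illustrative sketch, not part of the original file: the in0_p && in1_p
   case above restated for plain host ints.  The helper below is invented
   for exposition; on trees, merge_ranges must also cope with open-ended
   bounds (NULL low/high), which this sketch ignores.  */
#if 0
/* Intersect [lo0, hi0] with [lo1, hi1].  Returns 1 and sets *lo/*hi when
   the conjunction of the two range tests is a single nonempty range,
   0 when the ranges are disjoint (the "always false" outcome above).  */
static int
intersect_ranges (int lo0, int hi0, int lo1, int hi1, int *lo, int *hi)
{
  *lo = lo0 > lo1 ? lo0 : lo1;
  *hi = hi0 < hi1 ? hi0 : hi1;
  return *lo <= *hi;
}
/* e.g. [0,9] & [5,15] -> [5,9]; [0,4] & [10,15] -> empty.  */
#endif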
3772 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3773 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3774 #endif
3776 /* EXP is some logical combination of boolean tests. See if we can
3777 merge it into some range test. Return the new tree if so. */
3779 static tree
3780 fold_range_test (tree exp)
3782 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3783 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3784 int in0_p, in1_p, in_p;
3785 tree low0, low1, low, high0, high1, high;
3786 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3787 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3788 tree tem;
3790 /* If this is an OR operation, invert both sides; we will invert
3791 again at the end. */
3792 if (or_op)
3793 in0_p = ! in0_p, in1_p = ! in1_p;
3795 /* If both expressions are the same, if we can merge the ranges, and we
3796 can build the range test, return it or its inversion. If one of the
3797 ranges is always true or always false, consider it to be the same
3798 expression as the other. */
3799 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3800 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3801 in1_p, low1, high1)
3802 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3803 lhs != 0 ? lhs
3804 : rhs != 0 ? rhs : integer_zero_node,
3805 in_p, low, high))))
3806 return or_op ? invert_truthvalue (tem) : tem;
3808 /* On machines where the branch cost is expensive, if this is a
3809 short-circuited branch and the underlying object on both sides
3810 is the same, make a non-short-circuit operation. */
3811 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3812 && lhs != 0 && rhs != 0
3813 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3814 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3815 && operand_equal_p (lhs, rhs, 0))
3817 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3818 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3819 which cases we can't do this. */
3820 if (simple_operand_p (lhs))
3821 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3822 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3823 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3824 TREE_OPERAND (exp, 1));
3826 else if (lang_hooks.decls.global_bindings_p () == 0
3827 && ! CONTAINS_PLACEHOLDER_P (lhs))
3829 tree common = save_expr (lhs);
3831 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3832 or_op ? ! in0_p : in0_p,
3833 low0, high0))
3834 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3835 or_op ? ! in1_p : in1_p,
3836 low1, high1))))
3837 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3838 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3839 TREE_TYPE (exp), lhs, rhs);
3843 return 0;
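/* Illustrative sketch, not part of the original file: the kind of rewrite
   fold_range_test enables, shown at the C source level.  The function
   names are invented; the folded form is what build_range_check produces
   for a range with constant bounds (assuming ch - '0' cannot overflow).  */
#if 0
static int
is_digit_two_tests (int ch)
{
  return ch >= '0' && ch <= '9';
}

static int
is_digit_one_test (int ch)
{
  /* Subtract the low bound and compare unsigned: values below '0' wrap
     to large unsigned numbers, so one comparison covers both ends.  */
  return (unsigned int) (ch - '0') <= 9U;
}
#endif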
3846 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3847 bit value. Arrange things so the extra bits will be set to zero if and
3848 only if C is sign-extended to its full width. If MASK is nonzero,
3849 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3851 static tree
3852 unextend (tree c, int p, int unsignedp, tree mask)
3854 tree type = TREE_TYPE (c);
3855 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3856 tree temp;
3858 if (p == modesize || unsignedp)
3859 return c;
3861 /* We work by getting just the sign bit into the low-order bit, then
3862 into the high-order bit, then sign-extend. We then XOR that value
3863 with C. */
3864 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3865 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3867 /* We must use a signed type in order to get an arithmetic right shift.
3868 However, we must also avoid introducing accidental overflows, so that
3869 a subsequent call to integer_zerop will work. Hence we must
3870 do the type conversion here. At this point, the constant is either
3871 zero or one, and the conversion to a signed type can never overflow.
3872 We could get an overflow if this conversion is done anywhere else. */
3873 if (TYPE_UNSIGNED (type))
3874 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
3876 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3877 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3878 if (mask != 0)
3879 temp = const_binop (BIT_AND_EXPR, temp,
3880 fold_convert (TREE_TYPE (c), mask), 0);
3881 /* If necessary, convert the type back to match the type of C. */
3882 if (TYPE_UNSIGNED (type))
3883 temp = fold_convert (type, temp);
3885 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
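/* Illustrative sketch, not part of the original file: the sign-extension
   step that unextend builds out of shifts, shown on host integers.  The
   helper is invented for exposition and assumes a 32-bit two's complement
   int, which the tree-level code does not need to assume.  */
#if 0
/* Sign-extend the low P bits of X (1 <= P < 32).  (x ^ m) - m clears the
   field's sign bit and then subtracts its weight, which is equivalent to
   the shift-up/arithmetic-shift-down sequence used above.  */
static int
sign_extend_field (unsigned int x, int p)
{
  unsigned int m = 1u << (p - 1);
  x &= (m << 1) - 1;             /* keep only the low P bits */
  return (int) ((x ^ m) - m);
}
/* e.g. sign_extend_field (0xFF, 8) == -1, sign_extend_field (0x7F, 8) == 127.  */
#endif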
3888 /* Find ways of folding logical expressions of LHS and RHS:
3889 Try to merge two comparisons to the same innermost item.
3890 Look for range tests like "ch >= '0' && ch <= '9'".
3891 Look for combinations of simple terms on machines with expensive branches
3892 and evaluate the RHS unconditionally.
3894 For example, if we have p->a == 2 && p->b == 4 and we can make an
3895 object large enough to span both A and B, we can do this with a comparison
3896 against the object ANDed with a mask.
3898 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3899 operations to do this with one comparison.
3901 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3902 function and the one above.
3904 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3905 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3907 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
3908 two operands.
3910 We return the simplified tree or 0 if no optimization is possible. */
3912 static tree
3913 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3915 /* If this is the "or" of two comparisons, we can do something if
3916 the comparisons are NE_EXPR. If this is the "and", we can do something
3917 if the comparisons are EQ_EXPR. I.e.,
3918 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3920 WANTED_CODE is this operation code. For single bit fields, we can
3921 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3922 comparison for one-bit fields. */
3924 enum tree_code wanted_code;
3925 enum tree_code lcode, rcode;
3926 tree ll_arg, lr_arg, rl_arg, rr_arg;
3927 tree ll_inner, lr_inner, rl_inner, rr_inner;
3928 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3929 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3930 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3931 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3932 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3933 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3934 enum machine_mode lnmode, rnmode;
3935 tree ll_mask, lr_mask, rl_mask, rr_mask;
3936 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3937 tree l_const, r_const;
3938 tree lntype, rntype, result;
3939 int first_bit, end_bit;
3940 int volatilep;
3942 /* Start by getting the comparison codes. Fail if anything is volatile.
3943 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3944 it were surrounded with a NE_EXPR. */
3946 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3947 return 0;
3949 lcode = TREE_CODE (lhs);
3950 rcode = TREE_CODE (rhs);
3952 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3953 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3955 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3956 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3958 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3959 return 0;
3961 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3962 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3964 ll_arg = TREE_OPERAND (lhs, 0);
3965 lr_arg = TREE_OPERAND (lhs, 1);
3966 rl_arg = TREE_OPERAND (rhs, 0);
3967 rr_arg = TREE_OPERAND (rhs, 1);
3969 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3970 if (simple_operand_p (ll_arg)
3971 && simple_operand_p (lr_arg)
3972 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3974 int compcode;
3976 if (operand_equal_p (ll_arg, rl_arg, 0)
3977 && operand_equal_p (lr_arg, rr_arg, 0))
3979 int lcompcode, rcompcode;
3981 lcompcode = comparison_to_compcode (lcode);
3982 rcompcode = comparison_to_compcode (rcode);
3983 compcode = (code == TRUTH_AND_EXPR)
3984 ? lcompcode & rcompcode
3985 : lcompcode | rcompcode;
3987 else if (operand_equal_p (ll_arg, rr_arg, 0)
3988 && operand_equal_p (lr_arg, rl_arg, 0))
3990 int lcompcode, rcompcode;
3992 rcode = swap_tree_comparison (rcode);
3993 lcompcode = comparison_to_compcode (lcode);
3994 rcompcode = comparison_to_compcode (rcode);
3995 compcode = (code == TRUTH_AND_EXPR)
3996 ? lcompcode & rcompcode
3997 : lcompcode | rcompcode;
3999 else
4000 compcode = -1;
4002 if (compcode == COMPCODE_TRUE)
4003 return fold_convert (truth_type, integer_one_node);
4004 else if (compcode == COMPCODE_FALSE)
4005 return fold_convert (truth_type, integer_zero_node);
4006 else if (compcode != -1)
4007 return build (compcode_to_comparison (compcode),
4008 truth_type, ll_arg, lr_arg);
4011 /* If the RHS can be evaluated unconditionally and its operands are
4012 simple, it wins to evaluate the RHS unconditionally on machines
4013 with expensive branches. In this case, this isn't a comparison
4014 that can be merged. Avoid doing this if the RHS is a floating-point
4015 comparison since those can trap. */
4017 if (BRANCH_COST >= 2
4018 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4019 && simple_operand_p (rl_arg)
4020 && simple_operand_p (rr_arg))
4022 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4023 if (code == TRUTH_OR_EXPR
4024 && lcode == NE_EXPR && integer_zerop (lr_arg)
4025 && rcode == NE_EXPR && integer_zerop (rr_arg)
4026 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4027 return build (NE_EXPR, truth_type,
4028 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4029 ll_arg, rl_arg),
4030 integer_zero_node);
4032 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4033 if (code == TRUTH_AND_EXPR
4034 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4035 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4036 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4037 return build (EQ_EXPR, truth_type,
4038 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4039 ll_arg, rl_arg),
4040 integer_zero_node);
4042 return build (code, truth_type, lhs, rhs);
4045 /* See if the comparisons can be merged. Then get all the parameters for
4046 each side. */
4048 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4049 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4050 return 0;
4052 volatilep = 0;
4053 ll_inner = decode_field_reference (ll_arg,
4054 &ll_bitsize, &ll_bitpos, &ll_mode,
4055 &ll_unsignedp, &volatilep, &ll_mask,
4056 &ll_and_mask);
4057 lr_inner = decode_field_reference (lr_arg,
4058 &lr_bitsize, &lr_bitpos, &lr_mode,
4059 &lr_unsignedp, &volatilep, &lr_mask,
4060 &lr_and_mask);
4061 rl_inner = decode_field_reference (rl_arg,
4062 &rl_bitsize, &rl_bitpos, &rl_mode,
4063 &rl_unsignedp, &volatilep, &rl_mask,
4064 &rl_and_mask);
4065 rr_inner = decode_field_reference (rr_arg,
4066 &rr_bitsize, &rr_bitpos, &rr_mode,
4067 &rr_unsignedp, &volatilep, &rr_mask,
4068 &rr_and_mask);
4070 /* The inner operation on the lhs of each comparison must be the same
4071 if we are to be able to do anything.
4072 Then see if we have constants. If not, the same must be true for
4073 the rhs's. */
4074 if (volatilep || ll_inner == 0 || rl_inner == 0
4075 || ! operand_equal_p (ll_inner, rl_inner, 0))
4076 return 0;
4078 if (TREE_CODE (lr_arg) == INTEGER_CST
4079 && TREE_CODE (rr_arg) == INTEGER_CST)
4080 l_const = lr_arg, r_const = rr_arg;
4081 else if (lr_inner == 0 || rr_inner == 0
4082 || ! operand_equal_p (lr_inner, rr_inner, 0))
4083 return 0;
4084 else
4085 l_const = r_const = 0;
4087 /* If either comparison code is not correct for our logical operation,
4088 fail. However, we can convert a one-bit comparison against zero into
4089 the opposite comparison against that bit being set in the field. */
4091 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4092 if (lcode != wanted_code)
4094 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4096 /* Make the left operand unsigned, since we are only interested
4097 in the value of one bit. Otherwise we are doing the wrong
4098 thing below. */
4099 ll_unsignedp = 1;
4100 l_const = ll_mask;
4102 else
4103 return 0;
4106 /* This is analogous to the code for l_const above. */
4107 if (rcode != wanted_code)
4109 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4111 rl_unsignedp = 1;
4112 r_const = rl_mask;
4114 else
4115 return 0;
4118 /* After this point all optimizations will generate bit-field
4119 references, which we might not want. */
4120 if (! lang_hooks.can_use_bit_fields_p ())
4121 return 0;
4123 /* See if we can find a mode that contains both fields being compared on
4124 the left. If we can't, fail. Otherwise, update all constants and masks
4125 to be relative to a field of that size. */
4126 first_bit = MIN (ll_bitpos, rl_bitpos);
4127 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4128 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4129 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4130 volatilep);
4131 if (lnmode == VOIDmode)
4132 return 0;
4134 lnbitsize = GET_MODE_BITSIZE (lnmode);
4135 lnbitpos = first_bit & ~ (lnbitsize - 1);
4136 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4137 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4139 if (BYTES_BIG_ENDIAN)
4141 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4142 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4145 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4146 size_int (xll_bitpos), 0);
4147 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4148 size_int (xrl_bitpos), 0);
4150 if (l_const)
4152 l_const = fold_convert (lntype, l_const);
4153 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4154 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4155 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4156 fold (build1 (BIT_NOT_EXPR,
4157 lntype, ll_mask)),
4158 0)))
4160 warning ("comparison is always %d", wanted_code == NE_EXPR);
4162 return fold_convert (truth_type,
4163 wanted_code == NE_EXPR
4164 ? integer_one_node : integer_zero_node);
4167 if (r_const)
4169 r_const = fold_convert (lntype, r_const);
4170 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4171 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4172 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4173 fold (build1 (BIT_NOT_EXPR,
4174 lntype, rl_mask)),
4175 0)))
4177 warning ("comparison is always %d", wanted_code == NE_EXPR);
4179 return fold_convert (truth_type,
4180 wanted_code == NE_EXPR
4181 ? integer_one_node : integer_zero_node);
4185 /* If the right sides are not constant, do the same for them. Also,
4186 disallow this optimization if a size or signedness mismatch occurs
4187 between the left and right sides. */
4188 if (l_const == 0)
4190 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4191 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4192 /* Make sure the two fields on the right
4193 correspond to the left without being swapped. */
4194 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4195 return 0;
4197 first_bit = MIN (lr_bitpos, rr_bitpos);
4198 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4199 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4200 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4201 volatilep);
4202 if (rnmode == VOIDmode)
4203 return 0;
4205 rnbitsize = GET_MODE_BITSIZE (rnmode);
4206 rnbitpos = first_bit & ~ (rnbitsize - 1);
4207 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4208 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4210 if (BYTES_BIG_ENDIAN)
4212 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4213 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4216 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4217 size_int (xlr_bitpos), 0);
4218 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4219 size_int (xrr_bitpos), 0);
4221 /* Make a mask that corresponds to both fields being compared.
4222 Do this for both items being compared. If the operands are the
4223 same size and the bits being compared are in the same position
4224 then we can do this by masking both and comparing the masked
4225 results. */
4226 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4227 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4228 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4230 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4231 ll_unsignedp || rl_unsignedp);
4232 if (! all_ones_mask_p (ll_mask, lnbitsize))
4233 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4235 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4236 lr_unsignedp || rr_unsignedp);
4237 if (! all_ones_mask_p (lr_mask, rnbitsize))
4238 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4240 return build (wanted_code, truth_type, lhs, rhs);
4243 /* There is still another way we can do something: If both pairs of
4244 fields being compared are adjacent, we may be able to make a wider
4245 field containing them both.
4247 Note that we still must mask the lhs/rhs expressions. Furthermore,
4248 the mask must be shifted to account for the shift done by
4249 make_bit_field_ref. */
4250 if ((ll_bitsize + ll_bitpos == rl_bitpos
4251 && lr_bitsize + lr_bitpos == rr_bitpos)
4252 || (ll_bitpos == rl_bitpos + rl_bitsize
4253 && lr_bitpos == rr_bitpos + rr_bitsize))
4255 tree type;
4257 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4258 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4259 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4260 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4262 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4263 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4264 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4265 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4267 /* Convert to the smaller type before masking out unwanted bits. */
4268 type = lntype;
4269 if (lntype != rntype)
4271 if (lnbitsize > rnbitsize)
4273 lhs = fold_convert (rntype, lhs);
4274 ll_mask = fold_convert (rntype, ll_mask);
4275 type = rntype;
4277 else if (lnbitsize < rnbitsize)
4279 rhs = fold_convert (lntype, rhs);
4280 lr_mask = fold_convert (lntype, lr_mask);
4281 type = lntype;
4285 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4286 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4288 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4289 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4291 return build (wanted_code, truth_type, lhs, rhs);
4294 return 0;
4297 /* Handle the case of comparisons with constants. If there is something in
4298 common between the masks, those bits of the constants must be the same.
4299 If not, the condition is always false. Test for this to avoid generating
4300 incorrect code below. */
4301 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4302 if (! integer_zerop (result)
4303 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4304 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4306 if (wanted_code == NE_EXPR)
4308 warning ("`or' of unmatched not-equal tests is always 1");
4309 return fold_convert (truth_type, integer_one_node);
4311 else
4313 warning ("`and' of mutually exclusive equal-tests is always 0");
4314 return fold_convert (truth_type, integer_zero_node);
4318 /* Construct the expression we will return. First get the component
4319 reference we will make. Unless the mask is all ones the width of
4320 that field, perform the mask operation. Then compare with the
4321 merged constant. */
4322 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4323 ll_unsignedp || rl_unsignedp);
4325 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4326 if (! all_ones_mask_p (ll_mask, lnbitsize))
4327 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4329 return build (wanted_code, truth_type, result,
4330 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
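/* Illustrative sketch, not part of the original file: the effect of the
   merge performed above on adjacent fields, written out by hand.  The
   struct, the helper names, the absence of padding, the 32-bit unsigned
   int and the little-endian layout are all assumptions made for
   exposition only.  */
#if 0
#include <string.h>

struct two_shorts { unsigned short a, b; };   /* share one 32-bit word */

static int
cmp_two_tests (const struct two_shorts *p)
{
  return p->a == 2 && p->b == 4;
}

static int
cmp_one_test (const struct two_shorts *p)
{
  /* One load and one compare against the merged constant, mirroring
     the make_bit_field_ref + merged-constant result built above.  */
  unsigned int word;
  memcpy (&word, p, sizeof word);
  return word == (2u | (4u << 16));
}
#endif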
4333 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4334 constant. */
4336 static tree
4337 optimize_minmax_comparison (tree t)
4339 tree type = TREE_TYPE (t);
4340 tree arg0 = TREE_OPERAND (t, 0);
4341 enum tree_code op_code;
4342 tree comp_const = TREE_OPERAND (t, 1);
4343 tree minmax_const;
4344 int consts_equal, consts_lt;
4345 tree inner;
4347 STRIP_SIGN_NOPS (arg0);
4349 op_code = TREE_CODE (arg0);
4350 minmax_const = TREE_OPERAND (arg0, 1);
4351 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4352 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4353 inner = TREE_OPERAND (arg0, 0);
4355 /* If something does not permit us to optimize, return the original tree. */
4356 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4357 || TREE_CODE (comp_const) != INTEGER_CST
4358 || TREE_CONSTANT_OVERFLOW (comp_const)
4359 || TREE_CODE (minmax_const) != INTEGER_CST
4360 || TREE_CONSTANT_OVERFLOW (minmax_const))
4361 return t;
4363 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4364 and GT_EXPR, doing the rest with recursive calls using logical
4365 simplifications. */
4366 switch (TREE_CODE (t))
4368 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4369 return
4370 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4372 case GE_EXPR:
4373 return
4374 fold (build (TRUTH_ORIF_EXPR, type,
4375 optimize_minmax_comparison
4376 (build (EQ_EXPR, type, arg0, comp_const)),
4377 optimize_minmax_comparison
4378 (build (GT_EXPR, type, arg0, comp_const))));
4380 case EQ_EXPR:
4381 if (op_code == MAX_EXPR && consts_equal)
4382 /* MAX (X, 0) == 0 -> X <= 0 */
4383 return fold (build (LE_EXPR, type, inner, comp_const));
4385 else if (op_code == MAX_EXPR && consts_lt)
4386 /* MAX (X, 0) == 5 -> X == 5 */
4387 return fold (build (EQ_EXPR, type, inner, comp_const));
4389 else if (op_code == MAX_EXPR)
4390 /* MAX (X, 0) == -1 -> false */
4391 return omit_one_operand (type, integer_zero_node, inner);
4393 else if (consts_equal)
4394 /* MIN (X, 0) == 0 -> X >= 0 */
4395 return fold (build (GE_EXPR, type, inner, comp_const));
4397 else if (consts_lt)
4398 /* MIN (X, 0) == 5 -> false */
4399 return omit_one_operand (type, integer_zero_node, inner);
4401 else
4402 /* MIN (X, 0) == -1 -> X == -1 */
4403 return fold (build (EQ_EXPR, type, inner, comp_const));
4405 case GT_EXPR:
4406 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4407 /* MAX (X, 0) > 0 -> X > 0
4408 MAX (X, 0) > 5 -> X > 5 */
4409 return fold (build (GT_EXPR, type, inner, comp_const));
4411 else if (op_code == MAX_EXPR)
4412 /* MAX (X, 0) > -1 -> true */
4413 return omit_one_operand (type, integer_one_node, inner);
4415 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4416 /* MIN (X, 0) > 0 -> false
4417 MIN (X, 0) > 5 -> false */
4418 return omit_one_operand (type, integer_zero_node, inner);
4420 else
4421 /* MIN (X, 0) > -1 -> X > -1 */
4422 return fold (build (GT_EXPR, type, inner, comp_const));
4424 default:
4425 return t;
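/* Illustrative sketch, not part of the original file: spot checks of the
   EQ_EXPR and GT_EXPR rows above on host ints.  The MAX macro and the
   checking function are invented for exposition.  */
#if 0
#include <assert.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))

static void
check_minmax_folds (int x)
{
  assert ((MAX (x, 0) == 5) == (x == 5));   /* consts_lt: keep the compare */
  assert ((MAX (x, 0) > 5) == (x > 5));
  assert ((MAX (x, 0) > -1) == 1);          /* always true */
  assert ((MAX (x, 0) == -1) == 0);         /* always false */
}
#endif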
4429 /* T is an integer expression that is being multiplied, divided, or taken a
4430 modulus (CODE says which and what kind of divide or modulus) by a
4431 constant C. See if we can eliminate that operation by folding it with
4432 other operations already in T. WIDE_TYPE, if non-null, is a type that
4433 should be used for the computation if wider than our type.
4435 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4436 (X * 2) + (Y * 4). We must, however, be assured that either the original
4437 expression would not overflow or that overflow is undefined for the type
4438 in the language in question.
4440 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4441 the machine has a multiply-accumulate insn or that this is part of an
4442 addressing calculation.
4444 If we return a non-null expression, it is an equivalent form of the
4445 original computation, but need not be in the original type. */
4447 static tree
4448 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4450 /* To avoid exponential search depth, refuse to allow recursion past
4451 three levels. Beyond that (1) it's highly unlikely that we'll find
4452 something interesting and (2) we've probably processed it before
4453 when we built the inner expression. */
4455 static int depth;
4456 tree ret;
4458 if (depth > 3)
4459 return NULL;
4461 depth++;
4462 ret = extract_muldiv_1 (t, c, code, wide_type);
4463 depth--;
4465 return ret;
4468 static tree
4469 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4471 tree type = TREE_TYPE (t);
4472 enum tree_code tcode = TREE_CODE (t);
4473 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4474 > GET_MODE_SIZE (TYPE_MODE (type)))
4475 ? wide_type : type);
4476 tree t1, t2;
4477 int same_p = tcode == code;
4478 tree op0 = NULL_TREE, op1 = NULL_TREE;
4480 /* Don't deal with constants of zero here; they confuse the code below. */
4481 if (integer_zerop (c))
4482 return NULL_TREE;
4484 if (TREE_CODE_CLASS (tcode) == '1')
4485 op0 = TREE_OPERAND (t, 0);
4487 if (TREE_CODE_CLASS (tcode) == '2')
4488 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4490 /* Note that we need not handle conditional operations here since fold
4491 already handles those cases. So just do arithmetic here. */
4492 switch (tcode)
4494 case INTEGER_CST:
4495 /* For a constant, we can always simplify if we are a multiply
4496 or (for divide and modulus) if it is a multiple of our constant. */
4497 if (code == MULT_EXPR
4498 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4499 return const_binop (code, fold_convert (ctype, t),
4500 fold_convert (ctype, c), 0);
4501 break;
4503 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4504 /* If op0 is an expression ... */
4505 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4506 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4507 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4508 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4509 /* ... and is unsigned, and its type is smaller than ctype,
4510 then we cannot pass through as widening. */
4511 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4512 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4513 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4514 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4515 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4516 /* ... or its type is larger than ctype,
4517 then we cannot pass through this truncation. */
4518 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4519 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4520 /* ... or signedness changes for division or modulus,
4521 then we cannot pass through this conversion. */
4522 || (code != MULT_EXPR
4523 && (TYPE_UNSIGNED (ctype)
4524 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4525 break;
4527 /* Pass the constant down and see if we can make a simplification. If
4528 we can, replace this expression with the inner simplification for
4529 possible later conversion to our or some other type. */
4530 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4531 && TREE_CODE (t2) == INTEGER_CST
4532 && ! TREE_CONSTANT_OVERFLOW (t2)
4533 && (0 != (t1 = extract_muldiv (op0, t2, code,
4534 code == MULT_EXPR
4535 ? ctype : NULL_TREE))))
4536 return t1;
4537 break;
4539 case NEGATE_EXPR: case ABS_EXPR:
4540 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4541 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4542 break;
4544 case MIN_EXPR: case MAX_EXPR:
4545 /* If widening the type changes the signedness, then we can't perform
4546 this optimization as that changes the result. */
4547 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4548 break;
4550 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4551 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4552 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4554 if (tree_int_cst_sgn (c) < 0)
4555 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4557 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4558 fold_convert (ctype, t2)));
4560 break;
4562 case LSHIFT_EXPR: case RSHIFT_EXPR:
4563 /* If the second operand is constant, this is a multiplication
4564 or floor division by a power of two, so we can treat it that
4565 way unless the multiplier or divisor overflows. */
4566 if (TREE_CODE (op1) == INTEGER_CST
4567 /* const_binop may not detect overflow correctly,
4568 so check for it explicitly here. */
4569 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4570 && TREE_INT_CST_HIGH (op1) == 0
4571 && 0 != (t1 = fold_convert (ctype,
4572 const_binop (LSHIFT_EXPR,
4573 size_one_node,
4574 op1, 0)))
4575 && ! TREE_OVERFLOW (t1))
4576 return extract_muldiv (build (tcode == LSHIFT_EXPR
4577 ? MULT_EXPR : FLOOR_DIV_EXPR,
4578 ctype, fold_convert (ctype, op0), t1),
4579 c, code, wide_type);
4580 break;
4582 case PLUS_EXPR: case MINUS_EXPR:
4583 /* See if we can eliminate the operation on both sides. If we can, we
4584 can return a new PLUS or MINUS. If we can't, the only remaining
4585 cases where we can do anything are if the second operand is a
4586 constant. */
4587 t1 = extract_muldiv (op0, c, code, wide_type);
4588 t2 = extract_muldiv (op1, c, code, wide_type);
4589 if (t1 != 0 && t2 != 0
4590 && (code == MULT_EXPR
4591 /* If not multiplication, we can only do this if both operands
4592 are divisible by c. */
4593 || (multiple_of_p (ctype, op0, c)
4594 && multiple_of_p (ctype, op1, c))))
4595 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4596 fold_convert (ctype, t2)));
4598 /* If this was a subtraction, negate OP1 and set it to be an addition.
4599 This simplifies the logic below. */
4600 if (tcode == MINUS_EXPR)
4601 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4603 if (TREE_CODE (op1) != INTEGER_CST)
4604 break;
4606 /* If either OP1 or C are negative, this optimization is not safe for
4607 some of the division and remainder types while for others we need
4608 to change the code. */
4609 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4611 if (code == CEIL_DIV_EXPR)
4612 code = FLOOR_DIV_EXPR;
4613 else if (code == FLOOR_DIV_EXPR)
4614 code = CEIL_DIV_EXPR;
4615 else if (code != MULT_EXPR
4616 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4617 break;
4620 /* If it's a multiply or a division/modulus operation of a multiple
4621 of our constant, do the operation and verify it doesn't overflow. */
4622 if (code == MULT_EXPR
4623 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4625 op1 = const_binop (code, fold_convert (ctype, op1),
4626 fold_convert (ctype, c), 0);
4627 /* We allow the constant to overflow with wrapping semantics. */
4628 if (op1 == 0
4629 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4630 break;
4632 else
4633 break;
4635 /* If we have an unsigned type that is not a sizetype, we cannot widen
4636 the operation since it will change the result if the original
4637 computation overflowed. */
4638 if (TYPE_UNSIGNED (ctype)
4639 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4640 && ctype != type)
4641 break;
4643 /* If we were able to eliminate our operation from the first side,
4644 apply our operation to the second side and reform the PLUS. */
4645 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4646 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4648 /* The last case is if we are a multiply. In that case, we can
4649 apply the distributive law to commute the multiply and addition
4650 if the multiplication of the constants doesn't overflow. */
4651 if (code == MULT_EXPR)
4652 return fold (build (tcode, ctype,
4653 fold (build (code, ctype,
4654 fold_convert (ctype, op0),
4655 fold_convert (ctype, c))),
4656 op1));
4658 break;
4660 case MULT_EXPR:
4661 /* We have a special case here if we are doing something like
4662 (C * 8) % 4 since we know that's zero. */
4663 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4664 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4665 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4666 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4667 return omit_one_operand (type, integer_zero_node, op0);
4669 /* ... fall through ... */
4671 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4672 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4673 /* If we can extract our operation from the LHS, do so and return a
4674 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4675 do something only if the second operand is a constant. */
4676 if (same_p
4677 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4678 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4679 fold_convert (ctype, op1)));
4680 else if (tcode == MULT_EXPR && code == MULT_EXPR
4681 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4682 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4683 fold_convert (ctype, t1)));
4684 else if (TREE_CODE (op1) != INTEGER_CST)
4685 return 0;
4687 /* If these are the same operation types, we can associate them
4688 assuming no overflow. */
4689 if (tcode == code
4690 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4691 fold_convert (ctype, c), 0))
4692 && ! TREE_OVERFLOW (t1))
4693 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4695 /* If these operations "cancel" each other, we have the main
4696 optimizations of this pass, which occur when either constant is a
4697 multiple of the other, in which case we replace this with either an
4698 operation of CODE or TCODE.
4700 If we have an unsigned type that is not a sizetype, we cannot do
4701 this since it will change the result if the original computation
4702 overflowed. */
4703 if ((! TYPE_UNSIGNED (ctype)
4704 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4705 && ! flag_wrapv
4706 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4707 || (tcode == MULT_EXPR
4708 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4709 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4711 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4712 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4713 fold_convert (ctype,
4714 const_binop (TRUNC_DIV_EXPR,
4715 op1, c, 0))));
4716 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4717 return fold (build (code, ctype, fold_convert (ctype, op0),
4718 fold_convert (ctype,
4719 const_binop (TRUNC_DIV_EXPR,
4720 c, op1, 0))));
4722 break;
4724 default:
4725 break;
4728 return 0;
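/* Illustrative sketch, not part of the original file: the headline
   rewrite from the extract_muldiv comment, written out at the source
   level.  The function names are invented; the equivalence relies on
   signed overflow being undefined, exactly as the comment above
   requires.  */
#if 0
static long
div_before (long x, long y)
{
  return (x * 8 + y * 16) / 4;
}

static long
div_after (long x, long y)
{
  /* Each term is divisible by 4, so the division distributes.  */
  return x * 2 + y * 4;
}
#endif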
4731 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4732 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4733 that we may sometimes modify the tree. */
4735 static tree
4736 strip_compound_expr (tree t, tree s)
4738 enum tree_code code = TREE_CODE (t);
4740 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4741 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4742 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4743 return TREE_OPERAND (t, 1);
4745 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4746 don't bother handling any other types. */
4747 else if (code == COND_EXPR)
4749 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4750 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4751 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4753 else if (TREE_CODE_CLASS (code) == '1')
4754 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4755 else if (TREE_CODE_CLASS (code) == '<'
4756 || TREE_CODE_CLASS (code) == '2')
4758 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4759 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4762 return t;
4765 /* Return a node which has the indicated constant VALUE (either 0 or
4766 1), and is of the indicated TYPE. */
4768 static tree
4769 constant_boolean_node (int value, tree type)
4771 if (type == integer_type_node)
4772 return value ? integer_one_node : integer_zero_node;
4773 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4774 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4775 : integer_zero_node);
4776 else
4778 tree t = build_int_2 (value, 0);
4780 TREE_TYPE (t) = type;
4781 return t;
4785 /* Utility function for the following routine, to see how complex a nesting of
4786 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4787 we don't care (to avoid spending too much time on complex expressions). */
4789 static int
4790 count_cond (tree expr, int lim)
4792 int ctrue, cfalse;
4794 if (TREE_CODE (expr) != COND_EXPR)
4795 return 0;
4796 else if (lim <= 0)
4797 return 0;
4799 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4800 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4801 return MIN (lim, 1 + ctrue + cfalse);
4804 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4805 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4806 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4807 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4808 COND is the first argument to CODE; otherwise (as in the example
4809 given here), it is the second argument. TYPE is the type of the
4810 original expression. Return NULL_TREE if no simplification is
4811 possible. */
4813 static tree
4814 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4815 tree cond, tree arg, int cond_first_p)
4817 tree test, true_value, false_value;
4818 tree lhs = NULL_TREE;
4819 tree rhs = NULL_TREE;
4820 /* In the end, we'll produce a COND_EXPR. Both arms of the
4821 conditional expression will be binary operations. The left-hand
4822 side of the expression to be executed if the condition is true
4823 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4824 of the expression to be executed if the condition is true will be
4825 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4826 but apply to the expression to be executed if the conditional is
4827 false. */
4828 tree *true_lhs;
4829 tree *true_rhs;
4830 tree *false_lhs;
4831 tree *false_rhs;
4832 /* These are the codes to use for the left-hand side and right-hand
4833 side of the COND_EXPR. Normally, they are the same as CODE. */
4834 enum tree_code lhs_code = code;
4835 enum tree_code rhs_code = code;
4836 /* And these are the types of the expressions. */
4837 tree lhs_type = type;
4838 tree rhs_type = type;
4839 int save = 0;
4841 if (TREE_CODE (cond) != COND_EXPR
4842 && TREE_CODE_CLASS (code) == '<')
4843 return NULL_TREE;
4845 if (TREE_CODE (arg) == COND_EXPR
4846 && count_cond (cond, 25) + count_cond (arg, 25) > 25)
4847 return NULL_TREE;
4849 if (TREE_SIDE_EFFECTS (arg)
4850 && (lang_hooks.decls.global_bindings_p () != 0
4851 || CONTAINS_PLACEHOLDER_P (arg)))
4852 return NULL_TREE;
4854 if (cond_first_p)
4856 true_rhs = false_rhs = &arg;
4857 true_lhs = &true_value;
4858 false_lhs = &false_value;
4860 else
4862 true_lhs = false_lhs = &arg;
4863 true_rhs = &true_value;
4864 false_rhs = &false_value;
4867 if (TREE_CODE (cond) == COND_EXPR)
4869 test = TREE_OPERAND (cond, 0);
4870 true_value = TREE_OPERAND (cond, 1);
4871 false_value = TREE_OPERAND (cond, 2);
4872 /* If this operand is a void-typed expression such as a throw, it does not make
4873 sense to try to perform a logical or arithmetic operation
4874 involving it. Instead of building `a + throw 3' for example,
4875 we simply build `a, throw 3'. */
4876 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4878 if (! cond_first_p)
4880 lhs_code = COMPOUND_EXPR;
4881 lhs_type = void_type_node;
4883 else
4884 lhs = true_value;
4886 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4888 if (! cond_first_p)
4890 rhs_code = COMPOUND_EXPR;
4891 rhs_type = void_type_node;
4893 else
4894 rhs = false_value;
4897 else
4899 tree testtype = TREE_TYPE (cond);
4900 test = cond;
4901 true_value = fold_convert (testtype, integer_one_node);
4902 false_value = fold_convert (testtype, integer_zero_node);
4905 /* If ARG is complex we want to make sure we only evaluate it once. Though
4906 this is only required if it is volatile, it might be more efficient even
4907 if it is not. However, if we succeed in folding one part to a constant,
4908 we do not need to make this SAVE_EXPR. Since we do this optimization
4909 primarily to see if we do end up with a constant and this SAVE_EXPR
4910 interferes with later optimizations, suppressing it when we can is
4911 important.
4913 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4914 do so. Don't try to see if the result is a constant if an arm is a
4915 COND_EXPR since we get exponential behavior in that case. */
4917 if (saved_expr_p (arg))
4918 save = 1;
4919 else if (lhs == 0 && rhs == 0
4920 && !TREE_CONSTANT (arg)
4921 && lang_hooks.decls.global_bindings_p () == 0
4922 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4923 || TREE_SIDE_EFFECTS (arg)))
4925 if (TREE_CODE (true_value) != COND_EXPR)
4926 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4928 if (TREE_CODE (false_value) != COND_EXPR)
4929 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4931 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4932 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4934 arg = save_expr (arg);
4935 lhs = rhs = 0;
4936 save = saved_expr_p (arg);
4940 if (lhs == 0)
4941 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4942 if (rhs == 0)
4943 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4945 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4947 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4948 ahead of the COND_EXPR we made. Otherwise we would have it only
4949 evaluated in one branch, with the other branch using the result
4950 but missing the evaluation code. Beware that the save_expr call
4951 above might not return a SAVE_EXPR, so testing the TREE_CODE
4952 of ARG is not enough to decide here. */
4953 if (save)
4954 return build (COMPOUND_EXPR, type,
4955 fold_convert (void_type_node, arg),
4956 strip_compound_expr (test, arg));
4957 else
4958 return fold_convert (type, test);
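/* Illustrative sketch, not part of the original file: the basic rewrite
   this routine performs, at the source level.  Names are invented; in
   the real folder each arm is then folded further, which is the point
   of pushing the operation inside the conditional.  */
#if 0
static int
add_outside (int a, int b, int x, int y)
{
  return a + (b ? x : y);
}

static int
add_inside (int a, int b, int x, int y)
{
  return b ? (a + x) : (a + y);
}
#endif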
4962 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4964 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4965 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4966 ADDEND is the same as X.
4968 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4969 and finite. The problematic cases are when X is zero, and its mode
4970 has signed zeros. In the case of rounding towards -infinity,
4971 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4972 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4974 static bool
4975 fold_real_zero_addition_p (tree type, tree addend, int negate)
4977 if (!real_zerop (addend))
4978 return false;
4980 /* Don't allow the fold with -fsignaling-nans. */
4981 if (HONOR_SNANS (TYPE_MODE (type)))
4982 return false;
4984 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4985 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4986 return true;
4988 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4989 if (TREE_CODE (addend) == REAL_CST
4990 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4991 negate = !negate;
4993 /* The mode has signed zeros, and we have to honor their sign.
4994 In this situation, there is only one case we can return true for.
4995 X - 0 is the same as X unless rounding towards -infinity is
4996 supported. */
4997 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
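/* Illustrative sketch, not part of the original file: why the signed-zero
   check above matters, using C99 signbit (an assumption; the folder itself
   consults the target's FP semantics instead).  With round-to-nearest,
   -0.0 + 0.0 is +0.0, so "x + 0.0" can change the sign of a zero, while
   "x - 0.0" preserves it.  */
#if 0
#include <math.h>

static int
signed_zero_demo (void)
{
  double nz = -0.0;
  return signbit (nz + 0.0) == 0      /* sign lost: result is +0.0 */
         && signbit (nz - 0.0) != 0;  /* sign kept: result is -0.0 */
}
#endif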
5000 /* Subroutine of fold() that checks comparisons of built-in math
5001 functions against real constants.
5003 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5004 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5005 is the type of the result and ARG0 and ARG1 are the operands of the
5006 comparison. ARG1 must be a TREE_REAL_CST.
5008 The function returns the constant folded tree if a simplification
5009 can be made, and NULL_TREE otherwise. */
5011 static tree
5012 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5013 tree type, tree arg0, tree arg1)
5015 REAL_VALUE_TYPE c;
5017 if (BUILTIN_SQRT_P (fcode))
5019 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5020 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5022 c = TREE_REAL_CST (arg1);
5023 if (REAL_VALUE_NEGATIVE (c))
5025 /* sqrt(x) < y is always false, if y is negative. */
5026 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5027 return omit_one_operand (type,
5028 fold_convert (type, integer_zero_node),
5029 arg);
5031 /* sqrt(x) > y is always true, if y is negative and we
5032 don't care about NaNs, i.e. negative values of x. */
5033 if (code == NE_EXPR || !HONOR_NANS (mode))
5034 return omit_one_operand (type,
5035 fold_convert (type, integer_one_node),
5036 arg);
5038 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5039 return fold (build (GE_EXPR, type, arg,
5040 build_real (TREE_TYPE (arg), dconst0)));
5042 else if (code == GT_EXPR || code == GE_EXPR)
5044 REAL_VALUE_TYPE c2;
5046 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5047 real_convert (&c2, mode, &c2);
5049 if (REAL_VALUE_ISINF (c2))
5051 /* sqrt(x) > y is x == +Inf, when y is very large. */
5052 if (HONOR_INFINITIES (mode))
5053 return fold (build (EQ_EXPR, type, arg,
5054 build_real (TREE_TYPE (arg), c2)));
5056 /* sqrt(x) > y is always false, when y is very large
5057 and we don't care about infinities. */
5058 return omit_one_operand (type,
5059 fold_convert (type, integer_zero_node),
5060 arg);
5063 /* sqrt(x) > c is the same as x > c*c. */
5064 return fold (build (code, type, arg,
5065 build_real (TREE_TYPE (arg), c2)));
5067 else if (code == LT_EXPR || code == LE_EXPR)
5069 REAL_VALUE_TYPE c2;
5071 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5072 real_convert (&c2, mode, &c2);
5074 if (REAL_VALUE_ISINF (c2))
5076 /* sqrt(x) < y is always true, when y is a very large
5077 value and we don't care about NaNs or Infinities. */
5078 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5079 return omit_one_operand (type,
5080 fold_convert (type, integer_one_node),
5081 arg);
5083 /* sqrt(x) < y is x != +Inf when y is very large and we
5084 don't care about NaNs. */
5085 if (! HONOR_NANS (mode))
5086 return fold (build (NE_EXPR, type, arg,
5087 build_real (TREE_TYPE (arg), c2)));
5089 /* sqrt(x) < y is x >= 0 when y is very large and we
5090 don't care about Infinities. */
5091 if (! HONOR_INFINITIES (mode))
5092 return fold (build (GE_EXPR, type, arg,
5093 build_real (TREE_TYPE (arg), dconst0)));
5095 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5096 if (lang_hooks.decls.global_bindings_p () != 0
5097 || CONTAINS_PLACEHOLDER_P (arg))
5098 return NULL_TREE;
5100 arg = save_expr (arg);
5101 return fold (build (TRUTH_ANDIF_EXPR, type,
5102 fold (build (GE_EXPR, type, arg,
5103 build_real (TREE_TYPE (arg),
5104 dconst0))),
5105 fold (build (NE_EXPR, type, arg,
5106 build_real (TREE_TYPE (arg),
5107 c2)))));
5110 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5111 if (! HONOR_NANS (mode))
5112 return fold (build (code, type, arg,
5113 build_real (TREE_TYPE (arg), c2)));
5115 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5116 if (lang_hooks.decls.global_bindings_p () == 0
5117 && ! CONTAINS_PLACEHOLDER_P (arg))
5119 arg = save_expr (arg);
5120 return fold (build (TRUTH_ANDIF_EXPR, type,
5121 fold (build (GE_EXPR, type, arg,
5122 build_real (TREE_TYPE (arg),
5123 dconst0))),
5124 fold (build (code, type, arg,
5125 build_real (TREE_TYPE (arg),
5126 c2)))));
5131 return NULL_TREE;
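/* Illustrative sketch, not part of the original file: the central sqrt
   rewrite above at the source level, with an invented constant.  For a
   nonnegative constant whose square is exactly representable, the call
   to sqrt disappears entirely; a NaN input makes both forms false.  */
#if 0
#include <math.h>

static int
sqrt_cmp_before (double x)
{
  return sqrt (x) > 2.0;
}

static int
sqrt_cmp_after (double x)
{
  return x > 4.0;   /* x > c*c */
}
#endif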
5134 /* Subroutine of fold() that optimizes comparisons against Infinities,
5135 either +Inf or -Inf.
5137 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5138 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5139 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5141 The function returns the constant folded tree if a simplification
5142 can be made, and NULL_TREE otherwise. */
5144 static tree
5145 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5147 enum machine_mode mode;
5148 REAL_VALUE_TYPE max;
5149 tree temp;
5150 bool neg;
5152 mode = TYPE_MODE (TREE_TYPE (arg0));
5154 /* For negative infinity swap the sense of the comparison. */
5155 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5156 if (neg)
5157 code = swap_tree_comparison (code);
5159 switch (code)
5161 case GT_EXPR:
5162 /* x > +Inf is always false, if we ignore sNaNs. */
5163 if (HONOR_SNANS (mode))
5164 return NULL_TREE;
5165 return omit_one_operand (type,
5166 fold_convert (type, integer_zero_node),
5167 arg0);
5169 case LE_EXPR:
5170 /* x <= +Inf is always true, if we don't care about NaNs. */
5171 if (! HONOR_NANS (mode))
5172 return omit_one_operand (type,
5173 fold_convert (type, integer_one_node),
5174 arg0);
5176 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5177 if (lang_hooks.decls.global_bindings_p () == 0
5178 && ! CONTAINS_PLACEHOLDER_P (arg0))
5180 arg0 = save_expr (arg0);
5181 return fold (build (EQ_EXPR, type, arg0, arg0));
5183 break;
5185 case EQ_EXPR:
5186 case GE_EXPR:
5187 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5188 real_maxval (&max, neg, mode);
5189 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5190 arg0, build_real (TREE_TYPE (arg0), max)));
5192 case LT_EXPR:
5193 /* x < +Inf is always equal to x <= DBL_MAX. */
5194 real_maxval (&max, neg, mode);
5195 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5196 arg0, build_real (TREE_TYPE (arg0), max)));
5198 case NE_EXPR:
5199 /* x != +Inf is always equal to !(x > DBL_MAX). */
5200 real_maxval (&max, neg, mode);
5201 if (! HONOR_NANS (mode))
5202 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5203 arg0, build_real (TREE_TYPE (arg0), max)));
5204 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5205 arg0, build_real (TREE_TYPE (arg0), max)));
5206 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5208 default:
5209 break;
5212 return NULL_TREE;
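/* Illustrative sketch, not part of the original file: two of the rewrites
   above at the source level, assuming IEEE doubles.  "x <= +Inf" holds
   exactly when x is not a NaN, and "x < +Inf" exactly when x <= DBL_MAX.  */
#if 0
#include <float.h>

static int
le_inf (double x)     /* x <= +Inf */
{
  return x == x;      /* false only for NaN */
}

static int
lt_inf (double x)     /* x < +Inf */
{
  return x <= DBL_MAX;
}
#endif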
5215 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5216 equality/inequality test, then return a simplified form of
5217 the test using shifts and logical operations. Otherwise return
5218 NULL. TYPE is the desired result type. */
5220 tree
5221 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5222 tree result_type)
5224 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5225 operand 0. */
5226 if (code == TRUTH_NOT_EXPR)
5228 code = TREE_CODE (arg0);
5229 if (code != NE_EXPR && code != EQ_EXPR)
5230 return NULL_TREE;
5232 /* Extract the arguments of the EQ/NE. */
5233 arg1 = TREE_OPERAND (arg0, 1);
5234 arg0 = TREE_OPERAND (arg0, 0);
5236 /* This requires us to invert the code. */
5237 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5240 /* If this is testing a single bit, we can optimize the test. */
5241 if ((code == NE_EXPR || code == EQ_EXPR)
5242 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5243 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5245 tree inner = TREE_OPERAND (arg0, 0);
5246 tree type = TREE_TYPE (arg0);
5247 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5248 enum machine_mode operand_mode = TYPE_MODE (type);
5249 int ops_unsigned;
5250 tree signed_type, unsigned_type, intermediate_type;
5251 tree arg00;
5253 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5254 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5255 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5256 if (arg00 != NULL_TREE)
5258 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5259 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5260 fold_convert (stype, arg00),
5261 fold_convert (stype, integer_zero_node)));
5264 /* At this point, we know that arg0 is not testing the sign bit. */
5265 if (TYPE_PRECISION (type) - 1 == bitnum)
5266 abort ();
5268 /* Otherwise we have (A & C) != 0 where C is a single bit,
5269 convert that into ((A >> C2) & 1), where C2 = log2(C).
5270 Similarly for (A & C) == 0. */
5272 /* If INNER is a right shift of a constant and it plus BITNUM does
5273 not overflow, adjust BITNUM and INNER. */
5274 if (TREE_CODE (inner) == RSHIFT_EXPR
5275 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5276 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5277 && bitnum < TYPE_PRECISION (type)
5278 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5279 bitnum - TYPE_PRECISION (type)))
5281 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5282 inner = TREE_OPERAND (inner, 0);
5285 /* If we are going to be able to omit the AND below, we must do our
5286 operations as unsigned. If we must use the AND, we have a choice.
5287 Normally unsigned is faster, but for some machines signed is. */
5288 #ifdef LOAD_EXTEND_OP
5289 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5290 #else
5291 ops_unsigned = 1;
5292 #endif
5294 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5295 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5296 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5297 inner = fold_convert (intermediate_type, inner);
5299 if (bitnum != 0)
5300 inner = build (RSHIFT_EXPR, intermediate_type,
5301 inner, size_int (bitnum));
5303 if (code == EQ_EXPR)
5304 inner = build (BIT_XOR_EXPR, intermediate_type,
5305 inner, integer_one_node);
5307 /* Put the AND last so it can combine with more things. */
5308 inner = build (BIT_AND_EXPR, intermediate_type,
5309 inner, integer_one_node);
5311 /* Make sure to return the proper type. */
5312 inner = fold_convert (result_type, inner);
5314 return inner;
5316 return NULL_TREE;
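/* Illustrative sketch, not part of the original file: the two rewrites
   above on host integers.  Names are invented, and the sign-bit variant
   assumes a 32-bit two's complement int.  */
#if 0
static int
bit3_test (unsigned int a)
{
  return (a & 8u) != 0;
}

static int
bit3_test_folded (unsigned int a)
{
  return (a >> 3) & 1u;        /* ((A >> C2) & 1), C2 = log2 (C) */
}

static int
sign_bit_test_folded (unsigned int a)
{
  return (int) a < 0;          /* (A & 0x80000000) != 0 becomes A < 0 */
}
#endif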
5319 /* Check whether we are allowed to reorder operands arg0 and arg1,
5320 such that the evaluation of arg1 occurs before arg0. */
5322 static bool
5323 reorder_operands_p (tree arg0, tree arg1)
5325 if (! flag_evaluation_order)
5326 return true;
5327 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5328 return true;
5329 return ! TREE_SIDE_EFFECTS (arg0)
5330 && ! TREE_SIDE_EFFECTS (arg1);
5333 /* Test whether it is preferable to swap two operands, ARG0 and
5334 ARG1, for example because ARG0 is an integer constant and ARG1
5335 isn't. If REORDER is true, only recommend swapping if we can
5336 evaluate the operands in reverse order. */
5338 static bool
5339 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5341 STRIP_SIGN_NOPS (arg0);
5342 STRIP_SIGN_NOPS (arg1);
5344 if (TREE_CODE (arg1) == INTEGER_CST)
5345 return 0;
5346 if (TREE_CODE (arg0) == INTEGER_CST)
5347 return 1;
5349 if (TREE_CODE (arg1) == REAL_CST)
5350 return 0;
5351 if (TREE_CODE (arg0) == REAL_CST)
5352 return 1;
5354 if (TREE_CODE (arg1) == COMPLEX_CST)
5355 return 0;
5356 if (TREE_CODE (arg0) == COMPLEX_CST)
5357 return 1;
5359 if (TREE_CONSTANT (arg1))
5360 return 0;
5361 if (TREE_CONSTANT (arg0))
5362 return 1;
5364 if (optimize_size)
5365 return 0;
5367 if (reorder && flag_evaluation_order
5368 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5369 return 0;
5371 if (DECL_P (arg1))
5372 return 0;
5373 if (DECL_P (arg0))
5374 return 1;
5376 return 0;
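/* Illustrative sketch, not part of the original file: the canonical order
   this predicate produces once fold applies it to commutative codes below.
   Putting the constant second means later patterns only have to look for
   it in one operand position.  */
#if 0
/* Both forms fold to the same tree; the second is the canonical one.  */
static int canon_before (int x) { return 5 + x; }
static int canon_after  (int x) { return x + 5; }
#endif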
5379 /* Perform constant folding and related simplification of EXPR.
5380 The related simplifications include x*1 => x, x*0 => 0, etc.,
5381 and application of the associative law.
5382 NOP_EXPR conversions may be removed freely (as long as we
5383 are careful not to change the type of the overall expression).
5384 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5385 but we can constant-fold them if they have constant operands. */
5387 #ifdef ENABLE_FOLD_CHECKING
5388 # define fold(x) fold_1 (x)
5389 static tree fold_1 (tree);
5390 static
5391 #endif
5392 tree
5393 fold (tree expr)
5395 const tree t = expr;
5396 const tree type = TREE_TYPE (expr);
5397 tree t1 = NULL_TREE;
5398 tree tem;
5399 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5400 enum tree_code code = TREE_CODE (t);
5401 int kind = TREE_CODE_CLASS (code);
5402 /* WINS will be nonzero when the switch is done
5403 if all operands are constant. */
5404 int wins = 1;
5406 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5407 Likewise for a SAVE_EXPR that's already been evaluated. */
5408 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5409 return t;
5411 /* Return right away if a constant. */
5412 if (kind == 'c')
5413 return t;
5415 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5417 tree subop;
5419 /* Special case for conversion ops that can have fixed point args. */
5420 arg0 = TREE_OPERAND (t, 0);
5422 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5423 if (arg0 != 0)
5424 STRIP_SIGN_NOPS (arg0);
5426 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5427 subop = TREE_REALPART (arg0);
5428 else
5429 subop = arg0;
5431 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5432 && TREE_CODE (subop) != REAL_CST)
5433 /* Note that TREE_CONSTANT isn't enough:
5434 static var addresses are constant but we can't
5435 do arithmetic on them. */
5436 wins = 0;
5438 else if (IS_EXPR_CODE_CLASS (kind))
5440 int len = first_rtl_op (code);
5441 int i;
5442 for (i = 0; i < len; i++)
5444 tree op = TREE_OPERAND (t, i);
5445 tree subop;
5447 if (op == 0)
5448 continue; /* Valid for CALL_EXPR, at least. */
5450 /* Strip any conversions that don't change the mode. This is
5451 safe for every expression, except for a comparison expression
5452 because its signedness is derived from its operands. So, in
5453 the latter case, only strip conversions that don't change the
5454 signedness.
5456 Note that this is done as an internal manipulation within the
5457 constant folder, in order to find the simplest representation
5458 of the arguments so that their form can be studied. In any
5459 case, the appropriate type conversions should be put back in
5460 the tree that will get out of the constant folder. */
5461 if (kind == '<')
5462 STRIP_SIGN_NOPS (op);
5463 else
5464 STRIP_NOPS (op);
5466 if (TREE_CODE (op) == COMPLEX_CST)
5467 subop = TREE_REALPART (op);
5468 else
5469 subop = op;
5471 if (TREE_CODE (subop) != INTEGER_CST
5472 && TREE_CODE (subop) != REAL_CST)
5473 /* Note that TREE_CONSTANT isn't enough:
5474 static var addresses are constant but we can't
5475 do arithmetic on them. */
5476 wins = 0;
5478 if (i == 0)
5479 arg0 = op;
5480 else if (i == 1)
5481 arg1 = op;
5485 /* If this is a commutative operation, and ARG0 is a constant, move it
5486 to ARG1 to reduce the number of tests below. */
5487 if (commutative_tree_code (code)
5488 && tree_swap_operands_p (arg0, arg1, true))
5489 return fold (build (code, type, TREE_OPERAND (t, 1),
5490 TREE_OPERAND (t, 0)));
5492 /* Now WINS is set as described above,
5493 ARG0 is the first operand of EXPR,
5494 and ARG1 is the second operand (if EXPR has more than one operand).
5496 First check for cases where an arithmetic operation is applied to a
5497 compound, conditional, or comparison operation. Push the arithmetic
5498 operation inside the compound or conditional to see if any folding
5499 can then be done. Convert comparison to conditional for this purpose.
5500 This also optimizes non-constant cases that used to be done in
5501 expand_expr.
5503 Before we do that, see if this is a BIT_AND_EXPR, a BIT_IOR_EXPR, an
5504 EQ_EXPR or an NE_EXPR where one operand is a truth value and the other
5505 is a truth value or a BIT_AND_EXPR with the constant 1. In that case,
5506 the code below would make the expression more complex. Change it to a
5507 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5508 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5510 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5511 || code == EQ_EXPR || code == NE_EXPR)
5512 && ((truth_value_p (TREE_CODE (arg0))
5513 && (truth_value_p (TREE_CODE (arg1))
5514 || (TREE_CODE (arg1) == BIT_AND_EXPR
5515 && integer_onep (TREE_OPERAND (arg1, 1)))))
5516 || (truth_value_p (TREE_CODE (arg1))
5517 && (truth_value_p (TREE_CODE (arg0))
5518 || (TREE_CODE (arg0) == BIT_AND_EXPR
5519 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5521 tem = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5522 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5523 : TRUTH_XOR_EXPR,
5524 type, arg0, arg1));
5526 if (code == EQ_EXPR)
5527 tem = invert_truthvalue (tem);
5529 return tem;
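/* Added example (not in the sources): for truth-valued operands the
   conversion above gives

     (a < b) & (c < d)    ->  (a < b) && (c < d)   TRUTH_AND_EXPR
     (a < b) | (c < d)    ->  (a < b) || (c < d)   TRUTH_OR_EXPR
     (a < b) != (c < d)   ->  TRUTH_XOR_EXPR of the comparisons

   while (a < b) == (c < d) becomes the inversion of that
   TRUTH_XOR_EXPR.  */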
5532 if (TREE_CODE_CLASS (code) == '1')
5534 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5535 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5536 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5537 else if (TREE_CODE (arg0) == COND_EXPR)
5539 tree arg01 = TREE_OPERAND (arg0, 1);
5540 tree arg02 = TREE_OPERAND (arg0, 2);
5541 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5542 arg01 = fold (build1 (code, type, arg01));
5543 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5544 arg02 = fold (build1 (code, type, arg02));
5545 tem = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5546 arg01, arg02));
5548 /* If this was a conversion, and all we did was to move it
5549 inside the COND_EXPR, bring it back out. But leave it if
5550 it is a conversion from integer to integer and the
5551 result precision is no wider than a word since such a
5552 conversion is cheap and may be optimized away by combine,
5553 while it couldn't if it were outside the COND_EXPR. Then return
5554 so we don't get into an infinite recursion loop taking the
5555 conversion out and then back in. */
5557 if ((code == NOP_EXPR || code == CONVERT_EXPR
5558 || code == NON_LVALUE_EXPR)
5559 && TREE_CODE (tem) == COND_EXPR
5560 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5561 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5562 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5563 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5564 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5565 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5566 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5567 && (INTEGRAL_TYPE_P
5568 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5569 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5570 tem = build1 (code, type,
5571 build (COND_EXPR,
5572 TREE_TYPE (TREE_OPERAND
5573 (TREE_OPERAND (tem, 1), 0)),
5574 TREE_OPERAND (tem, 0),
5575 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5576 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5577 return tem;
5579 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5580 return fold (build (COND_EXPR, type, arg0,
5581 fold (build1 (code, type, integer_one_node)),
5582 fold (build1 (code, type, integer_zero_node))));
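/* Added illustration (not in the sources): the cases above push a
   unary operation into the arms of a conditional, e.g.

     -(p ? b : c)   ->  p ? -b : -c

   and rewrite a unary operation applied to a comparison as

     (T) (a < b)    ->  (a < b) ? (T) 1 : (T) 0

   so each arm can fold on its own.  */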
5584 else if (TREE_CODE_CLASS (code) == '<'
5585 && TREE_CODE (arg0) == COMPOUND_EXPR)
5586 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5587 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5588 else if (TREE_CODE_CLASS (code) == '<'
5589 && TREE_CODE (arg1) == COMPOUND_EXPR)
5590 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5591 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5592 else if (TREE_CODE_CLASS (code) == '2'
5593 || TREE_CODE_CLASS (code) == '<')
5595 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5596 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5597 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5598 if (TREE_CODE (arg1) == COMPOUND_EXPR
5599 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5600 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5601 fold (build (code, type,
5602 arg0, TREE_OPERAND (arg1, 1))));
5604 if (TREE_CODE (arg0) == COND_EXPR
5605 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5607 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5608 /*cond_first_p=*/1);
5609 if (tem != NULL_TREE)
5610 return tem;
5613 if (TREE_CODE (arg1) == COND_EXPR
5614 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5616 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5617 /*cond_first_p=*/0);
5618 if (tem != NULL_TREE)
5619 return tem;
5623 switch (code)
5625 case CONST_DECL:
5626 return fold (DECL_INITIAL (t));
5628 case NOP_EXPR:
5629 case FLOAT_EXPR:
5630 case CONVERT_EXPR:
5631 case FIX_TRUNC_EXPR:
5632 case FIX_CEIL_EXPR:
5633 case FIX_FLOOR_EXPR:
5634 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5635 return TREE_OPERAND (t, 0);
5637 /* Handle cases of two conversions in a row. */
5638 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5639 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5641 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5642 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5643 int inside_int = INTEGRAL_TYPE_P (inside_type);
5644 int inside_ptr = POINTER_TYPE_P (inside_type);
5645 int inside_float = FLOAT_TYPE_P (inside_type);
5646 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5647 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5648 int inter_int = INTEGRAL_TYPE_P (inter_type);
5649 int inter_ptr = POINTER_TYPE_P (inter_type);
5650 int inter_float = FLOAT_TYPE_P (inter_type);
5651 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5652 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5653 int final_int = INTEGRAL_TYPE_P (type);
5654 int final_ptr = POINTER_TYPE_P (type);
5655 int final_float = FLOAT_TYPE_P (type);
5656 unsigned int final_prec = TYPE_PRECISION (type);
5657 int final_unsignedp = TYPE_UNSIGNED (type);
5659 /* In addition to the cases of two conversions in a row
5660 handled below, if we are converting something to its own
5661 type via an object of identical or wider precision, neither
5662 conversion is needed. */
5663 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5664 && ((inter_int && final_int) || (inter_float && final_float))
5665 && inter_prec >= final_prec)
5666 return fold (build1 (code, type,
5667 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5669 /* Likewise, if the intermediate and final types are either both
5670 float or both integer, we don't need the middle conversion if
5671 it is wider than the final type and doesn't change the signedness
5672 (for integers). Avoid this if the final type is a pointer
5673 since then we sometimes need the inner conversion. Likewise if
5674 the outer has a precision not equal to the size of its mode. */
5675 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5676 || (inter_float && inside_float))
5677 && inter_prec >= inside_prec
5678 && (inter_float || inter_unsignedp == inside_unsignedp)
5679 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5680 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5681 && ! final_ptr)
5682 return fold (build1 (code, type,
5683 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5685 /* If we have a sign-extension of a zero-extended value, we can
5686 replace that by a single zero-extension. */
5687 if (inside_int && inter_int && final_int
5688 && inside_prec < inter_prec && inter_prec < final_prec
5689 && inside_unsignedp && !inter_unsignedp)
5690 return fold (build1 (code, type,
5691 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5693 /* Two conversions in a row are not needed unless:
5694 - some conversion is floating-point (overstrict for now), or
5695 - the intermediate type is narrower than both initial and
5696 final, or
5697 - the intermediate type and innermost type differ in signedness,
5698 and the outermost type is wider than the intermediate, or
5699 - the initial type is a pointer type and the precisions of the
5700 intermediate and final types differ, or
5701 - the final type is a pointer type and the precisions of the
5702 initial and intermediate types differ. */
5703 if (! inside_float && ! inter_float && ! final_float
5704 && (inter_prec > inside_prec || inter_prec > final_prec)
5705 && ! (inside_int && inter_int
5706 && inter_unsignedp != inside_unsignedp
5707 && inter_prec < final_prec)
5708 && ((inter_unsignedp && inter_prec > inside_prec)
5709 == (final_unsignedp && final_prec > inter_prec))
5710 && ! (inside_ptr && inter_prec != final_prec)
5711 && ! (final_ptr && inside_prec != inter_prec)
5712 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5713 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5714 && ! final_ptr)
5715 return fold (build1 (code, type,
5716 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
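/* Worked instance of the rules above (added; assumes the usual 8-bit
   unsigned char and 32-bit int/unsigned int): in

     (int) (unsigned int) c        for unsigned char c

   the intermediate conversion is wider than the inner type and does
   not change signedness, so it is dropped, giving (int) c.  A
   narrowing intermediate such as (int) (unsigned char) i for int i
   is kept, since it changes the value.  */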
5719 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5720 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5721 /* Detect assigning a bitfield. */
5722 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5723 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5725 /* Don't leave an assignment inside a conversion
5726 unless assigning a bitfield. */
5727 tree prev = TREE_OPERAND (t, 0);
5728 tem = copy_node (t);
5729 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5730 /* First do the assignment, then return converted constant. */
5731 tem = build (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5732 TREE_NO_UNUSED_WARNING (tem) = 1;
5733 TREE_USED (tem) = 1;
5734 return tem;
5737 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5738 constant (if x has signed type, the sign bit cannot be set
5739 in c). This folds extension into the BIT_AND_EXPR. */
5740 if (INTEGRAL_TYPE_P (type)
5741 && TREE_CODE (type) != BOOLEAN_TYPE
5742 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5743 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5745 tree and = TREE_OPERAND (t, 0);
5746 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5747 int change = 0;
5749 if (TYPE_UNSIGNED (TREE_TYPE (and))
5750 || (TYPE_PRECISION (type)
5751 <= TYPE_PRECISION (TREE_TYPE (and))))
5752 change = 1;
5753 else if (TYPE_PRECISION (TREE_TYPE (and1))
5754 <= HOST_BITS_PER_WIDE_INT
5755 && host_integerp (and1, 1))
5757 unsigned HOST_WIDE_INT cst;
5759 cst = tree_low_cst (and1, 1);
5760 cst &= (HOST_WIDE_INT) -1
5761 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5762 change = (cst == 0);
5763 #ifdef LOAD_EXTEND_OP
5764 if (change
5765 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5766 == ZERO_EXTEND))
5768 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5769 and0 = fold_convert (uns, and0);
5770 and1 = fold_convert (uns, and1);
5772 #endif
5774 if (change)
5775 return fold (build (BIT_AND_EXPR, type,
5776 fold_convert (type, and0),
5777 fold_convert (type, and1)));
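/* Added example (not in the sources; assumes 16-bit unsigned short
   and 32-bit unsigned int): the fold above rewrites

     (unsigned int) (s & 0xff)     for unsigned short s

   as

     (unsigned int) s & 0xffU

   so the widening is applied to each operand and the BIT_AND_EXPR is
   performed in the wider type.  */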
5780 tem = fold_convert_const (code, type, arg0);
5781 return tem ? tem : t;
5783 case VIEW_CONVERT_EXPR:
5784 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5785 return build1 (VIEW_CONVERT_EXPR, type,
5786 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5787 return t;
5789 case COMPONENT_REF:
5790 if (TREE_CODE (arg0) == CONSTRUCTOR
5791 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5793 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5794 if (m)
5795 return TREE_VALUE (m);
5797 return t;
5799 case RANGE_EXPR:
5800 if (TREE_CONSTANT (t) != wins)
5802 tem = copy_node (t);
5803 TREE_CONSTANT (tem) = wins;
5804 return tem;
5806 return t;
5808 case NEGATE_EXPR:
5809 if (negate_expr_p (arg0))
5810 return fold_convert (type, negate_expr (arg0));
5811 return t;
5813 case ABS_EXPR:
5814 if (wins
5815 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
5816 return fold_abs_const (arg0, type);
5817 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5818 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5819 /* Convert fabs((double)float) into (double)fabsf(float). */
5820 else if (TREE_CODE (arg0) == NOP_EXPR
5821 && TREE_CODE (type) == REAL_TYPE)
5823 tree targ0 = strip_float_extensions (arg0);
5824 if (targ0 != arg0)
5825 return fold_convert (type, fold (build1 (ABS_EXPR,
5826 TREE_TYPE (targ0),
5827 targ0)));
5829 else if (tree_expr_nonnegative_p (arg0))
5830 return arg0;
5831 return t;
5833 case CONJ_EXPR:
5834 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5835 return fold_convert (type, arg0);
5836 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5837 return build (COMPLEX_EXPR, type,
5838 TREE_OPERAND (arg0, 0),
5839 negate_expr (TREE_OPERAND (arg0, 1)));
5840 else if (TREE_CODE (arg0) == COMPLEX_CST)
5841 return build_complex (type, TREE_REALPART (arg0),
5842 negate_expr (TREE_IMAGPART (arg0)));
5843 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5844 return fold (build (TREE_CODE (arg0), type,
5845 fold (build1 (CONJ_EXPR, type,
5846 TREE_OPERAND (arg0, 0))),
5847 fold (build1 (CONJ_EXPR,
5848 type, TREE_OPERAND (arg0, 1)))));
5849 else if (TREE_CODE (arg0) == CONJ_EXPR)
5850 return TREE_OPERAND (arg0, 0);
5851 return t;
5853 case BIT_NOT_EXPR:
5854 if (wins)
5856 tem = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5857 ~ TREE_INT_CST_HIGH (arg0));
5858 TREE_TYPE (tem) = type;
5859 force_fit_type (tem, 0);
5860 TREE_OVERFLOW (tem) = TREE_OVERFLOW (arg0);
5861 TREE_CONSTANT_OVERFLOW (tem) = TREE_CONSTANT_OVERFLOW (arg0);
5862 return tem;
5864 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5865 return TREE_OPERAND (arg0, 0);
5866 return t;
5868 case PLUS_EXPR:
5869 /* A + (-B) -> A - B */
5870 if (TREE_CODE (arg1) == NEGATE_EXPR)
5871 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5872 /* (-A) + B -> B - A */
5873 if (TREE_CODE (arg0) == NEGATE_EXPR
5874 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
5875 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5876 if (! FLOAT_TYPE_P (type))
5878 if (integer_zerop (arg1))
5879 return non_lvalue (fold_convert (type, arg0));
5881 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5882 with a constant, and the two constants have no bits in common,
5883 we should treat this as a BIT_IOR_EXPR since this may produce more
5884 simplifications. */
5885 if (TREE_CODE (arg0) == BIT_AND_EXPR
5886 && TREE_CODE (arg1) == BIT_AND_EXPR
5887 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5888 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5889 && integer_zerop (const_binop (BIT_AND_EXPR,
5890 TREE_OPERAND (arg0, 1),
5891 TREE_OPERAND (arg1, 1), 0)))
5893 code = BIT_IOR_EXPR;
5894 goto bit_ior;
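/* Added example (not in the sources): the constants 4 and 3 have no
   bits in common, so

     (x & 4) + (x & 3)

   is rerouted to the BIT_IOR_EXPR code and handled as
   (x & 4) | (x & 3), where further simplifications may apply.  */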
5897 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5898 (plus (plus (mult) (mult)) (foo)) so that we can
5899 take advantage of the factoring cases below. */
5900 if ((TREE_CODE (arg0) == PLUS_EXPR
5901 && TREE_CODE (arg1) == MULT_EXPR)
5902 || (TREE_CODE (arg1) == PLUS_EXPR
5903 && TREE_CODE (arg0) == MULT_EXPR))
5905 tree parg0, parg1, parg, marg;
5907 if (TREE_CODE (arg0) == PLUS_EXPR)
5908 parg = arg0, marg = arg1;
5909 else
5910 parg = arg1, marg = arg0;
5911 parg0 = TREE_OPERAND (parg, 0);
5912 parg1 = TREE_OPERAND (parg, 1);
5913 STRIP_NOPS (parg0);
5914 STRIP_NOPS (parg1);
5916 if (TREE_CODE (parg0) == MULT_EXPR
5917 && TREE_CODE (parg1) != MULT_EXPR)
5918 return fold (build (PLUS_EXPR, type,
5919 fold (build (PLUS_EXPR, type,
5920 fold_convert (type, parg0),
5921 fold_convert (type, marg))),
5922 fold_convert (type, parg1)));
5923 if (TREE_CODE (parg0) != MULT_EXPR
5924 && TREE_CODE (parg1) == MULT_EXPR)
5925 return fold (build (PLUS_EXPR, type,
5926 fold (build (PLUS_EXPR, type,
5927 fold_convert (type, parg1),
5928 fold_convert (type, marg))),
5929 fold_convert (type, parg0)));
5932 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5934 tree arg00, arg01, arg10, arg11;
5935 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5937 /* (A * C) + (B * C) -> (A+B) * C.
5938 We are most concerned about the case where C is a constant,
5939 but other combinations show up during loop reduction. Since
5940 it is not difficult, try all four possibilities. */
5942 arg00 = TREE_OPERAND (arg0, 0);
5943 arg01 = TREE_OPERAND (arg0, 1);
5944 arg10 = TREE_OPERAND (arg1, 0);
5945 arg11 = TREE_OPERAND (arg1, 1);
5946 same = NULL_TREE;
5948 if (operand_equal_p (arg01, arg11, 0))
5949 same = arg01, alt0 = arg00, alt1 = arg10;
5950 else if (operand_equal_p (arg00, arg10, 0))
5951 same = arg00, alt0 = arg01, alt1 = arg11;
5952 else if (operand_equal_p (arg00, arg11, 0))
5953 same = arg00, alt0 = arg01, alt1 = arg10;
5954 else if (operand_equal_p (arg01, arg10, 0))
5955 same = arg01, alt0 = arg00, alt1 = arg11;
5957 /* No identical multiplicands; see if we can find a common
5958 power-of-two factor in non-power-of-two multiplies. This
5959 can help in multi-dimensional array access. */
5960 else if (TREE_CODE (arg01) == INTEGER_CST
5961 && TREE_CODE (arg11) == INTEGER_CST
5962 && TREE_INT_CST_HIGH (arg01) == 0
5963 && TREE_INT_CST_HIGH (arg11) == 0)
5965 HOST_WIDE_INT int01, int11, tmp;
5966 int01 = TREE_INT_CST_LOW (arg01);
5967 int11 = TREE_INT_CST_LOW (arg11);
5969 /* Move min of absolute values to int11. */
5970 if ((int01 >= 0 ? int01 : -int01)
5971 < (int11 >= 0 ? int11 : -int11))
5973 tmp = int01, int01 = int11, int11 = tmp;
5974 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5975 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5978 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5980 alt0 = fold (build (MULT_EXPR, type, arg00,
5981 build_int_2 (int01 / int11, 0)));
5982 alt1 = arg10;
5983 same = arg11;
5987 if (same)
5988 return fold (build (MULT_EXPR, type,
5989 fold (build (PLUS_EXPR, type, alt0, alt1)),
5990 same));
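/* Added examples (not in the sources): the factoring above rewrites

     a*c + b*c    ->  (a + b) * c

   and the power-of-two case turns, e.g. for strides 12 and 4,

     i*12 + j*4   ->  (i*3 + j) * 4

   since 4 is a power of two that divides 12; this pattern shows up
   in multi-dimensional array addressing.  */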
5993 else
5995 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5996 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5997 return non_lvalue (fold_convert (type, arg0));
5999 /* Likewise if the operands are reversed. */
6000 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6001 return non_lvalue (fold_convert (type, arg1));
6003 /* Convert x+x into x*2.0. */
6004 if (operand_equal_p (arg0, arg1, 0)
6005 && SCALAR_FLOAT_TYPE_P (type))
6006 return fold (build (MULT_EXPR, type, arg0,
6007 build_real (type, dconst2)));
6009 /* Convert x*c+x into x*(c+1). */
6010 if (flag_unsafe_math_optimizations
6011 && TREE_CODE (arg0) == MULT_EXPR
6012 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6013 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6014 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6016 REAL_VALUE_TYPE c;
6018 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6019 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6020 return fold (build (MULT_EXPR, type, arg1,
6021 build_real (type, c)));
6024 /* Convert x+x*c into x*(c+1). */
6025 if (flag_unsafe_math_optimizations
6026 && TREE_CODE (arg1) == MULT_EXPR
6027 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6028 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6029 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6031 REAL_VALUE_TYPE c;
6033 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6034 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6035 return fold (build (MULT_EXPR, type, arg0,
6036 build_real (type, c)));
6039 /* Convert x*c1+x*c2 into x*(c1+c2). */
6040 if (flag_unsafe_math_optimizations
6041 && TREE_CODE (arg0) == MULT_EXPR
6042 && TREE_CODE (arg1) == MULT_EXPR
6043 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6044 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6045 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6046 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6047 && operand_equal_p (TREE_OPERAND (arg0, 0),
6048 TREE_OPERAND (arg1, 0), 0))
6050 REAL_VALUE_TYPE c1, c2;
6052 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6053 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6054 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6055 return fold (build (MULT_EXPR, type,
6056 TREE_OPERAND (arg0, 0),
6057 build_real (type, c1)));
6059 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6060 if (flag_unsafe_math_optimizations
6061 && TREE_CODE (arg1) == PLUS_EXPR
6062 && TREE_CODE (arg0) != MULT_EXPR)
6064 tree tree10 = TREE_OPERAND (arg1, 0);
6065 tree tree11 = TREE_OPERAND (arg1, 1);
6066 if (TREE_CODE (tree11) == MULT_EXPR
6067 && TREE_CODE (tree10) == MULT_EXPR)
6069 tree tree0;
6070 tree0 = fold (build (PLUS_EXPR, type, arg0, tree10));
6071 return fold (build (PLUS_EXPR, type, tree0, tree11));
6074 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
6075 if (flag_unsafe_math_optimizations
6076 && TREE_CODE (arg0) == PLUS_EXPR
6077 && TREE_CODE (arg1) != MULT_EXPR)
6079 tree tree00 = TREE_OPERAND (arg0, 0);
6080 tree tree01 = TREE_OPERAND (arg0, 1);
6081 if (TREE_CODE (tree01) == MULT_EXPR
6082 && TREE_CODE (tree00) == MULT_EXPR)
6084 tree tree0;
6085 tree0 = fold (build (PLUS_EXPR, type, tree01, arg1));
6086 return fold (build (PLUS_EXPR, type, tree00, tree0));
6091 bit_rotate:
6092 /* (A << C1) + (A >> C2), with A unsigned and C1+C2 equal to the
6093 size of A, is a rotate of A by C1 bits. */
6094 /* (A << B) + (A >> (Z - B)), with A unsigned and Z the size of A,
6095 is a rotate of A by B bits. */
6097 enum tree_code code0, code1;
6098 code0 = TREE_CODE (arg0);
6099 code1 = TREE_CODE (arg1);
6100 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6101 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6102 && operand_equal_p (TREE_OPERAND (arg0, 0),
6103 TREE_OPERAND (arg1, 0), 0)
6104 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6106 tree tree01, tree11;
6107 enum tree_code code01, code11;
6109 tree01 = TREE_OPERAND (arg0, 1);
6110 tree11 = TREE_OPERAND (arg1, 1);
6111 STRIP_NOPS (tree01);
6112 STRIP_NOPS (tree11);
6113 code01 = TREE_CODE (tree01);
6114 code11 = TREE_CODE (tree11);
6115 if (code01 == INTEGER_CST
6116 && code11 == INTEGER_CST
6117 && TREE_INT_CST_HIGH (tree01) == 0
6118 && TREE_INT_CST_HIGH (tree11) == 0
6119 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6120 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6121 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6122 code0 == LSHIFT_EXPR ? tree01 : tree11);
6123 else if (code11 == MINUS_EXPR)
6125 tree tree110, tree111;
6126 tree110 = TREE_OPERAND (tree11, 0);
6127 tree111 = TREE_OPERAND (tree11, 1);
6128 STRIP_NOPS (tree110);
6129 STRIP_NOPS (tree111);
6130 if (TREE_CODE (tree110) == INTEGER_CST
6131 && 0 == compare_tree_int (tree110,
6132 TYPE_PRECISION
6133 (TREE_TYPE (TREE_OPERAND
6134 (arg0, 0))))
6135 && operand_equal_p (tree01, tree111, 0))
6136 return build ((code0 == LSHIFT_EXPR
6137 ? LROTATE_EXPR
6138 : RROTATE_EXPR),
6139 type, TREE_OPERAND (arg0, 0), tree01);
6141 else if (code01 == MINUS_EXPR)
6143 tree tree010, tree011;
6144 tree010 = TREE_OPERAND (tree01, 0);
6145 tree011 = TREE_OPERAND (tree01, 1);
6146 STRIP_NOPS (tree010);
6147 STRIP_NOPS (tree011);
6148 if (TREE_CODE (tree010) == INTEGER_CST
6149 && 0 == compare_tree_int (tree010,
6150 TYPE_PRECISION
6151 (TREE_TYPE (TREE_OPERAND
6152 (arg0, 0))))
6153 && operand_equal_p (tree11, tree011, 0))
6154 return build ((code0 != LSHIFT_EXPR
6155 ? LROTATE_EXPR
6156 : RROTATE_EXPR),
6157 type, TREE_OPERAND (arg0, 0), tree11);
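/* Added illustration (not in the sources; assumes a 32-bit unsigned
   int x): the detection above recognizes both

     (x << 3) + (x >> 29)          constant counts summing to 32
     (x << n) + (x >> (32 - n))    variable count and its complement

   as a rotate of x, here an LROTATE_EXPR.  BIT_IOR_EXPR and
   BIT_XOR_EXPR reach the same code through the bit_rotate label.  */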
6162 associate:
6163 /* In most languages, we can't associate operations on floats through
6164 parentheses. Rather than remember where the parentheses were, we
6165 don't associate floats at all, unless the user has specified
6166 -funsafe-math-optimizations. */
6168 if (! wins
6169 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6171 tree var0, con0, lit0, minus_lit0;
6172 tree var1, con1, lit1, minus_lit1;
6174 /* Split both trees into variables, constants, and literals. Then
6175 associate each group together, the constants with literals,
6176 then the result with variables. This increases the chances of
6177 literals being recombined later and of generating relocatable
6178 expressions for the sum of a constant and literal. */
6179 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6180 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6181 code == MINUS_EXPR);
6183 /* Only do something if we found more than two objects. Otherwise,
6184 nothing has changed and we risk infinite recursion. */
6185 if (2 < ((var0 != 0) + (var1 != 0)
6186 + (con0 != 0) + (con1 != 0)
6187 + (lit0 != 0) + (lit1 != 0)
6188 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6190 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6191 if (code == MINUS_EXPR)
6192 code = PLUS_EXPR;
6194 var0 = associate_trees (var0, var1, code, type);
6195 con0 = associate_trees (con0, con1, code, type);
6196 lit0 = associate_trees (lit0, lit1, code, type);
6197 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6199 /* Preserve the MINUS_EXPR if the negative part of the literal is
6200 greater than the positive part. Otherwise, the multiplicative
6201 folding code (i.e. extract_muldiv) may be fooled when
6202 unsigned constants are subtracted, as in the following
6203 example: ((X*2 + 4) - 8U)/2. */
6204 if (minus_lit0 && lit0)
6206 if (TREE_CODE (lit0) == INTEGER_CST
6207 && TREE_CODE (minus_lit0) == INTEGER_CST
6208 && tree_int_cst_lt (lit0, minus_lit0))
6210 minus_lit0 = associate_trees (minus_lit0, lit0,
6211 MINUS_EXPR, type);
6212 lit0 = 0;
6214 else
6216 lit0 = associate_trees (lit0, minus_lit0,
6217 MINUS_EXPR, type);
6218 minus_lit0 = 0;
6221 if (minus_lit0)
6223 if (con0 == 0)
6224 return fold_convert (type,
6225 associate_trees (var0, minus_lit0,
6226 MINUS_EXPR, type));
6227 else
6229 con0 = associate_trees (con0, minus_lit0,
6230 MINUS_EXPR, type);
6231 return fold_convert (type,
6232 associate_trees (var0, con0,
6233 PLUS_EXPR, type));
6237 con0 = associate_trees (con0, lit0, code, type);
6238 return fold_convert (type, associate_trees (var0, con0,
6239 code, type));
6243 binary:
6244 if (wins)
6245 t1 = const_binop (code, arg0, arg1, 0);
6246 if (t1 != NULL_TREE)
6248 /* The return value should always have
6249 the same type as the original expression. */
6250 if (TREE_TYPE (t1) != type)
6251 t1 = fold_convert (type, t1);
6253 return t1;
6255 return t;
6257 case MINUS_EXPR:
6258 /* A - (-B) -> A + B */
6259 if (TREE_CODE (arg1) == NEGATE_EXPR)
6260 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6261 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6262 if (TREE_CODE (arg0) == NEGATE_EXPR
6263 && (FLOAT_TYPE_P (type)
6264 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6265 && negate_expr_p (arg1)
6266 && reorder_operands_p (arg0, arg1))
6267 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6268 TREE_OPERAND (arg0, 0)));
6270 if (! FLOAT_TYPE_P (type))
6272 if (! wins && integer_zerop (arg0))
6273 return negate_expr (fold_convert (type, arg1));
6274 if (integer_zerop (arg1))
6275 return non_lvalue (fold_convert (type, arg0));
6277 /* Fold A - (A & B) into ~B & A. */
6278 if (!TREE_SIDE_EFFECTS (arg0)
6279 && TREE_CODE (arg1) == BIT_AND_EXPR)
6281 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6282 return fold (build (BIT_AND_EXPR, type,
6283 fold (build1 (BIT_NOT_EXPR, type,
6284 TREE_OPERAND (arg1, 0))),
6285 arg0));
6286 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6287 return fold (build (BIT_AND_EXPR, type,
6288 fold (build1 (BIT_NOT_EXPR, type,
6289 TREE_OPERAND (arg1, 1))),
6290 arg0));
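/* Added example (not in the sources): for a side-effect-free A the
   fold above rewrites

     a - (a & b)   ->  ~b & a

   matching A against either operand of the BIT_AND_EXPR.  */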
6293 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6294 any power of 2 minus 1. */
6295 if (TREE_CODE (arg0) == BIT_AND_EXPR
6296 && TREE_CODE (arg1) == BIT_AND_EXPR
6297 && operand_equal_p (TREE_OPERAND (arg0, 0),
6298 TREE_OPERAND (arg1, 0), 0))
6300 tree mask0 = TREE_OPERAND (arg0, 1);
6301 tree mask1 = TREE_OPERAND (arg1, 1);
6302 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6304 if (operand_equal_p (tem, mask1, 0))
6306 tem = fold (build (BIT_XOR_EXPR, type,
6307 TREE_OPERAND (arg0, 0), mask1));
6308 return fold (build (MINUS_EXPR, type, tem, mask1));
6313 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6314 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6315 return non_lvalue (fold_convert (type, arg0));
6317 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6318 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6319 (-ARG1 + ARG0) reduces to -ARG1. */
6320 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6321 return negate_expr (fold_convert (type, arg1));
6323 /* Fold &x - &x. This can happen from &x.foo - &x.
6324 This is unsafe for certain floats even in non-IEEE formats.
6325 In IEEE, it is unsafe because it does the wrong thing for NaNs.
6326 Also note that operand_equal_p is always false if an operand
6327 is volatile. */
6329 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6330 && operand_equal_p (arg0, arg1, 0))
6331 return fold_convert (type, integer_zero_node);
6333 /* A - B -> A + (-B) if B is easily negatable. */
6334 if (!wins && negate_expr_p (arg1)
6335 && (FLOAT_TYPE_P (type)
6336 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6337 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6339 if (TREE_CODE (arg0) == MULT_EXPR
6340 && TREE_CODE (arg1) == MULT_EXPR
6341 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6343 /* (A * C) - (B * C) -> (A-B) * C. */
6344 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6345 TREE_OPERAND (arg1, 1), 0))
6346 return fold (build (MULT_EXPR, type,
6347 fold (build (MINUS_EXPR, type,
6348 TREE_OPERAND (arg0, 0),
6349 TREE_OPERAND (arg1, 0))),
6350 TREE_OPERAND (arg0, 1)));
6351 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6352 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6353 TREE_OPERAND (arg1, 0), 0))
6354 return fold (build (MULT_EXPR, type,
6355 TREE_OPERAND (arg0, 0),
6356 fold (build (MINUS_EXPR, type,
6357 TREE_OPERAND (arg0, 1),
6358 TREE_OPERAND (arg1, 1)))));
6361 goto associate;
6363 case MULT_EXPR:
6364 /* (-A) * (-B) -> A * B */
6365 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6366 return fold (build (MULT_EXPR, type,
6367 TREE_OPERAND (arg0, 0),
6368 negate_expr (arg1)));
6369 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6370 return fold (build (MULT_EXPR, type,
6371 negate_expr (arg0),
6372 TREE_OPERAND (arg1, 0)));
6374 if (! FLOAT_TYPE_P (type))
6376 if (integer_zerop (arg1))
6377 return omit_one_operand (type, arg1, arg0);
6378 if (integer_onep (arg1))
6379 return non_lvalue (fold_convert (type, arg0));
6381 /* (a * (1 << b)) is (a << b) */
6382 if (TREE_CODE (arg1) == LSHIFT_EXPR
6383 && integer_onep (TREE_OPERAND (arg1, 0)))
6384 return fold (build (LSHIFT_EXPR, type, arg0,
6385 TREE_OPERAND (arg1, 1)));
6386 if (TREE_CODE (arg0) == LSHIFT_EXPR
6387 && integer_onep (TREE_OPERAND (arg0, 0)))
6388 return fold (build (LSHIFT_EXPR, type, arg1,
6389 TREE_OPERAND (arg0, 1)));
6391 if (TREE_CODE (arg1) == INTEGER_CST
6392 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6393 fold_convert (type, arg1),
6394 code, NULL_TREE)))
6395 return fold_convert (type, tem);
6398 else
6400 /* Maybe fold x * 0 to 0. The expressions aren't the same
6401 when x is NaN, since x * 0 is also NaN. Nor are they the
6402 same in modes with signed zeros, since multiplying a
6403 negative value by 0 gives -0, not +0. */
6404 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6405 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6406 && real_zerop (arg1))
6407 return omit_one_operand (type, arg1, arg0);
6408 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6409 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6410 && real_onep (arg1))
6411 return non_lvalue (fold_convert (type, arg0));
6413 /* Transform x * -1.0 into -x. */
6414 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6415 && real_minus_onep (arg1))
6416 return fold (build1 (NEGATE_EXPR, type, arg0));
6418 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6419 if (flag_unsafe_math_optimizations
6420 && TREE_CODE (arg0) == RDIV_EXPR
6421 && TREE_CODE (arg1) == REAL_CST
6422 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6424 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6425 arg1, 0);
6426 if (tem)
6427 return fold (build (RDIV_EXPR, type, tem,
6428 TREE_OPERAND (arg0, 1)));
6431 if (flag_unsafe_math_optimizations)
6433 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6434 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6436 /* Optimizations of root(...)*root(...). */
6437 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6439 tree rootfn, arg, arglist;
6440 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6441 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6443 /* Optimize sqrt(x)*sqrt(x) as x. */
6444 if (BUILTIN_SQRT_P (fcode0)
6445 && operand_equal_p (arg00, arg10, 0)
6446 && ! HONOR_SNANS (TYPE_MODE (type)))
6447 return arg00;
6449 /* Optimize root(x)*root(y) as root(x*y). */
6450 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6451 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6452 arglist = build_tree_list (NULL_TREE, arg);
6453 return build_function_call_expr (rootfn, arglist);
6456 /* Optimize expN(x)*expN(y) as expN(x+y). */
6457 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6459 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6460 tree arg = build (PLUS_EXPR, type,
6461 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6462 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6463 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6464 return build_function_call_expr (expfn, arglist);
6467 /* Optimizations of pow(...)*pow(...). */
6468 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6469 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6470 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6472 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6473 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6474 1)));
6475 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6476 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6477 1)));
6479 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6480 if (operand_equal_p (arg01, arg11, 0))
6482 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6483 tree arg = build (MULT_EXPR, type, arg00, arg10);
6484 tree arglist = tree_cons (NULL_TREE, fold (arg),
6485 build_tree_list (NULL_TREE,
6486 arg01));
6487 return build_function_call_expr (powfn, arglist);
6490 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6491 if (operand_equal_p (arg00, arg10, 0))
6493 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6494 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6495 tree arglist = tree_cons (NULL_TREE, arg00,
6496 build_tree_list (NULL_TREE,
6497 arg));
6498 return build_function_call_expr (powfn, arglist);
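/* Added examples (not in the sources): with
   -funsafe-math-optimizations the two cases above give

     pow (x, y) * pow (z, y)   ->  pow (x * z, y)
     pow (x, y) * pow (x, z)   ->  pow (x, y + z)

   so e.g. pow (x, 2.0) * pow (x, 3.0) folds to pow (x, 5.0).  */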
6502 /* Optimize tan(x)*cos(x) as sin(x). */
6503 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6504 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6505 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6506 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6507 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6508 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6509 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6510 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6512 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6514 if (sinfn != NULL_TREE)
6515 return build_function_call_expr (sinfn,
6516 TREE_OPERAND (arg0, 1));
6519 /* Optimize x*pow(x,c) as pow(x,c+1). */
6520 if (fcode1 == BUILT_IN_POW
6521 || fcode1 == BUILT_IN_POWF
6522 || fcode1 == BUILT_IN_POWL)
6524 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6525 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6526 1)));
6527 if (TREE_CODE (arg11) == REAL_CST
6528 && ! TREE_CONSTANT_OVERFLOW (arg11)
6529 && operand_equal_p (arg0, arg10, 0))
6531 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6532 REAL_VALUE_TYPE c;
6533 tree arg, arglist;
6535 c = TREE_REAL_CST (arg11);
6536 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6537 arg = build_real (type, c);
6538 arglist = build_tree_list (NULL_TREE, arg);
6539 arglist = tree_cons (NULL_TREE, arg0, arglist);
6540 return build_function_call_expr (powfn, arglist);
6544 /* Optimize pow(x,c)*x as pow(x,c+1). */
6545 if (fcode0 == BUILT_IN_POW
6546 || fcode0 == BUILT_IN_POWF
6547 || fcode0 == BUILT_IN_POWL)
6549 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6550 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6551 1)));
6552 if (TREE_CODE (arg01) == REAL_CST
6553 && ! TREE_CONSTANT_OVERFLOW (arg01)
6554 && operand_equal_p (arg1, arg00, 0))
6556 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6557 REAL_VALUE_TYPE c;
6558 tree arg, arglist;
6560 c = TREE_REAL_CST (arg01);
6561 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6562 arg = build_real (type, c);
6563 arglist = build_tree_list (NULL_TREE, arg);
6564 arglist = tree_cons (NULL_TREE, arg1, arglist);
6565 return build_function_call_expr (powfn, arglist);
6569 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6570 if (! optimize_size
6571 && operand_equal_p (arg0, arg1, 0))
6573 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6575 if (powfn)
6577 tree arg = build_real (type, dconst2);
6578 tree arglist = build_tree_list (NULL_TREE, arg);
6579 arglist = tree_cons (NULL_TREE, arg0, arglist);
6580 return build_function_call_expr (powfn, arglist);
6585 goto associate;
6587 case BIT_IOR_EXPR:
6588 bit_ior:
6589 if (integer_all_onesp (arg1))
6590 return omit_one_operand (type, arg1, arg0);
6591 if (integer_zerop (arg1))
6592 return non_lvalue (fold_convert (type, arg0));
6593 if (operand_equal_p (arg0, arg1, 0))
6594 return non_lvalue (fold_convert (type, arg0));
6595 t1 = distribute_bit_expr (code, type, arg0, arg1);
6596 if (t1 != NULL_TREE)
6597 return t1;
6599 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6601 This results in more efficient code for machines without a NAND
6602 instruction. Combine will canonicalize to the first form
6603 which will allow use of NAND instructions provided by the
6604 backend if they exist. */
6605 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6606 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6608 return fold (build1 (BIT_NOT_EXPR, type,
6609 build (BIT_AND_EXPR, type,
6610 TREE_OPERAND (arg0, 0),
6611 TREE_OPERAND (arg1, 0))));
6614 /* See if this can be simplified into a rotate first. If that
6615 is unsuccessful, continue in the association code. */
6616 goto bit_rotate;
6618 case BIT_XOR_EXPR:
6619 if (integer_zerop (arg1))
6620 return non_lvalue (fold_convert (type, arg0));
6621 if (integer_all_onesp (arg1))
6622 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6623 if (operand_equal_p (arg0, arg1, 0))
6624 return omit_one_operand (type, integer_zero_node, arg0);
6626 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6627 with a constant, and the two constants have no bits in common,
6628 we should treat this as a BIT_IOR_EXPR since this may produce more
6629 simplifications. */
6630 if (TREE_CODE (arg0) == BIT_AND_EXPR
6631 && TREE_CODE (arg1) == BIT_AND_EXPR
6632 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6633 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6634 && integer_zerop (const_binop (BIT_AND_EXPR,
6635 TREE_OPERAND (arg0, 1),
6636 TREE_OPERAND (arg1, 1), 0)))
6638 code = BIT_IOR_EXPR;
6639 goto bit_ior;
6642 /* See if this can be simplified into a rotate first. If that
6643 is unsuccessful, continue in the association code. */
6644 goto bit_rotate;
6646 case BIT_AND_EXPR:
6647 if (integer_all_onesp (arg1))
6648 return non_lvalue (fold_convert (type, arg0));
6649 if (integer_zerop (arg1))
6650 return omit_one_operand (type, arg1, arg0);
6651 if (operand_equal_p (arg0, arg1, 0))
6652 return non_lvalue (fold_convert (type, arg0));
6653 t1 = distribute_bit_expr (code, type, arg0, arg1);
6654 if (t1 != NULL_TREE)
6655 return t1;
6656 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6657 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6658 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6660 unsigned int prec
6661 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6663 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6664 && (~TREE_INT_CST_LOW (arg1)
6665 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6666 return fold_convert (type, TREE_OPERAND (arg0, 0));
6669 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6671 This results in more efficient code for machines without a NOR
6672 instruction. Combine will canonicalize to the first form
6673 which will allow use of NOR instructions provided by the
6674 backend if they exist. */
6675 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6676 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6678 return fold (build1 (BIT_NOT_EXPR, type,
6679 build (BIT_IOR_EXPR, type,
6680 TREE_OPERAND (arg0, 0),
6681 TREE_OPERAND (arg1, 0))));
6684 goto associate;
6686 case RDIV_EXPR:
6687 /* Don't touch a floating-point divide by zero unless the mode
6688 of the constant can represent infinity. */
6689 if (TREE_CODE (arg1) == REAL_CST
6690 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6691 && real_zerop (arg1))
6692 return t;
6694 /* (-A) / (-B) -> A / B */
6695 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6696 return fold (build (RDIV_EXPR, type,
6697 TREE_OPERAND (arg0, 0),
6698 negate_expr (arg1)));
6699 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6700 return fold (build (RDIV_EXPR, type,
6701 negate_expr (arg0),
6702 TREE_OPERAND (arg1, 0)));
6704 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6705 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6706 && real_onep (arg1))
6707 return non_lvalue (fold_convert (type, arg0));
6709 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6710 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6711 && real_minus_onep (arg1))
6712 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6714 /* If ARG1 is a constant, we can convert this to a multiply by the
6715 reciprocal. This does not have the same rounding properties,
6716 so only do this if -funsafe-math-optimizations. We can actually
6717 always safely do it if ARG1 is a power of two, but it's hard to
6718 tell if it is or not in a portable manner. */
6719 if (TREE_CODE (arg1) == REAL_CST)
6721 if (flag_unsafe_math_optimizations
6722 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6723 arg1, 0)))
6724 return fold (build (MULT_EXPR, type, arg0, tem));
6725 /* Find the reciprocal if optimizing and the result is exact. */
6726 if (optimize)
6728 REAL_VALUE_TYPE r;
6729 r = TREE_REAL_CST (arg1);
6730 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6732 tem = build_real (type, r);
6733 return fold (build (MULT_EXPR, type, arg0, tem));
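/* Added examples (not in the sources): a division by a REAL_CST can
   become a multiplication,

     x / 2.0   ->  x * 0.5          exact inverse, done when optimizing
     x / 3.0   ->  x * (1.0/3.0)    only with -funsafe-math-optimizations,
                                    since the rounding may differ.  */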
6737 /* Convert A/B/C to A/(B*C). */
6738 if (flag_unsafe_math_optimizations
6739 && TREE_CODE (arg0) == RDIV_EXPR)
6740 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6741 fold (build (MULT_EXPR, type,
6742 TREE_OPERAND (arg0, 1), arg1))));
6744 /* Convert A/(B/C) to (A/B)*C. */
6745 if (flag_unsafe_math_optimizations
6746 && TREE_CODE (arg1) == RDIV_EXPR)
6747 return fold (build (MULT_EXPR, type,
6748 fold (build (RDIV_EXPR, type, arg0,
6749 TREE_OPERAND (arg1, 0))),
6750 TREE_OPERAND (arg1, 1)));
6752 /* Convert C1/(X*C2) into (C1/C2)/X. */
6753 if (flag_unsafe_math_optimizations
6754 && TREE_CODE (arg1) == MULT_EXPR
6755 && TREE_CODE (arg0) == REAL_CST
6756 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6758 tree tem = const_binop (RDIV_EXPR, arg0,
6759 TREE_OPERAND (arg1, 1), 0);
6760 if (tem)
6761 return fold (build (RDIV_EXPR, type, tem,
6762 TREE_OPERAND (arg1, 0)));
6765 if (flag_unsafe_math_optimizations)
6767 enum built_in_function fcode = builtin_mathfn_code (arg1);
6768 /* Optimize x/expN(y) into x*expN(-y). */
6769 if (BUILTIN_EXPONENT_P (fcode))
6771 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6772 tree arg = build1 (NEGATE_EXPR, type,
6773 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6774 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6775 arg1 = build_function_call_expr (expfn, arglist);
6776 return fold (build (MULT_EXPR, type, arg0, arg1));
6779 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6780 if (fcode == BUILT_IN_POW
6781 || fcode == BUILT_IN_POWF
6782 || fcode == BUILT_IN_POWL)
6784 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6785 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6786 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6787 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6788 tree arglist = tree_cons (NULL_TREE, arg10,
6789 build_tree_list (NULL_TREE, neg11));
6790 arg1 = build_function_call_expr (powfn, arglist);
6791 return fold (build (MULT_EXPR, type, arg0, arg1));
6795 if (flag_unsafe_math_optimizations)
6797 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6798 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6800 /* Optimize sin(x)/cos(x) as tan(x). */
6801 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6802 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6803 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6804 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6805 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6807 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
6809 if (tanfn != NULL_TREE)
6810 return build_function_call_expr (tanfn,
6811 TREE_OPERAND (arg0, 1));
6814 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6815 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6816 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6817 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6818 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6819 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6821 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
6823 if (tanfn != NULL_TREE)
6825 tree tmp = TREE_OPERAND (arg0, 1);
6826 tmp = build_function_call_expr (tanfn, tmp);
6827 return fold (build (RDIV_EXPR, type,
6828 build_real (type, dconst1),
6829 tmp));
6833 /* Optimize pow(x,c)/x as pow(x,c-1). */
6834 if (fcode0 == BUILT_IN_POW
6835 || fcode0 == BUILT_IN_POWF
6836 || fcode0 == BUILT_IN_POWL)
6838 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6839 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6840 if (TREE_CODE (arg01) == REAL_CST
6841 && ! TREE_CONSTANT_OVERFLOW (arg01)
6842 && operand_equal_p (arg1, arg00, 0))
6844 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6845 REAL_VALUE_TYPE c;
6846 tree arg, arglist;
6848 c = TREE_REAL_CST (arg01);
6849 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6850 arg = build_real (type, c);
6851 arglist = build_tree_list (NULL_TREE, arg);
6852 arglist = tree_cons (NULL_TREE, arg1, arglist);
6853 return build_function_call_expr (powfn, arglist);
6857 goto binary;
6859 case TRUNC_DIV_EXPR:
6860 case ROUND_DIV_EXPR:
6861 case FLOOR_DIV_EXPR:
6862 case CEIL_DIV_EXPR:
6863 case EXACT_DIV_EXPR:
6864 if (integer_onep (arg1))
6865 return non_lvalue (fold_convert (type, arg0));
6866 if (integer_zerop (arg1))
6867 return t;
6869 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6870 operation, EXACT_DIV_EXPR.
6872 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6873 At one time others generated faster code; it's not clear if they do
6874 after the last round of changes to the DIV code in expmed.c. */
6875 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6876 && multiple_of_p (type, arg0, arg1))
6877 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
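/* Added example (not in the sources): with FLOOR_DIV_EXPR or
   CEIL_DIV_EXPR, an expression such as

     (i*4 + 8) / 4

   has a dividend that is provably a multiple of the divisor, so it
   is rewritten as an EXACT_DIV_EXPR, the cheapest division for the
   expander.  */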
6879 if (TREE_CODE (arg1) == INTEGER_CST
6880 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6881 code, NULL_TREE)))
6882 return fold_convert (type, tem);
6884 goto binary;
6886 case CEIL_MOD_EXPR:
6887 case FLOOR_MOD_EXPR:
6888 case ROUND_MOD_EXPR:
6889 case TRUNC_MOD_EXPR:
6890 if (integer_onep (arg1))
6891 return omit_one_operand (type, integer_zero_node, arg0);
6892 if (integer_zerop (arg1))
6893 return t;
6895 if (TREE_CODE (arg1) == INTEGER_CST
6896 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6897 code, NULL_TREE)))
6898 return fold_convert (type, tem);
6900 goto binary;
6902 case LROTATE_EXPR:
6903 case RROTATE_EXPR:
6904 if (integer_all_onesp (arg0))
6905 return omit_one_operand (type, arg0, arg1);
6906 goto shift;
6908 case RSHIFT_EXPR:
6909 /* Optimize -1 >> x for arithmetic right shifts. */
6910 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
6911 return omit_one_operand (type, arg0, arg1);
6912 /* ... fall through ... */
6914 case LSHIFT_EXPR:
6915 shift:
6916 if (integer_zerop (arg1))
6917 return non_lvalue (fold_convert (type, arg0));
6918 if (integer_zerop (arg0))
6919 return omit_one_operand (type, arg0, arg1);
6921 /* Since a negative shift count is not well-defined,
6922 don't try to compute it in the compiler. */
6923 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6924 return t;
6925 /* Rewrite an LROTATE_EXPR by a constant into an
6926 RROTATE_EXPR by a new constant. */
6927 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6929 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6930 tem = fold_convert (TREE_TYPE (arg1), tem);
6931 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6932 return fold (build (RROTATE_EXPR, type, arg0, tem));
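/* Added example (not in the sources; assumes a 32-bit type): the
   rewrite above canonicalizes rotates to one direction, so a left
   rotate by 3 (an LROTATE_EXPR) is rebuilt as a right rotate by
   29 = 32 - 3 (an RROTATE_EXPR).  */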
6935 /* If we have a rotate of a bit operation with the rotate count and
6936 the second operand of the bit operation both constant,
6937 permute the two operations. */
6938 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6939 && (TREE_CODE (arg0) == BIT_AND_EXPR
6940 || TREE_CODE (arg0) == BIT_IOR_EXPR
6941 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6942 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6943 return fold (build (TREE_CODE (arg0), type,
6944 fold (build (code, type,
6945 TREE_OPERAND (arg0, 0), arg1)),
6946 fold (build (code, type,
6947 TREE_OPERAND (arg0, 1), arg1))));
6949 /* Two consecutive rotates adding up to the width of the mode can
6950 be ignored. */
6951 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6952 && TREE_CODE (arg0) == RROTATE_EXPR
6953 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6954 && TREE_INT_CST_HIGH (arg1) == 0
6955 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6956 && ((TREE_INT_CST_LOW (arg1)
6957 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6958 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6959 return TREE_OPERAND (arg0, 0);
6961 goto binary;
6963 case MIN_EXPR:
6964 if (operand_equal_p (arg0, arg1, 0))
6965 return omit_one_operand (type, arg0, arg1);
6966 if (INTEGRAL_TYPE_P (type)
6967 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6968 return omit_one_operand (type, arg1, arg0);
6969 goto associate;
6971 case MAX_EXPR:
6972 if (operand_equal_p (arg0, arg1, 0))
6973 return omit_one_operand (type, arg0, arg1);
6974 if (INTEGRAL_TYPE_P (type)
6975 && TYPE_MAX_VALUE (type)
6976 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6977 return omit_one_operand (type, arg1, arg0);
6978 goto associate;
6980 case TRUTH_NOT_EXPR:
6981 /* Note that the operand of this must be an int
6982 and its values must be 0 or 1.
6983 ("true" is a fixed value perhaps depending on the language,
6984 but we don't handle values other than 1 correctly yet.) */
6985 tem = invert_truthvalue (arg0);
6986 /* Avoid infinite recursion. */
6987 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6989 tem = fold_single_bit_test (code, arg0, arg1, type);
6990 if (tem)
6991 return tem;
6992 return t;
6994 return fold_convert (type, tem);
6996 case TRUTH_ANDIF_EXPR:
6997 /* Note that the operands of this must be ints
6998 and their values must be 0 or 1.
6999 ("true" is a fixed value perhaps depending on the language.) */
7000 /* If first arg is constant zero, return it. */
7001 if (integer_zerop (arg0))
7002 return fold_convert (type, arg0);
7003 case TRUTH_AND_EXPR:
7004 /* If either arg is constant true, drop it. */
7005 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7006 return non_lvalue (fold_convert (type, arg1));
7007 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7008 /* Preserve sequence points. */
7009 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7010 return non_lvalue (fold_convert (type, arg0));
7011 /* If second arg is constant zero, result is zero, but first arg
7012 must be evaluated. */
7013 if (integer_zerop (arg1))
7014 return omit_one_operand (type, arg1, arg0);
7015 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7016 case will be handled here. */
7017 if (integer_zerop (arg0))
7018 return omit_one_operand (type, arg0, arg1);
7020 truth_andor:
7021 /* We only do these simplifications if we are optimizing. */
7022 if (!optimize)
7023 return t;
7025 /* Check for things like (A || B) && (A || C). We can convert this
7026 to A || (B && C). Note that either operator can be any of the four
7027 truth and/or operations and the transformation will still be
7028 valid. Also note that we only care about order for the
7029 ANDIF and ORIF operators. If B contains side effects, this
7030 might change the truth-value of A. */
7031 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7032 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7033 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7034 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7035 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7036 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7038 tree a00 = TREE_OPERAND (arg0, 0);
7039 tree a01 = TREE_OPERAND (arg0, 1);
7040 tree a10 = TREE_OPERAND (arg1, 0);
7041 tree a11 = TREE_OPERAND (arg1, 1);
7042 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7043 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7044 && (code == TRUTH_AND_EXPR
7045 || code == TRUTH_OR_EXPR));
7047 if (operand_equal_p (a00, a10, 0))
7048 return fold (build (TREE_CODE (arg0), type, a00,
7049 fold (build (code, type, a01, a11))));
7050 else if (commutative && operand_equal_p (a00, a11, 0))
7051 return fold (build (TREE_CODE (arg0), type, a00,
7052 fold (build (code, type, a01, a10))));
7053 else if (commutative && operand_equal_p (a01, a10, 0))
7054 return fold (build (TREE_CODE (arg0), type, a01,
7055 fold (build (code, type, a00, a11))));
7057 /* This case is tricky because we must either have commutative
7058 operators or else A10 must not have side-effects. */
7060 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7061 && operand_equal_p (a01, a11, 0))
7062 return fold (build (TREE_CODE (arg0), type,
7063 fold (build (code, type, a00, a10)),
7064 a01));
7067 /* See if we can build a range comparison. */
7068 if (0 != (tem = fold_range_test (t)))
7069 return tem;
7071 /* Check for the possibility of merging component references. If our
7072 lhs is another similar operation, try to merge its rhs with our
7073 rhs. Then try to merge our lhs and rhs. */
7074 if (TREE_CODE (arg0) == code
7075 && 0 != (tem = fold_truthop (code, type,
7076 TREE_OPERAND (arg0, 1), arg1)))
7077 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7079 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7080 return tem;
7082 return t;
7084 case TRUTH_ORIF_EXPR:
7085 /* Note that the operands of this must be ints
7086 and their values must be 0 or true.
7087 ("true" is a fixed value perhaps depending on the language.) */
7088 /* If first arg is constant true, return it. */
7089 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7090 return fold_convert (type, arg0);
7091 case TRUTH_OR_EXPR:
7092 /* If either arg is constant zero, drop it. */
7093 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7094 return non_lvalue (fold_convert (type, arg1));
7095 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7096 /* Preserve sequence points. */
7097 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7098 return non_lvalue (fold_convert (type, arg0));
7099 /* If second arg is constant true, result is true, but we must
7100 evaluate first arg. */
7101 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7102 return omit_one_operand (type, arg1, arg0);
7103 /* Likewise for first arg, but note this only occurs here for
7104 TRUTH_OR_EXPR. */
7105 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7106 return omit_one_operand (type, arg0, arg1);
7107 goto truth_andor;
7109 case TRUTH_XOR_EXPR:
7110 /* If either arg is constant zero, drop it. */
7111 if (integer_zerop (arg0))
7112 return non_lvalue (fold_convert (type, arg1));
7113 if (integer_zerop (arg1))
7114 return non_lvalue (fold_convert (type, arg0));
7115 /* If either arg is constant true, this is a logical inversion. */
7116 if (integer_onep (arg0))
7117 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7118 if (integer_onep (arg1))
7119 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7120 return t;
7122 case EQ_EXPR:
7123 case NE_EXPR:
7124 case LT_EXPR:
7125 case GT_EXPR:
7126 case LE_EXPR:
7127 case GE_EXPR:
7128 /* If one arg is a real or integer constant, put it last. */
7129 if (tree_swap_operands_p (arg0, arg1, true))
7130 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7132 /* If this is an equality comparison of the address of a non-weak
7133 object against zero, then we know the result. */
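/* For illustration: given a local "int x;" (a non-weak decl),
   "&x == 0" folds to 0 and "&x != 0" folds to 1, since the address
   of a non-weak object is never null.  */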
7134 if ((code == EQ_EXPR || code == NE_EXPR)
7135 && TREE_CODE (arg0) == ADDR_EXPR
7136 && DECL_P (TREE_OPERAND (arg0, 0))
7137 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7138 && integer_zerop (arg1))
7140 if (code == EQ_EXPR)
7141 return fold_convert (type, integer_zero_node);
7142 else
7143 return fold_convert (type, integer_one_node);
7146 /* If this is an equality comparison of the address of two non-weak,
7147 unaliased symbols neither of which are extern (since we do not
7148 have access to attributes for externs), then we know the result. */
7149 if ((code == EQ_EXPR || code == NE_EXPR)
7150 && TREE_CODE (arg0) == ADDR_EXPR
7151 && DECL_P (TREE_OPERAND (arg0, 0))
7152 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7153 && ! lookup_attribute ("alias",
7154 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7155 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7156 && TREE_CODE (arg1) == ADDR_EXPR
7157 && DECL_P (TREE_OPERAND (arg1, 0))
7158 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7159 && ! lookup_attribute ("alias",
7160 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7161 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7163 if (code == EQ_EXPR)
7164 return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
7165 ? integer_one_node : integer_zero_node));
7166 else
7167 return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
7168 ? integer_zero_node : integer_one_node));
7171 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7173 tree targ0 = strip_float_extensions (arg0);
7174 tree targ1 = strip_float_extensions (arg1);
7175 tree newtype = TREE_TYPE (targ0);
7177 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7178 newtype = TREE_TYPE (targ1);
7180 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7181 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7182 return fold (build (code, type, fold_convert (newtype, targ0),
7183 fold_convert (newtype, targ1)));
7185 /* (-a) CMP (-b) -> b CMP a */
7186 if (TREE_CODE (arg0) == NEGATE_EXPR
7187 && TREE_CODE (arg1) == NEGATE_EXPR)
7188 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7189 TREE_OPERAND (arg0, 0)));
7191 if (TREE_CODE (arg1) == REAL_CST)
7193 REAL_VALUE_TYPE cst;
7194 cst = TREE_REAL_CST (arg1);
7196 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7197 if (TREE_CODE (arg0) == NEGATE_EXPR)
7198 return
7199 fold (build (swap_tree_comparison (code), type,
7200 TREE_OPERAND (arg0, 0),
7201 build_real (TREE_TYPE (arg1),
7202 REAL_VALUE_NEGATE (cst))));
7204 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7205 /* a CMP (-0) -> a CMP 0 */
7206 if (REAL_VALUE_MINUS_ZERO (cst))
7207 return fold (build (code, type, arg0,
7208 build_real (TREE_TYPE (arg1), dconst0)));
7210 /* x != NaN is always true, other ops are always false. */
7211 if (REAL_VALUE_ISNAN (cst)
7212 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7214 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7215 return omit_one_operand (type, fold_convert (type, tem), arg0);
7218 /* Fold comparisons against infinity. */
7219 if (REAL_VALUE_ISINF (cst))
7221 tem = fold_inf_compare (code, type, arg0, arg1);
7222 if (tem != NULL_TREE)
7223 return tem;
7227 /* If this is a comparison of a real constant with a PLUS_EXPR
7228 or a MINUS_EXPR of a real constant, we can convert it into a
7229 comparison with a revised real constant as long as no overflow
7230 occurs when unsafe_math_optimizations are enabled. */
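/* For illustration, under -funsafe-math-optimizations:
   "x + 1.0 > 3.0" becomes "x > 2.0", since 3.0 - 1.0 folds to the
   exact constant 2.0 with no overflow.  */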
7231 if (flag_unsafe_math_optimizations
7232 && TREE_CODE (arg1) == REAL_CST
7233 && (TREE_CODE (arg0) == PLUS_EXPR
7234 || TREE_CODE (arg0) == MINUS_EXPR)
7235 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7236 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7237 ? MINUS_EXPR : PLUS_EXPR,
7238 arg1, TREE_OPERAND (arg0, 1), 0))
7239 && ! TREE_CONSTANT_OVERFLOW (tem))
7240 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7242 /* Likewise, we can simplify a comparison of a real constant with
7243 a MINUS_EXPR whose first operand is also a real constant, i.e.
7244 (c1 - x) < c2 becomes x > c1-c2. */
7245 if (flag_unsafe_math_optimizations
7246 && TREE_CODE (arg1) == REAL_CST
7247 && TREE_CODE (arg0) == MINUS_EXPR
7248 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7249 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7250 arg1, 0))
7251 && ! TREE_CONSTANT_OVERFLOW (tem))
7252 return fold (build (swap_tree_comparison (code), type,
7253 TREE_OPERAND (arg0, 1), tem));
7255 /* Fold comparisons against built-in math functions. */
7256 if (TREE_CODE (arg1) == REAL_CST
7257 && flag_unsafe_math_optimizations
7258 && ! flag_errno_math)
7260 enum built_in_function fcode = builtin_mathfn_code (arg0);
7262 if (fcode != END_BUILTINS)
7264 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7265 if (tem != NULL_TREE)
7266 return tem;
7271 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7272 if (TREE_CONSTANT (arg1)
7273 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7274 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7275 /* This optimization is invalid for ordered comparisons
7276 if CONST+INCR overflows or if foo+incr might overflow.
7277 This optimization is invalid for floating point due to rounding.
7278 For pointer types we assume overflow doesn't happen. */
7279 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7280 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7281 && (code == EQ_EXPR || code == NE_EXPR))))
7283 tree varop, newconst;
7285 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7287 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7288 arg1, TREE_OPERAND (arg0, 1)));
7289 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7290 TREE_OPERAND (arg0, 0),
7291 TREE_OPERAND (arg0, 1));
7293 else
7295 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7296 arg1, TREE_OPERAND (arg0, 1)));
7297 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7298 TREE_OPERAND (arg0, 0),
7299 TREE_OPERAND (arg0, 1));
7303 /* If VAROP is a reference to a bitfield, we must mask
7304 the constant by the width of the field. */
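/* Sketch of the masking done below: for a field of SIZE bits in a
   type of PRECISION bits, shifting NEWCONST left and then right by
   PRECISION - SIZE truncates it (with sign extension for signed
   fields) to the values the bitfield can actually represent.  */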
7305 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7306 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7308 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7309 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7310 tree folded_compare, shift;
7312 /* First check whether the comparison would come out
7313 always the same. If we don't do that we would
7314 change the meaning with the masking. */
7315 folded_compare = fold (build2 (code, type,
7316 TREE_OPERAND (varop, 0),
7317 arg1));
7318 if (integer_zerop (folded_compare)
7319 || integer_onep (folded_compare))
7320 return omit_one_operand (type, folded_compare, varop);
7322 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7323 0);
7324 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7325 newconst, shift));
7326 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7327 newconst, shift));
7330 return fold (build2 (code, type, varop, newconst));
7333 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7334 This transformation affects the cases which are handled in later
7335 optimizations involving comparisons with non-negative constants. */
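/* For example, "x >= 1" becomes "x > 0" and "x < 1" becomes
   "x <= 0", giving the later passes one canonical form to match.  */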
7336 if (TREE_CODE (arg1) == INTEGER_CST
7337 && TREE_CODE (arg0) != INTEGER_CST
7338 && tree_int_cst_sgn (arg1) > 0)
7340 switch (code)
7342 case GE_EXPR:
7343 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7344 return fold (build (GT_EXPR, type, arg0, arg1));
7346 case LT_EXPR:
7347 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7348 return fold (build (LE_EXPR, type, arg0, arg1));
7350 default:
7351 break;
7355 /* Comparisons with the highest or lowest possible integer of
7356 the specified size will have known values. */
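/* For example, if x is an unsigned char (maximum 255), "x > 255"
   folds to 0 and "x <= 255" folds to 1, while "x >= 255" becomes
   "x == 255" and "x < 255" becomes "x != 255".  */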
7358 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7360 if (TREE_CODE (arg1) == INTEGER_CST
7361 && ! TREE_CONSTANT_OVERFLOW (arg1)
7362 && width <= HOST_BITS_PER_WIDE_INT
7363 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7364 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7366 unsigned HOST_WIDE_INT signed_max;
7367 unsigned HOST_WIDE_INT max, min;
7369 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7371 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7373 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7374 min = 0;
7376 else
7378 max = signed_max;
7379 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7382 if (TREE_INT_CST_HIGH (arg1) == 0
7383 && TREE_INT_CST_LOW (arg1) == max)
7384 switch (code)
7386 case GT_EXPR:
7387 return omit_one_operand (type,
7388 fold_convert (type,
7389 integer_zero_node),
7390 arg0);
7391 case GE_EXPR:
7392 return fold (build (EQ_EXPR, type, arg0, arg1));
7394 case LE_EXPR:
7395 return omit_one_operand (type,
7396 fold_convert (type,
7397 integer_one_node),
7398 arg0);
7399 case LT_EXPR:
7400 return fold (build (NE_EXPR, type, arg0, arg1));
7402 /* The GE_EXPR and LT_EXPR cases above are not normally
7403 reached because of previous transformations. */
7405 default:
7406 break;
7408 else if (TREE_INT_CST_HIGH (arg1) == 0
7409 && TREE_INT_CST_LOW (arg1) == max - 1)
7410 switch (code)
7412 case GT_EXPR:
7413 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7414 return fold (build (EQ_EXPR, type, arg0, arg1));
7415 case LE_EXPR:
7416 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7417 return fold (build (NE_EXPR, type, arg0, arg1));
7418 default:
7419 break;
7421 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7422 && TREE_INT_CST_LOW (arg1) == min)
7423 switch (code)
7425 case LT_EXPR:
7426 return omit_one_operand (type,
7427 fold_convert (type,
7428 integer_zero_node),
7429 arg0);
7430 case LE_EXPR:
7431 return fold (build (EQ_EXPR, type, arg0, arg1));
7433 case GE_EXPR:
7434 return omit_one_operand (type,
7435 fold_convert (type,
7436 integer_one_node),
7437 arg0);
7438 case GT_EXPR:
7439 return fold (build (NE_EXPR, type, arg0, arg1));
7441 default:
7442 break;
7444 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7445 && TREE_INT_CST_LOW (arg1) == min + 1)
7446 switch (code)
7448 case GE_EXPR:
7449 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7450 return fold (build (NE_EXPR, type, arg0, arg1));
7451 case LT_EXPR:
7452 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7453 return fold (build (EQ_EXPR, type, arg0, arg1));
7454 default:
7455 break;
7458 else if (TREE_INT_CST_HIGH (arg1) == 0
7459 && TREE_INT_CST_LOW (arg1) == signed_max
7460 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7461 /* signed_type does not work on pointer types. */
7462 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7464 /* The following case also applies to X < signed_max+1
7465 and X >= signed_max+1 because of previous transformations.  */
7466 if (code == LE_EXPR || code == GT_EXPR)
7468 tree st0, st1;
7469 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7470 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7471 return fold
7472 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7473 type, fold_convert (st0, arg0),
7474 fold_convert (st1, integer_zero_node)));
7480 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7481 a MINUS_EXPR of a constant, we can convert it into a comparison with
7482 a revised constant as long as no overflow occurs. */
7483 if ((code == EQ_EXPR || code == NE_EXPR)
7484 && TREE_CODE (arg1) == INTEGER_CST
7485 && (TREE_CODE (arg0) == PLUS_EXPR
7486 || TREE_CODE (arg0) == MINUS_EXPR)
7487 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7488 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7489 ? MINUS_EXPR : PLUS_EXPR,
7490 arg1, TREE_OPERAND (arg0, 1), 0))
7491 && ! TREE_CONSTANT_OVERFLOW (tem))
7492 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7494 /* Similarly for a NEGATE_EXPR. */
7495 else if ((code == EQ_EXPR || code == NE_EXPR)
7496 && TREE_CODE (arg0) == NEGATE_EXPR
7497 && TREE_CODE (arg1) == INTEGER_CST
7498 && 0 != (tem = negate_expr (arg1))
7499 && TREE_CODE (tem) == INTEGER_CST
7500 && ! TREE_CONSTANT_OVERFLOW (tem))
7501 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7503 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7504 for !=. Don't do this for ordered comparisons due to overflow. */
7505 else if ((code == NE_EXPR || code == EQ_EXPR)
7506 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7507 return fold (build (code, type,
7508 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7510 /* If we are widening one operand of an integer comparison,
7511 see if the other operand is similarly being widened. Perhaps we
7512 can do the comparison in the narrower type. */
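/* For illustration: if s has type short, "(int) s == 3" can be
   performed as the narrower comparison "s == 3", since widening
   both sides cannot change the outcome.  */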
7513 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7514 && TREE_CODE (arg0) == NOP_EXPR
7515 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7516 && (code == EQ_EXPR || code == NE_EXPR
7517 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7518 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7519 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7520 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7521 || (TREE_CODE (t1) == INTEGER_CST
7522 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7523 return fold (build (code, type, tem,
7524 fold_convert (TREE_TYPE (tem), t1)));
7526 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7527 constant, we can simplify it. */
7528 else if (TREE_CODE (arg1) == INTEGER_CST
7529 && (TREE_CODE (arg0) == MIN_EXPR
7530 || TREE_CODE (arg0) == MAX_EXPR)
7531 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7532 return optimize_minmax_comparison (t);
7534 /* If we are comparing an ABS_EXPR with a constant, we can
7535 convert all the cases into explicit comparisons, but they may
7536 well not be faster than doing the ABS and one comparison.
7537 But ABS (X) <= C is a range comparison, which becomes a subtraction
7538 and a comparison, and is probably faster. */
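/* For example, "abs (x) <= 5" becomes "x >= -5 && x <= 5", which
   the range-test machinery can then collapse into a single
   unsigned comparison.  */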
7539 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7540 && TREE_CODE (arg0) == ABS_EXPR
7541 && ! TREE_SIDE_EFFECTS (arg0)
7542 && (0 != (tem = negate_expr (arg1)))
7543 && TREE_CODE (tem) == INTEGER_CST
7544 && ! TREE_CONSTANT_OVERFLOW (tem))
7545 return fold (build (TRUTH_ANDIF_EXPR, type,
7546 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7547 build (LE_EXPR, type,
7548 TREE_OPERAND (arg0, 0), arg1)));
7550 /* If this is an EQ or NE comparison with zero and ARG0 is
7551 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7552 two operations, but the latter can be done in one less insn
7553 on machines that have only two-operand insns or on which a
7554 constant cannot be the first operand. */
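/* For example, "((1 << n) & flags) == 0" becomes
   "((flags >> n) & 1) == 0": the same single-bit test, but the
   shifted operand is now the variable rather than the constant 1.  */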
7555 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7556 && TREE_CODE (arg0) == BIT_AND_EXPR)
7558 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7559 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7560 return
7561 fold (build (code, type,
7562 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7563 build (RSHIFT_EXPR,
7564 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7565 TREE_OPERAND (arg0, 1),
7566 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7567 fold_convert (TREE_TYPE (arg0),
7568 integer_one_node)),
7569 arg1));
7570 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7571 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7572 return
7573 fold (build (code, type,
7574 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7575 build (RSHIFT_EXPR,
7576 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7577 TREE_OPERAND (arg0, 0),
7578 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7579 fold_convert (TREE_TYPE (arg0),
7580 integer_one_node)),
7581 arg1));
7584 /* If this is an NE or EQ comparison of zero against the result of a
7585 signed MOD operation whose second operand is a power of 2, make
7586 the MOD operation unsigned since it is simpler and equivalent. */
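/* For example, with signed x, "x % 4 == 0" becomes
   "(unsigned) x % 4U == 0"; for a power-of-two divisor the
   remainder is zero in signed arithmetic exactly when it is zero
   in unsigned arithmetic.  */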
7587 if ((code == NE_EXPR || code == EQ_EXPR)
7588 && integer_zerop (arg1)
7589 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7590 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7591 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7592 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7593 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7594 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7596 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7597 tree newmod = build (TREE_CODE (arg0), newtype,
7598 fold_convert (newtype,
7599 TREE_OPERAND (arg0, 0)),
7600 fold_convert (newtype,
7601 TREE_OPERAND (arg0, 1)));
7603 return build (code, type, newmod, fold_convert (newtype, arg1));
7606 /* If this is an NE comparison of zero with an AND of one, remove the
7607 comparison since the AND will give the correct value. */
7608 if (code == NE_EXPR && integer_zerop (arg1)
7609 && TREE_CODE (arg0) == BIT_AND_EXPR
7610 && integer_onep (TREE_OPERAND (arg0, 1)))
7611 return fold_convert (type, arg0);
7613 /* If we have (A & C) == C where C is a power of 2, convert this into
7614 (A & C) != 0. Similarly for NE_EXPR. */
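/* For example, "(flags & 8) == 8" becomes "(flags & 8) != 0",
   avoiding a second use of the constant in the comparison.  */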
7615 if ((code == EQ_EXPR || code == NE_EXPR)
7616 && TREE_CODE (arg0) == BIT_AND_EXPR
7617 && integer_pow2p (TREE_OPERAND (arg0, 1))
7618 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7619 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7620 arg0, integer_zero_node));
7622 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7623 2, then fold the expression into shifts and logical operations. */
7624 tem = fold_single_bit_test (code, arg0, arg1, type);
7625 if (tem)
7626 return tem;
7628 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7629 Similarly for NE_EXPR. */
7630 if ((code == EQ_EXPR || code == NE_EXPR)
7631 && TREE_CODE (arg0) == BIT_AND_EXPR
7632 && TREE_CODE (arg1) == INTEGER_CST
7633 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7635 tree dandnotc
7636 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7637 arg1, build1 (BIT_NOT_EXPR,
7638 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7639 TREE_OPERAND (arg0, 1))));
7640 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7641 if (integer_nonzerop (dandnotc))
7642 return omit_one_operand (type, rslt, arg0);
7645 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7646 Similarly for NE_EXPR. */
7647 if ((code == EQ_EXPR || code == NE_EXPR)
7648 && TREE_CODE (arg0) == BIT_IOR_EXPR
7649 && TREE_CODE (arg1) == INTEGER_CST
7650 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7652 tree candnotd
7653 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7654 TREE_OPERAND (arg0, 1),
7655 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7656 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7657 if (integer_nonzerop (candnotd))
7658 return omit_one_operand (type, rslt, arg0);
7661 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7662 and similarly for >= into !=. */
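/* For example, with unsigned x, "x < 1U << n" becomes
   "(x >> n) == 0" and "x >= 1U << n" becomes "(x >> n) != 0".  */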
7663 if ((code == LT_EXPR || code == GE_EXPR)
7664 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7665 && TREE_CODE (arg1) == LSHIFT_EXPR
7666 && integer_onep (TREE_OPERAND (arg1, 0)))
7667 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7668 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7669 TREE_OPERAND (arg1, 1)),
7670 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7672 else if ((code == LT_EXPR || code == GE_EXPR)
7673 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7674 && (TREE_CODE (arg1) == NOP_EXPR
7675 || TREE_CODE (arg1) == CONVERT_EXPR)
7676 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7677 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7678 return
7679 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7680 fold_convert (TREE_TYPE (arg0),
7681 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7682 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7683 1))),
7684 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7686 /* Simplify comparison of something with itself. (For IEEE
7687 floating-point, we can only do some of these simplifications.) */
7688 if (operand_equal_p (arg0, arg1, 0))
7690 switch (code)
7692 case EQ_EXPR:
7693 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7694 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7695 return constant_boolean_node (1, type);
7696 break;
7698 case GE_EXPR:
7699 case LE_EXPR:
7700 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7701 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7702 return constant_boolean_node (1, type);
7703 return fold (build (EQ_EXPR, type, arg0, arg1));
7705 case NE_EXPR:
7706 /* For NE, we can only do this simplification if integer
7707 or we don't honor IEEE floating point NaNs. */
7708 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7709 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7710 break;
7711 /* ... fall through ... */
7712 case GT_EXPR:
7713 case LT_EXPR:
7714 return constant_boolean_node (0, type);
7715 default:
7716 abort ();
7720 /* If we are comparing an expression that just has comparisons
7721 of two integer values, arithmetic expressions of those comparisons,
7722 and constants, we can simplify it. There are only three cases
7723 to check: the two values can either be equal, the first can be
7724 greater, or the second can be greater. Fold the expression for
7725 those three values. Since each value must be 0 or 1, we have
7726 eight possibilities, each of which corresponds to the constant 0
7727 or 1 or one of the six possible comparisons.
7729 This handles common cases like (a > b) == 0 but also handles
7730 expressions like ((x > y) - (y > x)) > 0, which supposedly
7731 occur in macroized code. */
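/* For illustration: "(a > b) == 0" yields the mask 3 below and
   folds to "a <= b", while "((x > y) - (y > x)) > 0" yields the
   mask 4 and folds back to just "x > y".  */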
7733 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7735 tree cval1 = 0, cval2 = 0;
7736 int save_p = 0;
7738 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7739 /* Don't handle degenerate cases here; they should already
7740 have been handled anyway. */
7741 && cval1 != 0 && cval2 != 0
7742 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7743 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7744 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7745 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7746 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7747 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7748 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7750 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7751 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7753 /* We can't just pass T to eval_subst in case cval1 or cval2
7754 was the same as ARG1. */
7756 tree high_result
7757 = fold (build (code, type,
7758 eval_subst (arg0, cval1, maxval, cval2, minval),
7759 arg1));
7760 tree equal_result
7761 = fold (build (code, type,
7762 eval_subst (arg0, cval1, maxval, cval2, maxval),
7763 arg1));
7764 tree low_result
7765 = fold (build (code, type,
7766 eval_subst (arg0, cval1, minval, cval2, maxval),
7767 arg1));
7769 /* All three of these results should be 0 or 1. Confirm they
7770 are. Then use those values to select the proper code
7771 to use. */
7773 if ((integer_zerop (high_result)
7774 || integer_onep (high_result))
7775 && (integer_zerop (equal_result)
7776 || integer_onep (equal_result))
7777 && (integer_zerop (low_result)
7778 || integer_onep (low_result)))
7780 /* Make a 3-bit mask with the high-order bit being the
7781 value for `>', the next for '=', and the low for '<'. */
7782 switch ((integer_onep (high_result) * 4)
7783 + (integer_onep (equal_result) * 2)
7784 + integer_onep (low_result))
7786 case 0:
7787 /* Always false. */
7788 return omit_one_operand (type, integer_zero_node, arg0);
7789 case 1:
7790 code = LT_EXPR;
7791 break;
7792 case 2:
7793 code = EQ_EXPR;
7794 break;
7795 case 3:
7796 code = LE_EXPR;
7797 break;
7798 case 4:
7799 code = GT_EXPR;
7800 break;
7801 case 5:
7802 code = NE_EXPR;
7803 break;
7804 case 6:
7805 code = GE_EXPR;
7806 break;
7807 case 7:
7808 /* Always true. */
7809 return omit_one_operand (type, integer_one_node, arg0);
7812 tem = build (code, type, cval1, cval2);
7813 if (save_p)
7814 return save_expr (tem);
7815 else
7816 return fold (tem);
7821 /* If this is a comparison of a field, we may be able to simplify it. */
7822 if (((TREE_CODE (arg0) == COMPONENT_REF
7823 && lang_hooks.can_use_bit_fields_p ())
7824 || TREE_CODE (arg0) == BIT_FIELD_REF)
7825 && (code == EQ_EXPR || code == NE_EXPR)
7826 /* Handle the constant case even without -O
7827 to make sure the warnings are given. */
7828 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7830 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7831 if (t1)
7832 return t1;
7835 /* If this is a comparison of complex values and either or both sides
7836 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7837 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7838 This may prevent needless evaluations. */
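/* For illustration: "x == y" with complex operands becomes
   "__real__ x == __real__ y && __imag__ x == __imag__ y", and
   "x != y" uses || instead, so a mismatch already known from one
   part can short-circuit evaluation of the other.  */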
7839 if ((code == EQ_EXPR || code == NE_EXPR)
7840 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7841 && (TREE_CODE (arg0) == COMPLEX_EXPR
7842 || TREE_CODE (arg1) == COMPLEX_EXPR
7843 || TREE_CODE (arg0) == COMPLEX_CST
7844 || TREE_CODE (arg1) == COMPLEX_CST))
7846 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7847 tree real0, imag0, real1, imag1;
7849 arg0 = save_expr (arg0);
7850 arg1 = save_expr (arg1);
7851 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7852 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7853 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7854 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7856 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7857 : TRUTH_ORIF_EXPR),
7858 type,
7859 fold (build (code, type, real0, real1)),
7860 fold (build (code, type, imag0, imag1))));
7863 /* Optimize comparisons of strlen vs zero to a compare of the
7864 first character of the string vs zero. To wit,
7865 strlen(ptr) == 0 => *ptr == 0
7866 strlen(ptr) != 0 => *ptr != 0
7867 Other cases should reduce to one of these two (or a constant)
7868 due to the return value of strlen being unsigned. */
7869 if ((code == EQ_EXPR || code == NE_EXPR)
7870 && integer_zerop (arg1)
7871 && TREE_CODE (arg0) == CALL_EXPR)
7873 tree fndecl = get_callee_fndecl (arg0);
7874 tree arglist;
7876 if (fndecl
7877 && DECL_BUILT_IN (fndecl)
7878 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7879 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7880 && (arglist = TREE_OPERAND (arg0, 1))
7881 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7882 && ! TREE_CHAIN (arglist))
7883 return fold (build (code, type,
7884 build1 (INDIRECT_REF, char_type_node,
7885 TREE_VALUE (arglist)),
7886 integer_zero_node));
7889 /* Both ARG0 and ARG1 are known to be constants at this point. */
7890 t1 = fold_relational_const (code, type, arg0, arg1);
7891 return (t1 == NULL_TREE ? t : t1);
7893 case COND_EXPR:
7894 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7895 so all simple results must be passed through pedantic_non_lvalue. */
7896 if (TREE_CODE (arg0) == INTEGER_CST)
7898 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7899 /* Only optimize constant conditions when the selected branch
7900 has the same type as the COND_EXPR. This avoids optimizing
7901 away "c ? x : throw", where the throw has a void type. */
7902 if (! VOID_TYPE_P (TREE_TYPE (tem))
7903 || VOID_TYPE_P (type))
7904 return pedantic_non_lvalue (tem);
7905 return t;
7907 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
7908 return pedantic_omit_one_operand (type, arg1, arg0);
7910 /* If we have A op B ? A : C, we may be able to convert this to a
7911 simpler expression, depending on the operation and the values
7912 of B and C. Signed zeros prevent all of these transformations,
7913 for reasons given above each one. */
7915 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7916 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7917 arg1, TREE_OPERAND (arg0, 1))
7918 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7920 tree arg2 = TREE_OPERAND (t, 2);
7921 enum tree_code comp_code = TREE_CODE (arg0);
7923 STRIP_NOPS (arg2);
7925 /* If we have A op 0 ? A : -A, consider applying the following
7926 transformations:
7928 A == 0? A : -A same as -A
7929 A != 0? A : -A same as A
7930 A >= 0? A : -A same as abs (A)
7931 A > 0? A : -A same as abs (A)
7932 A <= 0? A : -A same as -abs (A)
7933 A < 0? A : -A same as -abs (A)
7935 None of these transformations work for modes with signed
7936 zeros. If A is +/-0, the first two transformations will
7937 change the sign of the result (from +0 to -0, or vice
7938 versa). The last four will fix the sign of the result,
7939 even though the original expressions could be positive or
7940 negative, depending on the sign of A.
7942 Note that all these transformations are correct if A is
7943 NaN, since the two alternatives (A and -A) are also NaNs. */
7944 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7945 ? real_zerop (TREE_OPERAND (arg0, 1))
7946 : integer_zerop (TREE_OPERAND (arg0, 1)))
7947 && TREE_CODE (arg2) == NEGATE_EXPR
7948 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7949 switch (comp_code)
7951 case EQ_EXPR:
7952 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
7953 tem = fold_convert (type, negate_expr (tem));
7954 return pedantic_non_lvalue (tem);
7955 case NE_EXPR:
7956 return pedantic_non_lvalue (fold_convert (type, arg1));
7957 case GE_EXPR:
7958 case GT_EXPR:
7959 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7960 arg1 = fold_convert (lang_hooks.types.signed_type
7961 (TREE_TYPE (arg1)), arg1);
7962 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7963 return pedantic_non_lvalue (fold_convert (type, arg1));
7964 case LE_EXPR:
7965 case LT_EXPR:
7966 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7967 arg1 = fold_convert (lang_hooks.types.signed_type
7968 (TREE_TYPE (arg1)), arg1);
7969 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7970 arg1 = negate_expr (fold_convert (type, arg1));
7971 return pedantic_non_lvalue (arg1);
7972 default:
7973 abort ();
7976 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7977 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7978 both transformations are correct when A is NaN: A != 0
7979 is then true, and A == 0 is false. */
7981 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7983 if (comp_code == NE_EXPR)
7984 return pedantic_non_lvalue (fold_convert (type, arg1));
7985 else if (comp_code == EQ_EXPR)
7986 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
7989 /* Try some transformations of A op B ? A : B.
7991 A == B? A : B same as B
7992 A != B? A : B same as A
7993 A >= B? A : B same as max (A, B)
7994 A > B? A : B same as max (B, A)
7995 A <= B? A : B same as min (A, B)
7996 A < B? A : B same as min (B, A)
7998 As above, these transformations don't work in the presence
7999 of signed zeros. For example, if A and B are zeros of
8000 opposite sign, the first two transformations will change
8001 the sign of the result. In the last four, the original
8002 expressions give different results for (A=+0, B=-0) and
8003 (A=-0, B=+0), but the transformed expressions do not.
8005 The first two transformations are correct if either A or B
8006 is a NaN. In the first transformation, the condition will
8007 be false, and B will indeed be chosen. In the case of the
8008 second transformation, the condition A != B will be true,
8009 and A will be chosen.
8011 The conversions to max() and min() are not correct if B is
8012 a number and A is not. The conditions in the original
8013 expressions will be false, so all four give B. The min()
8014 and max() versions would give a NaN instead. */
8015 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8016 arg2, TREE_OPERAND (arg0, 0)))
8018 tree comp_op0 = TREE_OPERAND (arg0, 0);
8019 tree comp_op1 = TREE_OPERAND (arg0, 1);
8020 tree comp_type = TREE_TYPE (comp_op0);
8022 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8023 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8025 comp_type = type;
8026 comp_op0 = arg1;
8027 comp_op1 = arg2;
8030 switch (comp_code)
8032 case EQ_EXPR:
8033 return pedantic_non_lvalue (fold_convert (type, arg2));
8034 case NE_EXPR:
8035 return pedantic_non_lvalue (fold_convert (type, arg1));
8036 case LE_EXPR:
8037 case LT_EXPR:
8038 /* In C++ a ?: expression can be an lvalue, so put the
8039 operand which will be used if they are equal first
8040 so that we can convert this back to the
8041 corresponding COND_EXPR. */
8042 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8043 return pedantic_non_lvalue (fold_convert
8044 (type, fold (build (MIN_EXPR, comp_type,
8045 (comp_code == LE_EXPR
8046 ? comp_op0 : comp_op1),
8047 (comp_code == LE_EXPR
8048 ? comp_op1 : comp_op0)))));
8049 break;
8050 case GE_EXPR:
8051 case GT_EXPR:
8052 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8053 return pedantic_non_lvalue (fold_convert
8054 (type, fold (build (MAX_EXPR, comp_type,
8055 (comp_code == GE_EXPR
8056 ? comp_op0 : comp_op1),
8057 (comp_code == GE_EXPR
8058 ? comp_op1 : comp_op0)))));
8059 break;
8060 default:
8061 abort ();
8065 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8066 we might still be able to simplify this. For example,
8067 if C1 is one less or one more than C2, this might have started
8068 out as a MIN or MAX and been transformed by this function.
8069 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
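/* For illustration: "x < 3 ? x : 2" (C1 == C2 + 1) is recognized
   as "min (x, 2)", and "x > 4 ? x : 5" (C1 == C2 - 1) as
   "max (x, 5)".  */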
8071 if (INTEGRAL_TYPE_P (type)
8072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8073 && TREE_CODE (arg2) == INTEGER_CST)
8074 switch (comp_code)
8076 case EQ_EXPR:
8077 /* We can replace A with C1 in this case. */
8078 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8079 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8080 TREE_OPERAND (t, 2)));
8082 case LT_EXPR:
8083 /* If C1 is C2 + 1, this is min(A, C2). */
8084 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8085 && operand_equal_p (TREE_OPERAND (arg0, 1),
8086 const_binop (PLUS_EXPR, arg2,
8087 integer_one_node, 0), 1))
8088 return pedantic_non_lvalue
8089 (fold (build (MIN_EXPR, type, arg1, arg2)));
8090 break;
8092 case LE_EXPR:
8093 /* If C1 is C2 - 1, this is min(A, C2). */
8094 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8095 && operand_equal_p (TREE_OPERAND (arg0, 1),
8096 const_binop (MINUS_EXPR, arg2,
8097 integer_one_node, 0), 1))
8098 return pedantic_non_lvalue
8099 (fold (build (MIN_EXPR, type, arg1, arg2)));
8100 break;
8102 case GT_EXPR:
8103 /* If C1 is C2 - 1, this is max(A, C2). */
8104 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8105 && operand_equal_p (TREE_OPERAND (arg0, 1),
8106 const_binop (MINUS_EXPR, arg2,
8107 integer_one_node, 0), 1))
8108 return pedantic_non_lvalue
8109 (fold (build (MAX_EXPR, type, arg1, arg2)));
8110 break;
8112 case GE_EXPR:
8113 /* If C1 is C2 + 1, this is max(A, C2). */
8114 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8115 && operand_equal_p (TREE_OPERAND (arg0, 1),
8116 const_binop (PLUS_EXPR, arg2,
8117 integer_one_node, 0), 1))
8118 return pedantic_non_lvalue
8119 (fold (build (MAX_EXPR, type, arg1, arg2)));
8120 break;
8121 case NE_EXPR:
8122 break;
8123 default:
8124 abort ();
8128 /* If the second operand is simpler than the third, swap them
8129 since that produces better jump optimization results. */
8130 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8131 TREE_OPERAND (t, 2), false))
8133 /* See if this can be inverted. If it can't, possibly because
8134 it was a floating-point inequality comparison, don't do
8135 anything. */
8136 tem = invert_truthvalue (arg0);
8138 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8139 return fold (build (code, type, tem,
8140 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8143 /* Convert A ? 1 : 0 to simply A. */
8144 if (integer_onep (TREE_OPERAND (t, 1))
8145 && integer_zerop (TREE_OPERAND (t, 2))
8146 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8147 call to fold will try to move the conversion inside
8148 a COND, which will recurse. In that case, the COND_EXPR
8149 is probably the best choice, so leave it alone. */
8150 && type == TREE_TYPE (arg0))
8151 return pedantic_non_lvalue (arg0);
8153 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8154 over COND_EXPR in cases such as floating point comparisons. */
8155 if (integer_zerop (TREE_OPERAND (t, 1))
8156 && integer_onep (TREE_OPERAND (t, 2))
8157 && truth_value_p (TREE_CODE (arg0)))
8158 return pedantic_non_lvalue (fold_convert (type,
8159 invert_truthvalue (arg0)));
8161 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8162 operation is simply A & 2. */
8164 if (integer_zerop (TREE_OPERAND (t, 2))
8165 && TREE_CODE (arg0) == NE_EXPR
8166 && integer_zerop (TREE_OPERAND (arg0, 1))
8167 && integer_pow2p (arg1)
8168 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8169 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8170 arg1, 1))
8171 return pedantic_non_lvalue (fold_convert (type,
8172 TREE_OPERAND (arg0, 0)));
8174 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8175 if (integer_zerop (TREE_OPERAND (t, 2))
8176 && truth_value_p (TREE_CODE (arg0))
8177 && truth_value_p (TREE_CODE (arg1)))
8178 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8179 arg0, arg1)));
8181 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8182 if (integer_onep (TREE_OPERAND (t, 2))
8183 && truth_value_p (TREE_CODE (arg0))
8184 && truth_value_p (TREE_CODE (arg1)))
8186 /* Only perform transformation if ARG0 is easily inverted. */
8187 tem = invert_truthvalue (arg0);
8188 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8189 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8190 tem, arg1)));
8193 return t;
8195 case COMPOUND_EXPR:
8196 /* When pedantic, a compound expression can be neither an lvalue
8197 nor an integer constant expression. */
8198 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8199 return t;
8200 /* Don't let (0, 0) be a null pointer constant.  */
8201 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8202 : fold_convert (type, arg1);
8203 return pedantic_non_lvalue (tem);
8205 case COMPLEX_EXPR:
8206 if (wins)
8207 return build_complex (type, arg0, arg1);
8208 return t;
8210 case REALPART_EXPR:
8211 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8212 return t;
8213 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8214 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8215 TREE_OPERAND (arg0, 1));
8216 else if (TREE_CODE (arg0) == COMPLEX_CST)
8217 return TREE_REALPART (arg0);
8218 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8219 return fold (build (TREE_CODE (arg0), type,
8220 fold (build1 (REALPART_EXPR, type,
8221 TREE_OPERAND (arg0, 0))),
8222 fold (build1 (REALPART_EXPR,
8223 type, TREE_OPERAND (arg0, 1)))));
8224 return t;
8226 case IMAGPART_EXPR:
8227 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8228 return fold_convert (type, integer_zero_node);
8229 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8230 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8231 TREE_OPERAND (arg0, 0));
8232 else if (TREE_CODE (arg0) == COMPLEX_CST)
8233 return TREE_IMAGPART (arg0);
8234 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8235 return fold (build (TREE_CODE (arg0), type,
8236 fold (build1 (IMAGPART_EXPR, type,
8237 TREE_OPERAND (arg0, 0))),
8238 fold (build1 (IMAGPART_EXPR, type,
8239 TREE_OPERAND (arg0, 1)))));
8240 return t;
8242 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8243 appropriate. */
8244 case CLEANUP_POINT_EXPR:
8245 if (! has_cleanups (arg0))
8246 return TREE_OPERAND (t, 0);
8249 enum tree_code code0 = TREE_CODE (arg0);
8250 int kind0 = TREE_CODE_CLASS (code0);
8251 tree arg00 = TREE_OPERAND (arg0, 0);
8252 tree arg01;
8254 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8255 return fold (build1 (code0, type,
8256 fold (build1 (CLEANUP_POINT_EXPR,
8257 TREE_TYPE (arg00), arg00))));
8259 if (kind0 == '<' || kind0 == '2'
8260 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8261 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8262 || code0 == TRUTH_XOR_EXPR)
8264 arg01 = TREE_OPERAND (arg0, 1);
8266 if (TREE_CONSTANT (arg00)
8267 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8268 && ! has_cleanups (arg00)))
8269 return fold (build (code0, type, arg00,
8270 fold (build1 (CLEANUP_POINT_EXPR,
8271 TREE_TYPE (arg01), arg01))));
8273 if (TREE_CONSTANT (arg01))
8274 return fold (build (code0, type,
8275 fold (build1 (CLEANUP_POINT_EXPR,
8276 TREE_TYPE (arg00), arg00)),
8277 arg01));
8280 return t;
8283 case CALL_EXPR:
8284 /* Check for a built-in function. */
8285 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8286 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8287 == FUNCTION_DECL)
8288 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8290 tree tmp = fold_builtin (t);
8291 if (tmp)
8292 return tmp;
8294 return t;
8296 default:
8297 return t;
8298 } /* switch (code) */
8301 #ifdef ENABLE_FOLD_CHECKING
8302 #undef fold
8304 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8305 static void fold_check_failed (tree, tree);
8306 void print_fold_checksum (tree);
8308 /* When --enable-checking=fold, compute a digest of expr before
8309 and after the actual fold call to verify that fold did not
8310 accidentally change the original expr.  */
8312 tree
8313 fold (tree expr)
8315 tree ret;
8316 struct md5_ctx ctx;
8317 unsigned char checksum_before[16], checksum_after[16];
8318 htab_t ht;
8320 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8321 md5_init_ctx (&ctx);
8322 fold_checksum_tree (expr, &ctx, ht);
8323 md5_finish_ctx (&ctx, checksum_before);
8324 htab_empty (ht);
8326 ret = fold_1 (expr);
8328 md5_init_ctx (&ctx);
8329 fold_checksum_tree (expr, &ctx, ht);
8330 md5_finish_ctx (&ctx, checksum_after);
8331 htab_delete (ht);
8333 if (memcmp (checksum_before, checksum_after, 16))
8334 fold_check_failed (expr, ret);
8336 return ret;
8339 void
8340 print_fold_checksum (tree expr)
8342 struct md5_ctx ctx;
8343 unsigned char checksum[16], cnt;
8344 htab_t ht;
8346 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8347 md5_init_ctx (&ctx);
8348 fold_checksum_tree (expr, &ctx, ht);
8349 md5_finish_ctx (&ctx, checksum);
8350 htab_delete (ht);
8351 for (cnt = 0; cnt < 16; ++cnt)
8352 fprintf (stderr, "%02x", checksum[cnt]);
8353 putc ('\n', stderr);
8356 static void
8357 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8359 internal_error ("fold check: original tree changed by fold");
8362 static void
8363 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8365 void **slot;
8366 enum tree_code code;
8367 char buf[sizeof (struct tree_decl)];
8368 int i, len;
8370 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8371 > sizeof (struct tree_decl)
8372 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8373 abort ();
8374 if (expr == NULL)
8375 return;
8376 slot = htab_find_slot (ht, expr, INSERT);
8377 if (*slot != NULL)
8378 return;
8379 *slot = expr;
8380 code = TREE_CODE (expr);
8381 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8383 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8384 memcpy (buf, expr, tree_size (expr));
8385 expr = (tree) buf;
8386 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8388 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8390 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8391 memcpy (buf, expr, tree_size (expr));
8392 expr = (tree) buf;
8393 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8395 else if (TREE_CODE_CLASS (code) == 't'
8396 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8398 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8399 memcpy (buf, expr, tree_size (expr));
8400 expr = (tree) buf;
8401 TYPE_POINTER_TO (expr) = NULL;
8402 TYPE_REFERENCE_TO (expr) = NULL;
8404 md5_process_bytes (expr, tree_size (expr), ctx);
8405 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8406 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8407 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8408 len = TREE_CODE_LENGTH (code);
8409 switch (TREE_CODE_CLASS (code))
8411 case 'c':
8412 switch (code)
8414 case STRING_CST:
8415 md5_process_bytes (TREE_STRING_POINTER (expr),
8416 TREE_STRING_LENGTH (expr), ctx);
8417 break;
8418 case COMPLEX_CST:
8419 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8420 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8421 break;
8422 case VECTOR_CST:
8423 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8424 break;
8425 default:
8426 break;
8428 break;
8429 case 'x':
8430 switch (code)
8432 case TREE_LIST:
8433 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8434 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8435 break;
8436 case TREE_VEC:
8437 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8438 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8439 break;
8440 default:
8441 break;
8443 break;
8444 case 'e':
8445 switch (code)
8447 case SAVE_EXPR: len = 2; break;
8448 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8449 case RTL_EXPR: len = 0; break;
8450 case WITH_CLEANUP_EXPR: len = 2; break;
8451 default: break;
8453 /* Fall through. */
8454 case 'r':
8455 case '<':
8456 case '1':
8457 case '2':
8458 case 's':
8459 for (i = 0; i < len; ++i)
8460 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8461 break;
8462 case 'd':
8463 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8464 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8465 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8466 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8467 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8468 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8469 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8470 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8471 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8472 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8473 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8474 break;
8475 case 't':
8476 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8477 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8478 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8479 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8480 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8481 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8482 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8483 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8484 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8485 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8486 break;
8487 default:
8488 break;
8492 #endif
8494 /* Perform constant folding and related simplification of initializer
8495 expression EXPR. This behaves identically to "fold" but ignores
8496 potential run-time traps and exceptions that fold must preserve. */
8498 tree
8499 fold_initializer (tree expr)
8501 int saved_signaling_nans = flag_signaling_nans;
8502 int saved_trapping_math = flag_trapping_math;
8503 int saved_trapv = flag_trapv;
8504 tree result;
8506 flag_signaling_nans = 0;
8507 flag_trapping_math = 0;
8508 flag_trapv = 0;
8510 result = fold (expr);
8512 flag_signaling_nans = saved_signaling_nans;
8513 flag_trapping_math = saved_trapping_math;
8514 flag_trapv = saved_trapv;
8516 return result;
8519 /* Determine if first argument is a multiple of second argument. Return 0 if
8520 it is not, or we cannot easily determine it to be.
8522 An example of the sort of thing we care about (at this point; this routine
8523 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8524 fold cases do now) is discovering that
8526 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8528 is a multiple of
8530 SAVE_EXPR (J * 8)
8532 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8534 This code also handles discovering that
8536 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8538 is a multiple of 8 so we don't have to worry about dealing with a
8539 possible remainder.
8541 Note that we *look* inside a SAVE_EXPR only to determine how it was
8542 calculated; it is not safe for fold to do much of anything else with the
8543 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8544 at run time. For example, the latter example above *cannot* be implemented
8545 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8546 evaluation time of the original SAVE_EXPR is not necessarily the same at
8547 the time the new expression is evaluated. The only optimization of this
8548 sort that would be valid is changing
8550 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8552 divided by 8 to
8554 SAVE_EXPR (I) * SAVE_EXPR (J)
8556 (where the same SAVE_EXPR (J) is used in the original and the
8557 transformed version). */
8559 static int
8560 multiple_of_p (tree type, tree top, tree bottom)
8562 if (operand_equal_p (top, bottom, 0))
8563 return 1;
8565 if (TREE_CODE (type) != INTEGER_TYPE)
8566 return 0;
8568 switch (TREE_CODE (top))
8570 case MULT_EXPR:
8571 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8572 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8574 case PLUS_EXPR:
8575 case MINUS_EXPR:
8576 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8577 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8579 case LSHIFT_EXPR:
8580 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8582 tree op1, t1;
8584 op1 = TREE_OPERAND (top, 1);
8585 /* const_binop may not detect overflow correctly,
8586 so check for it explicitly here. */
8587 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8588 > TREE_INT_CST_LOW (op1)
8589 && TREE_INT_CST_HIGH (op1) == 0
8590 && 0 != (t1 = fold_convert (type,
8591 const_binop (LSHIFT_EXPR,
8592 size_one_node,
8593 op1, 0)))
8594 && ! TREE_OVERFLOW (t1))
8595 return multiple_of_p (type, t1, bottom);
8597 return 0;
8599 case NOP_EXPR:
8600 /* Can't handle conversions from non-integral or wider integral type. */
8601 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8602 || (TYPE_PRECISION (type)
8603 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8604 return 0;
8606 /* .. fall through ... */
8608 case SAVE_EXPR:
8609 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8611 case INTEGER_CST:
8612 if (TREE_CODE (bottom) != INTEGER_CST
8613 || (TYPE_UNSIGNED (type)
8614 && (tree_int_cst_sgn (top) < 0
8615 || tree_int_cst_sgn (bottom) < 0)))
8616 return 0;
8617 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8618 top, bottom, 0));
8620 default:
8621 return 0;
8625 /* Return true if `t' is known to be non-negative. */
8627 int
8628 tree_expr_nonnegative_p (tree t)
8630 switch (TREE_CODE (t))
8632 case ABS_EXPR:
8633 return 1;
8635 case INTEGER_CST:
8636 return tree_int_cst_sgn (t) >= 0;
8638 case REAL_CST:
8639 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8641 case PLUS_EXPR:
8642 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8643 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8644 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8646 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8647 both unsigned and at least 2 bits shorter than the result. */
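/* For example, with 16-bit unsigned shorts a and b and a 32-bit
   int result, "(int) a + (int) b" is at most 2 * 65535, needing
   only 17 bits, so the sum cannot be negative.  */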
8648 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8649 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8650 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8652 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8653 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8654 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8655 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8657 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8658 TYPE_PRECISION (inner2)) + 1;
8659 return prec < TYPE_PRECISION (TREE_TYPE (t));
8662 break;
8664 case MULT_EXPR:
8665 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8667 /* x * x for floating point x is always non-negative. */
8668 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8669 return 1;
8670 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8671 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8674 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8675 both unsigned and the sum of their precisions is less than the result's.  */
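/* For example, with 8-bit unsigned chars a and b and a 32-bit int
   result, "(int) a * (int) b" is at most 255 * 255, which fits in
   16 bits, so the product cannot be negative.  */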
8676 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8677 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8678 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8680 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8681 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8682 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8683 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8684 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8685 < TYPE_PRECISION (TREE_TYPE (t));
8687 return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
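
    /* Sign-bit reasoning for the bitwise cases above (illustrative,
       not part of the original): AND clears every bit that is clear
       in either operand, so one nonnegative operand (sign bit 0)
       suffices, hence the ||.  IOR and XOR can set the sign bit when
       either operand has it set, so both operands must be
       nonnegative, hence the &&.  */
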
    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;
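
    /* Illustrative examples for the conversion case above (not part
       of the original): (double) i is nonnegative iff i is;
       (double) u is always nonnegative for unsigned u; and
       (int) (unsigned short) x is nonnegative because 16 < 32 and
       the inner type is unsigned.  A same-width unsigned-to-signed
       conversion proves nothing, since the bit pattern may
       reinterpret as a negative value.  */
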
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
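
	    /* Expansion example (illustrative, not part of the
	       original): CASE_BUILTIN_F (BUILT_IN_SQRT) produces
		 case BUILT_IN_SQRT: case BUILT_IN_SQRTF:
		 case BUILT_IN_SQRTL:
	       covering the double, float and long double variants;
	       CASE_BUILTIN_I likewise covers the int, long and
	       long long variants.  */
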
	    CASE_BUILTIN_F (BUILT_IN_ACOS)
	    CASE_BUILTIN_F (BUILT_IN_ACOSH)
	    CASE_BUILTIN_F (BUILT_IN_CABS)
	    CASE_BUILTIN_F (BUILT_IN_COSH)
	    CASE_BUILTIN_F (BUILT_IN_ERFC)
	    CASE_BUILTIN_F (BUILT_IN_EXP)
	    CASE_BUILTIN_F (BUILT_IN_EXP10)
	    CASE_BUILTIN_F (BUILT_IN_EXP2)
	    CASE_BUILTIN_F (BUILT_IN_FABS)
	    CASE_BUILTIN_F (BUILT_IN_FDIM)
	    CASE_BUILTIN_F (BUILT_IN_FREXP)
	    CASE_BUILTIN_F (BUILT_IN_HYPOT)
	    CASE_BUILTIN_F (BUILT_IN_POW10)
	    CASE_BUILTIN_F (BUILT_IN_SQRT)
	    CASE_BUILTIN_I (BUILT_IN_FFS)
	    CASE_BUILTIN_I (BUILT_IN_PARITY)
	    CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
	      /* Always true.  */
	      return 1;

	    CASE_BUILTIN_F (BUILT_IN_ASINH)
	    CASE_BUILTIN_F (BUILT_IN_ATAN)
	    CASE_BUILTIN_F (BUILT_IN_ATANH)
	    CASE_BUILTIN_F (BUILT_IN_CBRT)
	    CASE_BUILTIN_F (BUILT_IN_CEIL)
	    CASE_BUILTIN_F (BUILT_IN_ERF)
	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
	    CASE_BUILTIN_F (BUILT_IN_FMOD)
	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
	    CASE_BUILTIN_F (BUILT_IN_LRINT)
	    CASE_BUILTIN_F (BUILT_IN_LROUND)
	    CASE_BUILTIN_F (BUILT_IN_MODF)
	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	    CASE_BUILTIN_F (BUILT_IN_POW)
	    CASE_BUILTIN_F (BUILT_IN_RINT)
	    CASE_BUILTIN_F (BUILT_IN_ROUND)
	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	    CASE_BUILTIN_F (BUILT_IN_SINH)
	    CASE_BUILTIN_F (BUILT_IN_TANH)
	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the first argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if either the first or the second argument is
		 nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if both the first and the second arguments are
		 nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the second argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know the sign of `t', so be conservative and return
     false.  */
  return 0;
}
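
/* Usage sketch (hypothetical, not part of the original file): for a
   CALL_EXPR representing pow (fabs (a), b), the switch above returns
   1: BUILT_IN_POW is in the "true if the first argument is
   nonnegative" group, and the recursive query on fabs (a) hits the
   "always true" group via BUILT_IN_FABS.  */
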
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of the operands must be positive and the other
	     non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      /* Weak declarations may link to NULL.  */
      if (DECL_P (TREE_OPERAND (t, 0)))
	return !DECL_WEAK (TREE_OPERAND (t, 0));
      /* Constants and all other cases are never weak.  */
      return true;

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    default:
      break;
    }

  return false;
}
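
/* Illustrative example (not part of the original file): for `int x;'
   the address &x cannot be null, so the ADDR_EXPR case above answers
   true and fold can reduce `&x != 0' to 1.  For `int w __attribute__
   ((weak));' the definition may be absent and &w may legitimately
   compare equal to null, so DECL_WEAK blocks the folding.  */
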
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
	return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
	int units, i;
	rtx elt;

	units = CONST_VECTOR_NUNITS (r);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (r, i);
	    if (!rtl_expr_nonnegative_p (elt))
	      return 0;
	  }

	return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}
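
/* Illustrative note (not part of the original file): a VOIDmode
   CONST_DOUBLE carries a double-word integer constant, so its sign
   lives in the high word and CONST_DOUBLE_HIGH (r) >= 0 is the right
   test; e.g. a double-word -1 has a negative high word and is
   rejected.  Modeful CONST_DOUBLEs are floating point, which this
   function conservatively declines to analyze.  */
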
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				 TREE_INT_CST_HIGH (arg0),
				 &low, &high);
      t = build_int_2 (low, high);
      TREE_TYPE (t) = type;
      TREE_OVERFLOW (t)
	= (TREE_OVERFLOW (arg0)
	   | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
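
/* Worked example (illustrative, not part of the original file): for a
   32-bit signed type, negating INT_MIN = -2^31 produces +2^31, which
   does not fit; force_fit_type wraps the value back into range and
   reports the overflow, so the result carries TREE_OVERFLOW.
   Negating an unsigned constant is simply taken modulo 2^32 and is
   not flagged, hence the !TYPE_UNSIGNED guard above.  */
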
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	return arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	return arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = build_int_2 (low, high);
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = (TREE_OVERFLOW (arg0)
	       | force_fit_type (t, overflow));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
	  return t;
	}
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    {
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	return arg0;
    }
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
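
/* Worked examples (illustrative, not part of the original file):
   abs (-5) takes the negation path and folds to 5; abs (7) returns
   ARG0 unchanged because -1 < 7; abs (INT_MIN) negates out of range
   and comes back with TREE_OVERFLOW set, mirroring fold_negate_const
   above.  */
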
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  int invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
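
  /* Worked example (illustrative, not part of the original): folding
     2 >= 3 becomes "do LT and invert", i.e. INVERT = 1 and CODE =
     LT_EXPR below; 2 < 3 computes to 1 and the final inversion yields
     0.  A GT such as 3 > 2 is instead swapped into 2 < 3, which is 1
     with no inversion.  */
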
  if (code == LE_EXPR || code == GT_EXPR)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     will check below in the one case where it matters.  */

  tem = NULL_TREE;
  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code);
    }

  /* Compute a result for LT or EQ if the arguments permit;
     otherwise leave TEM as NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
      else
	tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
			    ? INT_CST_LT_UNSIGNED (op0, op1)
			    : INT_CST_LT (op0, op1)),
			   0);
    }

  else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
	   && integer_zerop (op1) && tree_expr_nonzero_p (op0))
    tem = build_int_2 (0, 0);

  /* Two real constants can be compared explicitly.  */
  else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      /* If either operand is a NaN, the result is false with two
	 exceptions: First, an NE_EXPR is true on NaNs, but that case
	 is already handled correctly since we will be inverting the
	 result for NE_EXPR.  Second, if we had inverted a LE_EXPR
	 or a GE_EXPR into a LT_EXPR, we must return true so that it
	 will be inverted into false.  */

      if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
	  || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
	tem = build_int_2 (invert && code == LT_EXPR, 0);

      else if (code == EQ_EXPR)
	tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
					      TREE_REAL_CST (op1)),
			   0);
      else
	tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
					     TREE_REAL_CST (op1)),
			   0);
    }
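
  /* Worked example for the NaN rule (illustrative, not part of the
     original): folding NaN >= 1.0 arrives here with INVERT = 1 and
     CODE = LT_EXPR, so TEM is set to 1; the final inversion below
     turns it into 0, the correct (false) result for any ordered
     comparison involving a NaN.  */
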
  if (tem == NULL_TREE)
    return NULL_TREE;

  if (invert)
    TREE_INT_CST_LOW (tem) ^= 1;

  TREE_TYPE (tem) = type;
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return (*lang_hooks.truthvalue_conversion) (tem);
  return tem;
}

#include "gt-fold-const.h"