/* gcc/fold-const.c (official-gcc.git),
   blob 893aacca8e3cb58b7a2826c6629b3cea0e4f916d.  */
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding is a bitmask: bit 0 (1) = "less than", bit 1 (2) =
   "equal", bit 2 (4) = "greater than", bit 3 (8) = "unordered".
   E.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ, and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static hashval_t size_htab_hash (const void *);
93 static int size_htab_eq (const void *, const void *);
94 static tree fold_convert_const (enum tree_code, tree, tree);
95 static enum tree_code invert_tree_comparison (enum tree_code, bool);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
110 tree *, tree *);
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree make_range (tree, int *, tree *, tree *);
116 static tree build_range_check (tree, tree, int, tree, tree);
117 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
118 tree);
119 static tree fold_range_test (tree);
120 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
121 static tree unextend (tree, int, int, tree);
122 static tree fold_truthop (enum tree_code, tree, tree, tree);
123 static tree optimize_minmax_comparison (tree);
124 static tree extract_muldiv (tree, tree, enum tree_code, tree);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
126 static int multiple_of_p (tree, tree, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
128 tree, int);
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
131 tree, tree, tree);
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_relational_hi_lo (enum tree_code *, const tree,
139 tree *, tree *);
140 static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
163 /* Unpack a two-word integer into 4 words.
164 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
165 WORDS points to the array of HOST_WIDE_INTs. */
167 static void
168 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
170 words[0] = LOWPART (low);
171 words[1] = HIGHPART (low);
172 words[2] = LOWPART (hi);
173 words[3] = HIGHPART (hi);
176 /* Pack an array of 4 words into a two-word integer.
177 WORDS points to the array of words.
178 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
180 static void
181 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
182 HOST_WIDE_INT *hi)
184 *low = words[0] + words[1] * BASE;
185 *hi = words[2] + words[3] * BASE;
188 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
189 in overflow of the value, when >0 we are only interested in signed
190 overflow, for <0 we are interested in any overflow. OVERFLOWED
191 indicates whether overflow has already occurred. CONST_OVERFLOWED
192 indicates whether constant overflow has already occurred. We force
193 T's value to be within range of T's type (by setting to 0 or 1 all
194 the bits outside the type's range). We set TREE_OVERFLOWED if,
195 OVERFLOWED is non-zero,
196 or OVERFLOWABLE is >0 and signed overflow occurs
197 or OVERFLOWABLE is <0 and any overflow occurs
198 We set TREE_CONSTANT_OVERFLOWED if,
199 CONST_OVERFLOWED is non-zero
200 or we set TREE_OVERFLOWED.
201 We return either the original T, or a copy. */
203 tree
204 force_fit_type (tree t, int overflowable,
205 bool overflowed, bool overflowed_const)
207 unsigned HOST_WIDE_INT low;
208 HOST_WIDE_INT high;
209 unsigned int prec;
210 int sign_extended_type;
212 if (TREE_CODE (t) != INTEGER_CST)
213 abort ();
215 low = TREE_INT_CST_LOW (t);
216 high = TREE_INT_CST_HIGH (t);
218 if (POINTER_TYPE_P (TREE_TYPE (t))
219 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
220 prec = POINTER_SIZE;
221 else
222 prec = TYPE_PRECISION (TREE_TYPE (t));
223 /* Size types *are* sign extended. */
224 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
225 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
226 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
228 /* First clear all bits that are beyond the type's precision. */
230 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
232 else if (prec > HOST_BITS_PER_WIDE_INT)
233 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
234 else
236 high = 0;
237 if (prec < HOST_BITS_PER_WIDE_INT)
238 low &= ~((HOST_WIDE_INT) (-1) << prec);
241 if (!sign_extended_type)
242 /* No sign extension */;
243 else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
244 /* Correct width already. */;
245 else if (prec > HOST_BITS_PER_WIDE_INT)
247 /* Sign extend top half? */
248 if (high & ((unsigned HOST_WIDE_INT)1
249 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
250 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
252 else if (prec == HOST_BITS_PER_WIDE_INT)
254 if ((HOST_WIDE_INT)low < 0)
255 high = -1;
257 else
259 /* Sign extend bottom half? */
260 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
262 high = -1;
263 low |= (HOST_WIDE_INT)(-1) << prec;
267 /* If the value changed, return a new node. */
268 if (overflowed || overflowed_const
269 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
271 t = build_int_cst (TREE_TYPE (t), low, high);
273 if (overflowed
274 || overflowable < 0
275 || (overflowable > 0 && sign_extended_type))
277 t = copy_node (t);
278 TREE_OVERFLOW (t) = 1;
279 TREE_CONSTANT_OVERFLOW (t) = 1;
281 else if (overflowed_const)
283 t = copy_node (t);
284 TREE_CONSTANT_OVERFLOW (t) = 1;
288 return t;
291 /* Add two doubleword integers with doubleword result.
292 Each argument is given as two `HOST_WIDE_INT' pieces.
293 One argument is L1 and H1; the other, L2 and H2.
294 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
297 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
298 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
299 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
301 unsigned HOST_WIDE_INT l;
302 HOST_WIDE_INT h;
304 l = l1 + l2;
305 h = h1 + h2 + (l < l1);
307 *lv = l;
308 *hv = h;
309 return OVERFLOW_SUM_SIGN (h1, h2, h);
312 /* Negate a doubleword integer with doubleword result.
313 Return nonzero if the operation overflows, assuming it's signed.
314 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
315 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
318 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
319 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
321 if (l1 == 0)
323 *lv = 0;
324 *hv = - h1;
325 return (*hv & h1) < 0;
327 else
329 *lv = -l1;
330 *hv = ~h1;
331 return 0;
335 /* Multiply two doubleword integers with doubleword result.
336 Return nonzero if the operation overflows, assuming it's signed.
337 Each argument is given as two `HOST_WIDE_INT' pieces.
338 One argument is L1 and H1; the other, L2 and H2.
339 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
342 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
343 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
344 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
346 HOST_WIDE_INT arg1[4];
347 HOST_WIDE_INT arg2[4];
348 HOST_WIDE_INT prod[4 * 2];
349 unsigned HOST_WIDE_INT carry;
350 int i, j, k;
351 unsigned HOST_WIDE_INT toplow, neglow;
352 HOST_WIDE_INT tophigh, neghigh;
354 encode (arg1, l1, h1);
355 encode (arg2, l2, h2);
357 memset (prod, 0, sizeof prod);
359 for (i = 0; i < 4; i++)
361 carry = 0;
362 for (j = 0; j < 4; j++)
364 k = i + j;
365 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
366 carry += arg1[i] * arg2[j];
367 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
368 carry += prod[k];
369 prod[k] = LOWPART (carry);
370 carry = HIGHPART (carry);
372 prod[i + 4] = carry;
375 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
377 /* Check for overflow by calculating the top half of the answer in full;
378 it should agree with the low half's sign bit. */
379 decode (prod + 4, &toplow, &tophigh);
380 if (h1 < 0)
382 neg_double (l2, h2, &neglow, &neghigh);
383 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
385 if (h2 < 0)
387 neg_double (l1, h1, &neglow, &neghigh);
388 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
390 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
393 /* Shift the doubleword integer in L1, H1 left by COUNT places
394 keeping only PREC bits of result.
395 Shift right if COUNT is negative.
396 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
397 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
399 void
400 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
401 HOST_WIDE_INT count, unsigned int prec,
402 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
404 unsigned HOST_WIDE_INT signmask;
406 if (count < 0)
408 rshift_double (l1, h1, -count, prec, lv, hv, arith);
409 return;
412 if (SHIFT_COUNT_TRUNCATED)
413 count %= prec;
415 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
417 /* Shifting by the host word size is undefined according to the
418 ANSI standard, so we must handle this as a special case. */
419 *hv = 0;
420 *lv = 0;
422 else if (count >= HOST_BITS_PER_WIDE_INT)
424 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
425 *lv = 0;
427 else
429 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
430 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
431 *lv = l1 << count;
434 /* Sign extend all bits that are beyond the precision. */
436 signmask = -((prec > HOST_BITS_PER_WIDE_INT
437 ? ((unsigned HOST_WIDE_INT) *hv
438 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
439 : (*lv >> (prec - 1))) & 1);
441 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
443 else if (prec >= HOST_BITS_PER_WIDE_INT)
445 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
446 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
448 else
450 *hv = signmask;
451 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
452 *lv |= signmask << prec;
456 /* Shift the doubleword integer in L1, H1 right by COUNT places
457 keeping only PREC bits of result. COUNT must be positive.
458 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
459 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
461 void
462 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
463 HOST_WIDE_INT count, unsigned int prec,
464 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
465 int arith)
467 unsigned HOST_WIDE_INT signmask;
469 signmask = (arith
470 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
471 : 0);
473 if (SHIFT_COUNT_TRUNCATED)
474 count %= prec;
476 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
478 /* Shifting by the host word size is undefined according to the
479 ANSI standard, so we must handle this as a special case. */
480 *hv = 0;
481 *lv = 0;
483 else if (count >= HOST_BITS_PER_WIDE_INT)
485 *hv = 0;
486 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
488 else
490 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
491 *lv = ((l1 >> count)
492 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
495 /* Zero / sign extend all bits that are beyond the precision. */
497 if (count >= (HOST_WIDE_INT)prec)
499 *hv = signmask;
500 *lv = signmask;
502 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
504 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
506 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
507 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
509 else
511 *hv = signmask;
512 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
513 *lv |= signmask << (prec - count);
517 /* Rotate the doubleword integer in L1, H1 left by COUNT places
518 keeping only PREC bits of result.
519 Rotate right if COUNT is negative.
520 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
522 void
523 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
524 HOST_WIDE_INT count, unsigned int prec,
525 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
527 unsigned HOST_WIDE_INT s1l, s2l;
528 HOST_WIDE_INT s1h, s2h;
530 count %= prec;
531 if (count < 0)
532 count += prec;
534 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
535 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
536 *lv = s1l | s2l;
537 *hv = s1h | s2h;
540 /* Rotate the doubleword integer in L1, H1 left by COUNT places
541 keeping only PREC bits of result. COUNT must be positive.
542 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
544 void
545 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
546 HOST_WIDE_INT count, unsigned int prec,
547 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
549 unsigned HOST_WIDE_INT s1l, s2l;
550 HOST_WIDE_INT s1h, s2h;
552 count %= prec;
553 if (count < 0)
554 count += prec;
556 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
557 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
558 *lv = s1l | s2l;
559 *hv = s1h | s2h;
562 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
563 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
564 CODE is a tree code for a kind of division, one of
565 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
566 or EXACT_DIV_EXPR
567 It controls how the quotient is rounded to an integer.
568 Return nonzero if the operation overflows.
569 UNS nonzero says do unsigned division. */
572 div_and_round_double (enum tree_code code, int uns,
573 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
574 HOST_WIDE_INT hnum_orig,
575 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
576 HOST_WIDE_INT hden_orig,
577 unsigned HOST_WIDE_INT *lquo,
578 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
579 HOST_WIDE_INT *hrem)
581 int quo_neg = 0;
582 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
583 HOST_WIDE_INT den[4], quo[4];
584 int i, j;
585 unsigned HOST_WIDE_INT work;
586 unsigned HOST_WIDE_INT carry = 0;
587 unsigned HOST_WIDE_INT lnum = lnum_orig;
588 HOST_WIDE_INT hnum = hnum_orig;
589 unsigned HOST_WIDE_INT lden = lden_orig;
590 HOST_WIDE_INT hden = hden_orig;
591 int overflow = 0;
593 if (hden == 0 && lden == 0)
594 overflow = 1, lden = 1;
596 /* Calculate quotient sign and convert operands to unsigned. */
597 if (!uns)
599 if (hnum < 0)
601 quo_neg = ~ quo_neg;
602 /* (minimum integer) / (-1) is the only overflow case. */
603 if (neg_double (lnum, hnum, &lnum, &hnum)
604 && ((HOST_WIDE_INT) lden & hden) == -1)
605 overflow = 1;
607 if (hden < 0)
609 quo_neg = ~ quo_neg;
610 neg_double (lden, hden, &lden, &hden);
614 if (hnum == 0 && hden == 0)
615 { /* single precision */
616 *hquo = *hrem = 0;
617 /* This unsigned division rounds toward zero. */
618 *lquo = lnum / lden;
619 goto finish_up;
622 if (hnum == 0)
623 { /* trivial case: dividend < divisor */
624 /* hden != 0 already checked. */
625 *hquo = *lquo = 0;
626 *hrem = hnum;
627 *lrem = lnum;
628 goto finish_up;
631 memset (quo, 0, sizeof quo);
633 memset (num, 0, sizeof num); /* to zero 9th element */
634 memset (den, 0, sizeof den);
636 encode (num, lnum, hnum);
637 encode (den, lden, hden);
639 /* Special code for when the divisor < BASE. */
640 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
642 /* hnum != 0 already checked. */
643 for (i = 4 - 1; i >= 0; i--)
645 work = num[i] + carry * BASE;
646 quo[i] = work / lden;
647 carry = work % lden;
650 else
652 /* Full double precision division,
653 with thanks to Don Knuth's "Seminumerical Algorithms". */
654 int num_hi_sig, den_hi_sig;
655 unsigned HOST_WIDE_INT quo_est, scale;
657 /* Find the highest nonzero divisor digit. */
658 for (i = 4 - 1;; i--)
659 if (den[i] != 0)
661 den_hi_sig = i;
662 break;
665 /* Insure that the first digit of the divisor is at least BASE/2.
666 This is required by the quotient digit estimation algorithm. */
668 scale = BASE / (den[den_hi_sig] + 1);
669 if (scale > 1)
670 { /* scale divisor and dividend */
671 carry = 0;
672 for (i = 0; i <= 4 - 1; i++)
674 work = (num[i] * scale) + carry;
675 num[i] = LOWPART (work);
676 carry = HIGHPART (work);
679 num[4] = carry;
680 carry = 0;
681 for (i = 0; i <= 4 - 1; i++)
683 work = (den[i] * scale) + carry;
684 den[i] = LOWPART (work);
685 carry = HIGHPART (work);
686 if (den[i] != 0) den_hi_sig = i;
690 num_hi_sig = 4;
692 /* Main loop */
693 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
695 /* Guess the next quotient digit, quo_est, by dividing the first
696 two remaining dividend digits by the high order quotient digit.
697 quo_est is never low and is at most 2 high. */
698 unsigned HOST_WIDE_INT tmp;
700 num_hi_sig = i + den_hi_sig + 1;
701 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
702 if (num[num_hi_sig] != den[den_hi_sig])
703 quo_est = work / den[den_hi_sig];
704 else
705 quo_est = BASE - 1;
707 /* Refine quo_est so it's usually correct, and at most one high. */
708 tmp = work - quo_est * den[den_hi_sig];
709 if (tmp < BASE
710 && (den[den_hi_sig - 1] * quo_est
711 > (tmp * BASE + num[num_hi_sig - 2])))
712 quo_est--;
714 /* Try QUO_EST as the quotient digit, by multiplying the
715 divisor by QUO_EST and subtracting from the remaining dividend.
716 Keep in mind that QUO_EST is the I - 1st digit. */
718 carry = 0;
719 for (j = 0; j <= den_hi_sig; j++)
721 work = quo_est * den[j] + carry;
722 carry = HIGHPART (work);
723 work = num[i + j] - LOWPART (work);
724 num[i + j] = LOWPART (work);
725 carry += HIGHPART (work) != 0;
728 /* If quo_est was high by one, then num[i] went negative and
729 we need to correct things. */
730 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
732 quo_est--;
733 carry = 0; /* add divisor back in */
734 for (j = 0; j <= den_hi_sig; j++)
736 work = num[i + j] + den[j] + carry;
737 carry = HIGHPART (work);
738 num[i + j] = LOWPART (work);
741 num [num_hi_sig] += carry;
744 /* Store the quotient digit. */
745 quo[i] = quo_est;
749 decode (quo, lquo, hquo);
751 finish_up:
752 /* If result is negative, make it so. */
753 if (quo_neg)
754 neg_double (*lquo, *hquo, lquo, hquo);
756 /* Compute trial remainder: rem = num - (quo * den) */
757 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
758 neg_double (*lrem, *hrem, lrem, hrem);
759 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
761 switch (code)
763 case TRUNC_DIV_EXPR:
764 case TRUNC_MOD_EXPR: /* round toward zero */
765 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
766 return overflow;
768 case FLOOR_DIV_EXPR:
769 case FLOOR_MOD_EXPR: /* round toward negative infinity */
770 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
772 /* quo = quo - 1; */
773 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
774 lquo, hquo);
776 else
777 return overflow;
778 break;
780 case CEIL_DIV_EXPR:
781 case CEIL_MOD_EXPR: /* round toward positive infinity */
782 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
784 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
785 lquo, hquo);
787 else
788 return overflow;
789 break;
791 case ROUND_DIV_EXPR:
792 case ROUND_MOD_EXPR: /* round to closest integer */
794 unsigned HOST_WIDE_INT labs_rem = *lrem;
795 HOST_WIDE_INT habs_rem = *hrem;
796 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
797 HOST_WIDE_INT habs_den = hden, htwice;
799 /* Get absolute values. */
800 if (*hrem < 0)
801 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
802 if (hden < 0)
803 neg_double (lden, hden, &labs_den, &habs_den);
805 /* If (2 * abs (lrem) >= abs (lden)) */
806 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
807 labs_rem, habs_rem, &ltwice, &htwice);
809 if (((unsigned HOST_WIDE_INT) habs_den
810 < (unsigned HOST_WIDE_INT) htwice)
811 || (((unsigned HOST_WIDE_INT) habs_den
812 == (unsigned HOST_WIDE_INT) htwice)
813 && (labs_den < ltwice)))
815 if (*hquo < 0)
816 /* quo = quo - 1; */
817 add_double (*lquo, *hquo,
818 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
819 else
820 /* quo = quo + 1; */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
822 lquo, hquo);
824 else
825 return overflow;
827 break;
829 default:
830 abort ();
833 /* Compute true remainder: rem = num - (quo * den) */
834 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
835 neg_double (*lrem, *hrem, lrem, hrem);
836 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
837 return overflow;
840 /* Return true if built-in mathematical function specified by CODE
841 preserves the sign of it argument, i.e. -f(x) == f(-x). */
843 static bool
844 negate_mathfn_p (enum built_in_function code)
846 switch (code)
848 case BUILT_IN_ASIN:
849 case BUILT_IN_ASINF:
850 case BUILT_IN_ASINL:
851 case BUILT_IN_ATAN:
852 case BUILT_IN_ATANF:
853 case BUILT_IN_ATANL:
854 case BUILT_IN_SIN:
855 case BUILT_IN_SINF:
856 case BUILT_IN_SINL:
857 case BUILT_IN_TAN:
858 case BUILT_IN_TANF:
859 case BUILT_IN_TANL:
860 return true;
862 default:
863 break;
865 return false;
868 /* Determine whether an expression T can be cheaply negated using
869 the function negate_expr. */
871 static bool
872 negate_expr_p (tree t)
874 unsigned HOST_WIDE_INT val;
875 unsigned int prec;
876 tree type;
878 if (t == 0)
879 return false;
881 type = TREE_TYPE (t);
883 STRIP_SIGN_NOPS (t);
884 switch (TREE_CODE (t))
886 case INTEGER_CST:
887 if (TYPE_UNSIGNED (type) || ! flag_trapv)
888 return true;
890 /* Check that -CST will not overflow type. */
891 prec = TYPE_PRECISION (type);
892 if (prec > HOST_BITS_PER_WIDE_INT)
894 if (TREE_INT_CST_LOW (t) != 0)
895 return true;
896 prec -= HOST_BITS_PER_WIDE_INT;
897 val = TREE_INT_CST_HIGH (t);
899 else
900 val = TREE_INT_CST_LOW (t);
901 if (prec < HOST_BITS_PER_WIDE_INT)
902 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
903 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
905 case REAL_CST:
906 case NEGATE_EXPR:
907 return true;
909 case COMPLEX_CST:
910 return negate_expr_p (TREE_REALPART (t))
911 && negate_expr_p (TREE_IMAGPART (t));
913 case PLUS_EXPR:
914 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
915 return false;
916 /* -(A + B) -> (-B) - A. */
917 if (negate_expr_p (TREE_OPERAND (t, 1))
918 && reorder_operands_p (TREE_OPERAND (t, 0),
919 TREE_OPERAND (t, 1)))
920 return true;
921 /* -(A + B) -> (-A) - B. */
922 return negate_expr_p (TREE_OPERAND (t, 0));
924 case MINUS_EXPR:
925 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
926 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
927 && reorder_operands_p (TREE_OPERAND (t, 0),
928 TREE_OPERAND (t, 1));
930 case MULT_EXPR:
931 if (TYPE_UNSIGNED (TREE_TYPE (t)))
932 break;
934 /* Fall through. */
936 case RDIV_EXPR:
937 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
938 return negate_expr_p (TREE_OPERAND (t, 1))
939 || negate_expr_p (TREE_OPERAND (t, 0));
940 break;
942 case NOP_EXPR:
943 /* Negate -((double)float) as (double)(-float). */
944 if (TREE_CODE (type) == REAL_TYPE)
946 tree tem = strip_float_extensions (t);
947 if (tem != t)
948 return negate_expr_p (tem);
950 break;
952 case CALL_EXPR:
953 /* Negate -f(x) as f(-x). */
954 if (negate_mathfn_p (builtin_mathfn_code (t)))
955 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
956 break;
958 case RSHIFT_EXPR:
959 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
960 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
962 tree op1 = TREE_OPERAND (t, 1);
963 if (TREE_INT_CST_HIGH (op1) == 0
964 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
965 == TREE_INT_CST_LOW (op1))
966 return true;
968 break;
970 default:
971 break;
973 return false;
976 /* Given T, an expression, return the negation of T. Allow for T to be
977 null, in which case return null. */
979 static tree
980 negate_expr (tree t)
982 tree type;
983 tree tem;
985 if (t == 0)
986 return 0;
988 type = TREE_TYPE (t);
989 STRIP_SIGN_NOPS (t);
991 switch (TREE_CODE (t))
993 case INTEGER_CST:
994 tem = fold_negate_const (t, type);
995 if (! TREE_OVERFLOW (tem)
996 || TYPE_UNSIGNED (type)
997 || ! flag_trapv)
998 return tem;
999 break;
1001 case REAL_CST:
1002 tem = fold_negate_const (t, type);
1003 /* Two's complement FP formats, such as c4x, may overflow. */
1004 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1005 return fold_convert (type, tem);
1006 break;
1008 case COMPLEX_CST:
1010 tree rpart = negate_expr (TREE_REALPART (t));
1011 tree ipart = negate_expr (TREE_IMAGPART (t));
1013 if ((TREE_CODE (rpart) == REAL_CST
1014 && TREE_CODE (ipart) == REAL_CST)
1015 || (TREE_CODE (rpart) == INTEGER_CST
1016 && TREE_CODE (ipart) == INTEGER_CST))
1017 return build_complex (type, rpart, ipart);
1019 break;
1021 case NEGATE_EXPR:
1022 return fold_convert (type, TREE_OPERAND (t, 0));
1024 case PLUS_EXPR:
1025 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1027 /* -(A + B) -> (-B) - A. */
1028 if (negate_expr_p (TREE_OPERAND (t, 1))
1029 && reorder_operands_p (TREE_OPERAND (t, 0),
1030 TREE_OPERAND (t, 1)))
1032 tem = negate_expr (TREE_OPERAND (t, 1));
1033 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1034 tem, TREE_OPERAND (t, 0)));
1035 return fold_convert (type, tem);
1038 /* -(A + B) -> (-A) - B. */
1039 if (negate_expr_p (TREE_OPERAND (t, 0)))
1041 tem = negate_expr (TREE_OPERAND (t, 0));
1042 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1043 tem, TREE_OPERAND (t, 1)));
1044 return fold_convert (type, tem);
1047 break;
1049 case MINUS_EXPR:
1050 /* - (A - B) -> B - A */
1051 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1052 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1053 return fold_convert (type,
1054 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1055 TREE_OPERAND (t, 1),
1056 TREE_OPERAND (t, 0))));
1057 break;
1059 case MULT_EXPR:
1060 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1061 break;
1063 /* Fall through. */
1065 case RDIV_EXPR:
1066 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1068 tem = TREE_OPERAND (t, 1);
1069 if (negate_expr_p (tem))
1070 return fold_convert (type,
1071 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1072 TREE_OPERAND (t, 0),
1073 negate_expr (tem))));
1074 tem = TREE_OPERAND (t, 0);
1075 if (negate_expr_p (tem))
1076 return fold_convert (type,
1077 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1078 negate_expr (tem),
1079 TREE_OPERAND (t, 1))));
1081 break;
1083 case NOP_EXPR:
1084 /* Convert -((double)float) into (double)(-float). */
1085 if (TREE_CODE (type) == REAL_TYPE)
1087 tem = strip_float_extensions (t);
1088 if (tem != t && negate_expr_p (tem))
1089 return fold_convert (type, negate_expr (tem));
1091 break;
1093 case CALL_EXPR:
1094 /* Negate -f(x) as f(-x). */
1095 if (negate_mathfn_p (builtin_mathfn_code (t))
1096 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1098 tree fndecl, arg, arglist;
1100 fndecl = get_callee_fndecl (t);
1101 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1102 arglist = build_tree_list (NULL_TREE, arg);
1103 return build_function_call_expr (fndecl, arglist);
1105 break;
1107 case RSHIFT_EXPR:
1108 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1109 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1111 tree op1 = TREE_OPERAND (t, 1);
1112 if (TREE_INT_CST_HIGH (op1) == 0
1113 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1114 == TREE_INT_CST_LOW (op1))
1116 tree ntype = TYPE_UNSIGNED (type)
1117 ? lang_hooks.types.signed_type (type)
1118 : lang_hooks.types.unsigned_type (type);
1119 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1120 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1121 return fold_convert (type, temp);
1124 break;
1126 default:
1127 break;
1130 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1131 return fold_convert (type, tem);
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  /* Start with all output parts empty.  */
  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* NEG1_P is set when IN is a MINUS_EXPR, i.e. OP1 is subtracted;
	 whichever part OP1 ends up in must then be negated.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  A negated literal moves to
	 *MINUS_LITP rather than being wrapped in a NEGATE_EXPR.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating the whole of IN swaps the two literal slots and
	 negates the constant and variable parts.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
1229 /* Re-associate trees split by the above function. T1 and T2 are either
1230 expressions to associate or null. Return the new expression, if any. If
1231 we build an operation, do it in TYPE and with CODE. */
1233 static tree
1234 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1236 if (t1 == 0)
1237 return t2;
1238 else if (t2 == 0)
1239 return t1;
1241 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1242 try to fold this since we will have infinite recursion. But do
1243 deal with any NEGATE_EXPRs. */
1244 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1245 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1247 if (code == PLUS_EXPR)
1249 if (TREE_CODE (t1) == NEGATE_EXPR)
1250 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1251 fold_convert (type, TREE_OPERAND (t1, 0)));
1252 else if (TREE_CODE (t2) == NEGATE_EXPR)
1253 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1254 fold_convert (type, TREE_OPERAND (t2, 0)));
1256 return build2 (code, type, fold_convert (type, t1),
1257 fold_convert (type, t2));
1260 return fold (build2 (code, type, fold_convert (type, t1),
1261 fold_convert (type, t2)));
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Each INTEGER_CST is represented as a (low, high) pair of
     HOST_WIDE_INTs; all arithmetic below works on that pair.  */
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;	/* NOTE(review): set for shifts but never read here.  */

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is performed as a left shift by a negated count.  */
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      /* Likewise, rotate right is rotate left by a negated count.  */
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Compute ARG1 + (-ARG2) and derive the overflow flag from the
	 signs of the summands and the result.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
	 non-negative and fitting in a single HOST_WIDE_INT.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      /* Division by one, and x/x for nonzero x, need no computation.  */
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      /* Same divider as above, but keep the remainder and discard
	 the quotient.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW temporarily holds the predicate ARG1 < ARG2, compared
	 according to the type's signedness.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      /* Pick ARG1 when (ARG1 < ARG2) matches wanting the minimum.  */
      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  t = build_int_cst (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    /* Truncate to the type's precision and let force_fit_type set
       the overflow bits.  */
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
   Returns NULL_TREE (or 0) when the operation cannot be folded safely.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      /* Overflow in either operand taints the result.  */
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  /* Componentwise addition.  */
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  /* Componentwise subtraction.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i)*(r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    /* Divide by multiplying through by the conjugate of the
	       divisor; MAGSQUARED is |r2 + i2*i|^2 = r2*r2 + i2*i2.  */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    /* Integral complex types divide with TRUNC_DIV_EXPR.  */
	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}
1582 /* These are the hash table functions for the hash table of INTEGER_CST
1583 nodes of a sizetype. */
1585 /* Return the hash code code X, an INTEGER_CST. */
1587 static hashval_t
1588 size_htab_hash (const void *x)
1590 tree t = (tree) x;
1592 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1593 ^ htab_hash_pointer (TREE_TYPE (t))
1594 ^ (TREE_OVERFLOW (t) << 20));
1597 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1598 is the same as that given by *Y, which is the same. */
1600 static int
1601 size_htab_eq (const void *x, const void *y)
1603 tree xt = (tree) x;
1604 tree yt = (tree) y;
1606 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1607 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1608 && TREE_TYPE (xt) == TREE_TYPE (yt)
1609 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1612 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1613 bits are given by NUMBER and of the sizetype represented by KIND. */
1615 tree
1616 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1618 return size_int_type (number, sizetype_tab[(int) kind]);
/* Likewise, but the desired type is specified explicitly.  */

/* Scratch INTEGER_CST reused for hash-table probes, and the table of
   cached size constants.  Both are GC roots.  */
static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type (HOST_WIDE_INT number, tree type)
{
  void **slot;
  unsigned int prec;
  HOST_WIDE_INT high;
  unsigned HOST_WIDE_INT low;

  /* Create the table and the scratch node lazily on first use.  */
  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  prec = TYPE_PRECISION (type);
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const) = 0;
  low = number;
  if (number >= 0)
    high = 0;
  else
    {
      /* Sizetype IS sign extended.  */
      high = -1;
      if (prec <= HOST_BITS_PER_WIDE_INT)
	low |= (HOST_WIDE_INT)(-1) << (prec - 1);
    }
  TREE_INT_CST_LOW (new_const) = low;
  TREE_INT_CST_HIGH (new_const) = high;

  /* If truncating to PREC bits changed the value, flag overflow.  */
  if (low != (unsigned HOST_WIDE_INT)number
      || high != (number < 0 ? -1 : 0))
    TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const) = 1;

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      /* The scratch node becomes the cached constant; allocate a fresh
	 scratch node for the next call.  */
      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
1677 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1678 is a tree code. The type of the result is taken from the operands.
1679 Both must be the same type integer type and it must be a size type.
1680 If the operands are constant, so is the result. */
1682 tree
1683 size_binop (enum tree_code code, tree arg0, tree arg1)
1685 tree type = TREE_TYPE (arg0);
1687 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1688 || type != TREE_TYPE (arg1))
1689 abort ();
1691 /* Handle the special case of two integer constants faster. */
1692 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1694 /* And some specific cases even faster than that. */
1695 if (code == PLUS_EXPR && integer_zerop (arg0))
1696 return arg1;
1697 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1698 && integer_zerop (arg1))
1699 return arg0;
1700 else if (code == MULT_EXPR && integer_onep (arg0))
1701 return arg1;
1703 /* Handle general case of two integer constants. */
1704 return int_const_binop (code, arg0, arg1, 0);
1707 if (arg0 == error_mark_node || arg1 == error_mark_node)
1708 return error_mark_node;
1710 return fold (build2 (code, type, arg0, arg1));
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  /* Both operands must share a single sizetype.  */
  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  /* CTYPE is the signed counterpart of the operands' sizetype.  */
  ctype = (type == bitsizetype || type == ubitsizetype
	   ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    /* Negate by subtracting from zero in the signed type.  */
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* Nothing to do when the types already match.  */
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_cst (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1));

	  t = force_fit_type (t,
			      /* Don't set the overflow when
				 converting a pointer  */
			      !POINTER_TYPE_P (TREE_TYPE (arg1)),
			      /* Flag overflow when a negative value is
				 converted signed -> unsigned.  */
			      (TREE_INT_CST_HIGH (arg1) < 0
			       && (TYPE_UNSIGNED (type)
				   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			      | TREE_OVERFLOW (arg1),
			      TREE_CONSTANT_OVERFLOW (arg1));
	  return t;
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* The following code implements the floating point to integer
	     conversion rules required by the Java Language Specification,
	     that IEEE NaNs are mapped to zero and values that overflow
	     the target precision saturate, i.e. values greater than
	     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
	     are mapped to INT_MIN.  These semantics are allowed by the
	     C and C++ standards that simply state that the behavior of
	     FP-to-integer conversion is unspecified upon overflow.  */

	  HOST_WIDE_INT high, low;
	  REAL_VALUE_TYPE r;
	  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

	  /* Round X per the requested FIX variant before converting.  */
	  switch (code)
	    {
	    case FIX_TRUNC_EXPR:
	      real_trunc (&r, VOIDmode, &x);
	      break;

	    case FIX_CEIL_EXPR:
	      real_ceil (&r, VOIDmode, &x);
	      break;

	    case FIX_FLOOR_EXPR:
	      real_floor (&r, VOIDmode, &x);
	      break;

	    case FIX_ROUND_EXPR:
	      real_round (&r, VOIDmode, &x);
	      break;

	    default:
	      abort ();
	    }

	  /* If R is NaN, return zero and show we have an overflow.  */
	  if (REAL_VALUE_ISNAN (r))
	    {
	      overflow = 1;
	      high = 0;
	      low = 0;
	    }

	  /* See if R is less than the lower bound or greater than the
	     upper bound.  Saturate to the violated bound if so.  */

	  if (! overflow)
	    {
	      tree lt = TYPE_MIN_VALUE (type);
	      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
	      if (REAL_VALUES_LESS (r, l))
		{
		  overflow = 1;
		  high = TREE_INT_CST_HIGH (lt);
		  low = TREE_INT_CST_LOW (lt);
		}
	    }

	  if (! overflow)
	    {
	      tree ut = TYPE_MAX_VALUE (type);
	      if (ut)
		{
		  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
		  if (REAL_VALUES_LESS (u, r))
		    {
		      overflow = 1;
		      high = TREE_INT_CST_HIGH (ut);
		      low = TREE_INT_CST_LOW (ut);
		    }
		}
	    }

	  /* In range: convert the rounded value directly.  */
	  if (! overflow)
	    REAL_VALUE_TO_INT (&low, &high, r);

	  t = build_int_cst (type, low, high);

	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
			      TREE_CONSTANT_OVERFLOW (arg1));
	  return t;
	}
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  return NULL_TREE;
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Conversions between variants of the same type are plain NOPs.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  /* Complex -> scalar: convert the real part only.  */
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (orig) == VECTOR_TYPE
	  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
	return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
	return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
	return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			     type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  /* Complex -> real: keep only the real part.  */
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);
	}
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      /* Scalar -> complex: pair the converted value with zero.  */
      if (INTEGRAL_TYPE_P (orig)
	  || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == REAL_TYPE)
	return build2 (COMPLEX_EXPR, type,
		       fold_convert (TREE_TYPE (type), arg),
		       fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tree rpart, ipart;

	  if (TREE_CODE (arg) == COMPLEX_EXPR)
	    {
	      rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
	      ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
	      return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	    }

	  /* ARG is evaluated twice (real and imaginary part), so wrap
	     it in a SAVE_EXPR first.  */
	  arg = save_expr (arg);
	  rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
	  rpart = fold_convert (TREE_TYPE (type), rpart);
	  ipart = fold_convert (TREE_TYPE (type), ipart);
	  return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	}
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
	  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
	return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
	  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
	return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
  /* No valid conversion was found.  */
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* We only need to wrap lvalue tree codes.  Codes listed here fall
     through to the NON_LVALUE_EXPR wrapper below; anything else is
     returned unchanged.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return x;
    }
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
2063 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2064 Zero means allow extended lvalues. */
2066 int pedantic_lvalues;
2068 /* When pedantic, return an expr equal to X but certainly not valid as a
2069 pedantic lvalue. Otherwise, return X. */
2071 tree
2072 pedantic_non_lvalue (tree x)
2074 if (pedantic_lvalues)
2075 return non_lvalue (x);
2076 else
2077 return x;
2080 /* Given a tree comparison code, return the code that is the logical inverse
2081 of the given code. It is not safe to do this for floating-point
2082 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2083 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2085 static enum tree_code
2086 invert_tree_comparison (enum tree_code code, bool honor_nans)
2088 if (honor_nans && flag_trapping_math)
2089 return ERROR_MARK;
2091 switch (code)
2093 case EQ_EXPR:
2094 return NE_EXPR;
2095 case NE_EXPR:
2096 return EQ_EXPR;
2097 case GT_EXPR:
2098 return honor_nans ? UNLE_EXPR : LE_EXPR;
2099 case GE_EXPR:
2100 return honor_nans ? UNLT_EXPR : LT_EXPR;
2101 case LT_EXPR:
2102 return honor_nans ? UNGE_EXPR : GE_EXPR;
2103 case LE_EXPR:
2104 return honor_nans ? UNGT_EXPR : GT_EXPR;
2105 case LTGT_EXPR:
2106 return UNEQ_EXPR;
2107 case UNEQ_EXPR:
2108 return LTGT_EXPR;
2109 case UNGT_EXPR:
2110 return LE_EXPR;
2111 case UNGE_EXPR:
2112 return LT_EXPR;
2113 case UNLT_EXPR:
2114 return GE_EXPR;
2115 case UNLE_EXPR:
2116 return GT_EXPR;
2117 case ORDERED_EXPR:
2118 return UNORDERED_EXPR;
2119 case UNORDERED_EXPR:
2120 return ORDERED_EXPR;
2121 default:
2122 abort ();
2126 /* Similar, but return the comparison that results if the operands are
2127 swapped. This is safe for floating-point. */
2129 enum tree_code
2130 swap_tree_comparison (enum tree_code code)
2132 switch (code)
2134 case EQ_EXPR:
2135 case NE_EXPR:
2136 return code;
2137 case GT_EXPR:
2138 return LT_EXPR;
2139 case GE_EXPR:
2140 return LE_EXPR;
2141 case LT_EXPR:
2142 return GT_EXPR;
2143 case LE_EXPR:
2144 return GE_EXPR;
2145 default:
2146 abort ();
2151 /* Convert a comparison tree code from an enum tree_code representation
2152 into a compcode bit-based encoding. This function is the inverse of
2153 compcode_to_comparison. */
2155 static enum comparison_code
2156 comparison_to_compcode (enum tree_code code)
2158 switch (code)
2160 case LT_EXPR:
2161 return COMPCODE_LT;
2162 case EQ_EXPR:
2163 return COMPCODE_EQ;
2164 case LE_EXPR:
2165 return COMPCODE_LE;
2166 case GT_EXPR:
2167 return COMPCODE_GT;
2168 case NE_EXPR:
2169 return COMPCODE_NE;
2170 case GE_EXPR:
2171 return COMPCODE_GE;
2172 case ORDERED_EXPR:
2173 return COMPCODE_ORD;
2174 case UNORDERED_EXPR:
2175 return COMPCODE_UNORD;
2176 case UNLT_EXPR:
2177 return COMPCODE_UNLT;
2178 case UNEQ_EXPR:
2179 return COMPCODE_UNEQ;
2180 case UNLE_EXPR:
2181 return COMPCODE_UNLE;
2182 case UNGT_EXPR:
2183 return COMPCODE_UNGT;
2184 case LTGT_EXPR:
2185 return COMPCODE_LTGT;
2186 case UNGE_EXPR:
2187 return COMPCODE_UNGE;
2188 default:
2189 abort ();
2193 /* Convert a compcode bit-based encoding of a comparison operator back
2194 to GCC's enum tree_code representation. This function is the
2195 inverse of comparison_to_compcode. */
2197 static enum tree_code
2198 compcode_to_comparison (enum comparison_code code)
2200 switch (code)
2202 case COMPCODE_LT:
2203 return LT_EXPR;
2204 case COMPCODE_EQ:
2205 return EQ_EXPR;
2206 case COMPCODE_LE:
2207 return LE_EXPR;
2208 case COMPCODE_GT:
2209 return GT_EXPR;
2210 case COMPCODE_NE:
2211 return NE_EXPR;
2212 case COMPCODE_GE:
2213 return GE_EXPR;
2214 case COMPCODE_ORD:
2215 return ORDERED_EXPR;
2216 case COMPCODE_UNORD:
2217 return UNORDERED_EXPR;
2218 case COMPCODE_UNLT:
2219 return UNLT_EXPR;
2220 case COMPCODE_UNEQ:
2221 return UNEQ_EXPR;
2222 case COMPCODE_UNLE:
2223 return UNLE_EXPR;
2224 case COMPCODE_UNGT:
2225 return UNGT_EXPR;
2226 case COMPCODE_LTGT:
2227 return LTGT_EXPR;
2228 case COMPCODE_UNGE:
2229 return UNGE_EXPR;
2230 default:
2231 abort ();
2235 /* Return a tree for the comparison which is the combination of
2236 doing the AND or OR (depending on CODE) of the two operations LCODE
2237 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2238 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2239 if this makes the transformation invalid. */
2241 tree
2242 combine_comparisons (enum tree_code code, enum tree_code lcode,
2243 enum tree_code rcode, tree truth_type,
2244 tree ll_arg, tree lr_arg)
2246 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2247 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2248 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2249 enum comparison_code compcode;
2251 switch (code)
2253 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2254 compcode = lcompcode & rcompcode;
2255 break;
2257 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2258 compcode = lcompcode | rcompcode;
2259 break;
2261 default:
2262 return NULL_TREE;
2265 if (!honor_nans)
2267 /* Eliminate unordered comparisons, as well as LTGT and ORD
2268 which are not used unless the mode has NaNs. */
2269 compcode &= ~COMPCODE_UNORD;
2270 if (compcode == COMPCODE_LTGT)
2271 compcode = COMPCODE_NE;
2272 else if (compcode == COMPCODE_ORD)
2273 compcode = COMPCODE_TRUE;
2275 else if (flag_trapping_math)
2277 /* Check that the original operation and the optimized ones will trap
2278 under the same condition. */
2279 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2280 && (lcompcode != COMPCODE_EQ)
2281 && (lcompcode != COMPCODE_ORD);
2282 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2283 && (rcompcode != COMPCODE_EQ)
2284 && (rcompcode != COMPCODE_ORD);
2285 bool trap = (compcode & COMPCODE_UNORD) == 0
2286 && (compcode != COMPCODE_EQ)
2287 && (compcode != COMPCODE_ORD);
2289 /* In a short-circuited boolean expression the LHS might be
2290 such that the RHS, if evaluated, will never trap. For
2291 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2292 if neither x nor y is NaN. (This is a mixed blessing: for
2293 example, the expression above will never trap, hence
2294 optimizing it to x < y would be invalid). */
2295 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2296 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2297 rtrap = false;
2299 /* If the comparison was short-circuited, and only the RHS
2300 trapped, we may now generate a spurious trap. */
2301 if (rtrap && !ltrap
2302 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2303 return NULL_TREE;
2305 /* If we changed the conditions that cause a trap, we lose. */
2306 if ((ltrap || rtrap) != trap)
2307 return NULL_TREE;
2310 if (compcode == COMPCODE_TRUE)
2311 return constant_boolean_node (true, truth_type);
2312 else if (compcode == COMPCODE_FALSE)
2313 return constant_boolean_node (false, truth_type);
2314 else
2315 return fold (build2 (compcode_to_comparison (compcode),
2316 truth_type, ll_arg, lr_arg));
2319 /* Return nonzero if CODE is a tree code that represents a truth value. */
2321 static int
2322 truth_value_p (enum tree_code code)
2324 return (TREE_CODE_CLASS (code) == '<'
2325 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2326 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2327 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2330 /* Return nonzero if two operands (typically of the same tree node)
2331 are necessarily equal. If either argument has side-effects this
2332 function returns zero. FLAGS modifies behavior as follows:
2334 If OEP_ONLY_CONST is set, only return nonzero for constants.
2335 This function tests whether the operands are indistinguishable;
2336 it does not test whether they are equal using C's == operation.
2337 The distinction is important for IEEE floating point, because
2338 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2339 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2341 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2342 even though it may hold multiple values during a function.
2343 This is because a GCC tree node guarantees that nothing else is
2344 executed between the evaluation of its "operands" (which may often
2345 be evaluated in arbitrary order). Hence if the operands themselves
2346 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2347 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2348 unset means assuming isochronic (or instantaneous) tree equivalence.
2349 Unless comparing arbitrary expression trees, such as from different
2350 statements, this flag can usually be left unset.
2352 If OEP_PURE_SAME is set, then pure functions with identical arguments
2353 are considered the same. It is used when the caller has other ways
2354 to ensure that global memory is unchanged in between. */
2357 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2359 /* If one is specified and the other isn't, they aren't equal and if
2360 neither is specified, they are.
2362 ??? This is temporary and is meant only to handle the cases of the
2363 optional operands for COMPONENT_REF and ARRAY_REF. */
2364 if ((arg0 && !arg1) || (!arg0 && arg1))
2365 return 0;
2366 else if (!arg0 && !arg1)
2367 return 1;
2368 /* If either is ERROR_MARK, they aren't equal. */
2369 else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2370 return 0;
2372 /* If both types don't have the same signedness, then we can't consider
2373 them equal. We must check this before the STRIP_NOPS calls
2374 because they may change the signedness of the arguments. */
2375 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2376 return 0;
2378 STRIP_NOPS (arg0);
2379 STRIP_NOPS (arg1);
2381 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2382 /* This is needed for conversions and for COMPONENT_REF.
2383 Might as well play it safe and always test this. */
2384 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2385 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2386 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2387 return 0;
2389 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2390 We don't care about side effects in that case because the SAVE_EXPR
2391 takes care of that for us. In all other cases, two expressions are
2392 equal if they have no side effects. If we have two identical
2393 expressions with side effects that should be treated the same due
2394 to the only side effects being identical SAVE_EXPR's, that will
2395 be detected in the recursive calls below. */
2396 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2397 && (TREE_CODE (arg0) == SAVE_EXPR
2398 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2399 return 1;
2401 /* Next handle constant cases, those for which we can return 1 even
2402 if ONLY_CONST is set. */
2403 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2404 switch (TREE_CODE (arg0))
2406 case INTEGER_CST:
2407 return (! TREE_CONSTANT_OVERFLOW (arg0)
2408 && ! TREE_CONSTANT_OVERFLOW (arg1)
2409 && tree_int_cst_equal (arg0, arg1));
2411 case REAL_CST:
2412 return (! TREE_CONSTANT_OVERFLOW (arg0)
2413 && ! TREE_CONSTANT_OVERFLOW (arg1)
2414 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2415 TREE_REAL_CST (arg1)));
2417 case VECTOR_CST:
2419 tree v1, v2;
2421 if (TREE_CONSTANT_OVERFLOW (arg0)
2422 || TREE_CONSTANT_OVERFLOW (arg1))
2423 return 0;
2425 v1 = TREE_VECTOR_CST_ELTS (arg0);
2426 v2 = TREE_VECTOR_CST_ELTS (arg1);
/* NOTE(review): element chains of unequal length fall out of this
   loop and compare equal; presumably the TYPE_MODE check above
   guarantees equal element counts -- confirm.  */
2427 while (v1 && v2)
2429 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2430 flags))
2431 return 0;
2432 v1 = TREE_CHAIN (v1);
2433 v2 = TREE_CHAIN (v2);
2436 return 1;
2439 case COMPLEX_CST:
2440 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2441 flags)
2442 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2443 flags));
2445 case STRING_CST:
2446 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2447 && ! memcmp (TREE_STRING_POINTER (arg0),
2448 TREE_STRING_POINTER (arg1),
2449 TREE_STRING_LENGTH (arg0)));
2451 case ADDR_EXPR:
2452 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2454 default:
2455 break;
2458 if (flags & OEP_ONLY_CONST)
2459 return 0;
/* From here on, dispatch on the class of the (identical) tree codes.  */
2461 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2463 case '1':
2464 /* Two conversions are equal only if signedness and modes match. */
2465 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2466 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2467 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2468 return 0;
2470 return operand_equal_p (TREE_OPERAND (arg0, 0),
2471 TREE_OPERAND (arg1, 0), flags);
2473 case '<':
2474 case '2':
2475 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2476 TREE_OPERAND (arg1, 0), flags)
2477 && operand_equal_p (TREE_OPERAND (arg0, 1),
2478 TREE_OPERAND (arg1, 1), flags))
2479 return 1;
2481 /* For commutative ops, allow the other order. */
2482 return (commutative_tree_code (TREE_CODE (arg0))
2483 && operand_equal_p (TREE_OPERAND (arg0, 0),
2484 TREE_OPERAND (arg1, 1), flags)
2485 && operand_equal_p (TREE_OPERAND (arg0, 1),
2486 TREE_OPERAND (arg1, 0), flags));
2488 case 'r':
2489 /* If either of the pointer (or reference) expressions we are
2490 dereferencing contain a side effect, these cannot be equal. */
2491 if (TREE_SIDE_EFFECTS (arg0)
2492 || TREE_SIDE_EFFECTS (arg1))
2493 return 0;
2495 switch (TREE_CODE (arg0))
2497 case INDIRECT_REF:
2498 case REALPART_EXPR:
2499 case IMAGPART_EXPR:
2500 return operand_equal_p (TREE_OPERAND (arg0, 0),
2501 TREE_OPERAND (arg1, 0), flags);
2503 case ARRAY_REF:
2504 case ARRAY_RANGE_REF:
2505 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2506 TREE_OPERAND (arg1, 0), flags)
2507 && operand_equal_p (TREE_OPERAND (arg0, 1),
2508 TREE_OPERAND (arg1, 1), flags)
2509 && operand_equal_p (TREE_OPERAND (arg0, 2),
2510 TREE_OPERAND (arg1, 2), flags)
2511 && operand_equal_p (TREE_OPERAND (arg0, 3),
2512 TREE_OPERAND (arg1, 3), flags));
2515 case COMPONENT_REF:
2516 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2517 TREE_OPERAND (arg1, 0), flags)
2518 && operand_equal_p (TREE_OPERAND (arg0, 1),
2519 TREE_OPERAND (arg1, 1), flags)
2520 && operand_equal_p (TREE_OPERAND (arg0, 2),
2521 TREE_OPERAND (arg1, 2), flags));
2524 case BIT_FIELD_REF:
2525 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2526 TREE_OPERAND (arg1, 0), flags)
2527 && operand_equal_p (TREE_OPERAND (arg0, 1),
2528 TREE_OPERAND (arg1, 1), flags)
2529 && operand_equal_p (TREE_OPERAND (arg0, 2),
2530 TREE_OPERAND (arg1, 2), flags));
2531 default:
2532 return 0;
2535 case 'e':
2536 switch (TREE_CODE (arg0))
2538 case ADDR_EXPR:
2539 case TRUTH_NOT_EXPR:
2540 return operand_equal_p (TREE_OPERAND (arg0, 0),
2541 TREE_OPERAND (arg1, 0), flags);
2543 case TRUTH_ANDIF_EXPR:
2544 case TRUTH_ORIF_EXPR:
2545 return operand_equal_p (TREE_OPERAND (arg0, 0),
2546 TREE_OPERAND (arg1, 0), flags)
2547 && operand_equal_p (TREE_OPERAND (arg0, 1),
2548 TREE_OPERAND (arg1, 1), flags);
2550 case TRUTH_AND_EXPR:
2551 case TRUTH_OR_EXPR:
2552 case TRUTH_XOR_EXPR:
2553 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2554 TREE_OPERAND (arg1, 0), flags)
2555 && operand_equal_p (TREE_OPERAND (arg0, 1),
2556 TREE_OPERAND (arg1, 1), flags))
2557 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2558 TREE_OPERAND (arg1, 1), flags)
2559 && operand_equal_p (TREE_OPERAND (arg0, 1),
2560 TREE_OPERAND (arg1, 0), flags));
2562 case CALL_EXPR:
2563 /* If the CALL_EXPRs call different functions, then they
2564 clearly can not be equal. */
2565 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2566 TREE_OPERAND (arg1, 0), flags))
2567 return 0;
/* Only an ECF_CONST call (or, when OEP_PURE_SAME is set, an
   ECF_PURE call) can be considered equal to another call.  */
2570 unsigned int cef = call_expr_flags (arg0);
2571 if (flags & OEP_PURE_SAME)
2572 cef &= ECF_CONST | ECF_PURE;
2573 else
2574 cef &= ECF_CONST;
2575 if (!cef)
2576 return 0;
2579 /* Now see if all the arguments are the same. operand_equal_p
2580 does not handle TREE_LIST, so we walk the operands here
2581 feeding them to operand_equal_p. */
2582 arg0 = TREE_OPERAND (arg0, 1);
2583 arg1 = TREE_OPERAND (arg1, 1);
2584 while (arg0 && arg1)
2586 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2587 flags))
2588 return 0;
2590 arg0 = TREE_CHAIN (arg0);
2591 arg1 = TREE_CHAIN (arg1);
2594 /* If we get here and both argument lists are exhausted
2595 then the CALL_EXPRs are equal. */
2596 return ! (arg0 || arg1);
2598 default:
2599 return 0;
2602 case 'd':
2603 /* Consider __builtin_sqrt equal to sqrt. */
2604 return (TREE_CODE (arg0) == FUNCTION_DECL
2605 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2606 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2607 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2609 default:
2610 return 0;
2614 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2615 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2617 When in doubt, return 0. */
2619 static int
2620 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2622 int unsignedp1, unsignedpo;
2623 tree primarg0, primarg1, primother;
2624 unsigned int correct_width;
2626 if (operand_equal_p (arg0, arg1, 0))
2627 return 1;
/* shorten_compare only applies to integral comparisons.  */
2629 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2630 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2631 return 0;
2633 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2634 and see if the inner values are the same. This removes any
2635 signedness comparison, which doesn't matter here. */
2636 primarg0 = arg0, primarg1 = arg1;
2637 STRIP_NOPS (primarg0);
2638 STRIP_NOPS (primarg1);
2639 if (operand_equal_p (primarg0, primarg1, 0))
2640 return 1;
2642 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2643 actual comparison operand, ARG0.
2645 First throw away any conversions to wider types
2646 already present in the operands. */
2648 primarg1 = get_narrower (arg1, &unsignedp1);
2649 primother = get_narrower (other, &unsignedpo);
/* The original comparison was done in the precision of ARG1's type;
   only narrow if both narrowed operands fit below that width.  */
2651 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2652 if (unsignedp1 == unsignedpo
2653 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2654 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2656 tree type = TREE_TYPE (arg0);
2658 /* Make sure shorter operand is extended the right way
2659 to match the longer operand. */
2660 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2661 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2663 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2664 return 1;
2667 return 0;
2670 /* See if ARG is an expression that is either a comparison or is performing
2671 arithmetic on comparisons. The comparisons must only be comparing
2672 two different values, which will be stored in *CVAL1 and *CVAL2; if
2673 they are nonzero it means that some operands have already been found.
2674 No variables may be used anywhere else in the expression except in the
2675 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2676 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2678 If this is true, return 1. Otherwise, return zero. */
2680 static int
2681 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2683 enum tree_code code = TREE_CODE (arg);
2684 char class = TREE_CODE_CLASS (code);
2686 /* We can handle some of the 'e' cases here. */
2687 if (class == 'e' && code == TRUTH_NOT_EXPR)
2688 class = '1';
2689 else if (class == 'e'
2690 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2691 || code == COMPOUND_EXPR))
2692 class = '2';
2694 else if (class == 'e' && code == SAVE_EXPR
2695 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2697 /* If we've already found a CVAL1 or CVAL2, this expression is
2698 too complex to handle. */
2699 if (*cval1 || *cval2)
2700 return 0;
2702 class = '1';
2703 *save_p = 1;
2706 switch (class)
2708 case '1':
2709 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2711 case '2':
2712 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2713 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2714 cval1, cval2, save_p));
2716 case 'c':
2717 return 1;
2719 case 'e':
2720 if (code == COND_EXPR)
2721 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2722 cval1, cval2, save_p)
2723 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2724 cval1, cval2, save_p)
2725 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2726 cval1, cval2, save_p));
2727 return 0;
2729 case '<':
2730 /* First see if we can handle the first operand, then the second. For
2731 the second operand, we know *CVAL1 can't be zero. It must be that
2732 one side of the comparison is each of the values; test for the
2733 case where this isn't true by failing if the two operands
2734 are the same. */
/* NOTE(review): this listing appears to have dropped the bare ';'
   lines that form the empty bodies of the "already recorded" else-if
   arms below (original lines 2743, 2747, 2752, 2756) -- confirm
   against the upstream file before relying on this rendering.  */
2736 if (operand_equal_p (TREE_OPERAND (arg, 0),
2737 TREE_OPERAND (arg, 1), 0))
2738 return 0;
2740 if (*cval1 == 0)
2741 *cval1 = TREE_OPERAND (arg, 0);
2742 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2744 else if (*cval2 == 0)
2745 *cval2 = TREE_OPERAND (arg, 0);
2746 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2748 else
2749 return 0;
2751 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2753 else if (*cval2 == 0)
2754 *cval2 = TREE_OPERAND (arg, 1);
2755 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2757 else
2758 return 0;
2760 return 1;
2762 default:
2763 return 0;
2767 /* ARG is a tree that is known to contain just arithmetic operations and
2768 comparisons. Evaluate the operations in the tree substituting NEW0 for
2769 any occurrence of OLD0 as an operand of a comparison and likewise for
2770 NEW1 and OLD1. */
2772 static tree
2773 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2775 tree type = TREE_TYPE (arg);
2776 enum tree_code code = TREE_CODE (arg);
2777 char class = TREE_CODE_CLASS (code);
2779 /* We can handle some of the 'e' cases here. */
2780 if (class == 'e' && code == TRUTH_NOT_EXPR)
2781 class = '1';
2782 else if (class == 'e'
2783 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2784 class = '2';
2786 switch (class)
2788 case '1':
2789 return fold (build1 (code, type,
2790 eval_subst (TREE_OPERAND (arg, 0),
2791 old0, new0, old1, new1)));
2793 case '2':
2794 return fold (build2 (code, type,
2795 eval_subst (TREE_OPERAND (arg, 0),
2796 old0, new0, old1, new1),
2797 eval_subst (TREE_OPERAND (arg, 1),
2798 old0, new0, old1, new1)));
2800 case 'e':
2801 switch (code)
2803 case SAVE_EXPR:
2804 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2806 case COMPOUND_EXPR:
2807 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2809 case COND_EXPR:
2810 return fold (build3 (code, type,
2811 eval_subst (TREE_OPERAND (arg, 0),
2812 old0, new0, old1, new1),
2813 eval_subst (TREE_OPERAND (arg, 1),
2814 old0, new0, old1, new1),
2815 eval_subst (TREE_OPERAND (arg, 2),
2816 old0, new0, old1, new1)));
2817 default:
2818 break;
2820 /* Fall through - ??? */
2822 case '<':
2824 tree arg0 = TREE_OPERAND (arg, 0);
2825 tree arg1 = TREE_OPERAND (arg, 1);
2827 /* We need to check both for exact equality and tree equality. The
2828 former will be true if the operand has a side-effect. In that
2829 case, we know the operand occurred exactly once. */
2831 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2832 arg0 = new0;
2833 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2834 arg0 = new1;
2836 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2837 arg1 = new0;
2838 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2839 arg1 = new1;
2841 return fold (build2 (code, type, arg0, arg1));
2844 default:
2845 return arg;
2849 /* Return a tree for the case when the result of an expression is RESULT
2850 converted to TYPE and OMITTED was previously an operand of the expression
2851 but is now not needed (e.g., we folded OMITTED * 0).
2853 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2854 the conversion of RESULT to TYPE. */
2856 tree
2857 omit_one_operand (tree type, tree result, tree omitted)
2859 tree t = fold_convert (type, result);
2861 if (TREE_SIDE_EFFECTS (omitted))
2862 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2864 return non_lvalue (t);
2867 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2869 static tree
2870 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2872 tree t = fold_convert (type, result);
2874 if (TREE_SIDE_EFFECTS (omitted))
2875 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2877 return pedantic_non_lvalue (t);
2880 /* Return a tree for the case when the result of an expression is RESULT
2881 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2882 of the expression but are now not needed.
2884 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2885 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2886 evaluated before OMITTED2. Otherwise, if neither has side effects,
2887 just do the conversion of RESULT to TYPE. */
2889 tree
2890 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2892 tree t = fold_convert (type, result);
2894 if (TREE_SIDE_EFFECTS (omitted2))
2895 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2896 if (TREE_SIDE_EFFECTS (omitted1))
2897 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2899 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2903 /* Return a simplified tree node for the truth-negation of ARG. This
2904 never alters ARG itself. We assume that ARG is an operation that
2905 returns a truth value (0 or 1).
2907 FIXME: one would think we would fold the result, but it causes
2908 problems with the dominator optimizer. */
2909 tree
2910 invert_truthvalue (tree arg)
2912 tree type = TREE_TYPE (arg);
2913 enum tree_code code = TREE_CODE (arg);
2915 if (code == ERROR_MARK)
2916 return arg;
2918 /* If this is a comparison, we can simply invert it, except for
2919 floating-point non-equality comparisons, in which case we just
2920 enclose a TRUTH_NOT_EXPR around what we have. */
2922 if (TREE_CODE_CLASS (code) == '<')
2924 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2925 if (FLOAT_TYPE_P (op_type)
2926 && flag_trapping_math
2927 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2928 && code != NE_EXPR && code != EQ_EXPR)
2929 return build1 (TRUTH_NOT_EXPR, type, arg);
2930 else
2932 code = invert_tree_comparison (code,
2933 HONOR_NANS (TYPE_MODE (op_type)));
/* invert_tree_comparison returns ERROR_MARK when no single inverse
   comparison exists; fall back to an explicit TRUTH_NOT_EXPR.  */
2934 if (code == ERROR_MARK)
2935 return build1 (TRUTH_NOT_EXPR, type, arg);
2936 else
2937 return build2 (code, type,
2938 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2942 switch (code)
2944 case INTEGER_CST:
2945 return fold_convert (type,
2946 build_int_cst (NULL_TREE, integer_zerop (arg), 0));
2948 case TRUTH_AND_EXPR:
2949 return build2 (TRUTH_OR_EXPR, type,
2950 invert_truthvalue (TREE_OPERAND (arg, 0)),
2951 invert_truthvalue (TREE_OPERAND (arg, 1)));
2953 case TRUTH_OR_EXPR:
2954 return build2 (TRUTH_AND_EXPR, type,
2955 invert_truthvalue (TREE_OPERAND (arg, 0)),
2956 invert_truthvalue (TREE_OPERAND (arg, 1)));
2958 case TRUTH_XOR_EXPR:
2959 /* Here we can invert either operand. We invert the first operand
2960 unless the second operand is a TRUTH_NOT_EXPR in which case our
2961 result is the XOR of the first operand with the inside of the
2962 negation of the second operand. */
2964 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2965 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2966 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2967 else
2968 return build2 (TRUTH_XOR_EXPR, type,
2969 invert_truthvalue (TREE_OPERAND (arg, 0)),
2970 TREE_OPERAND (arg, 1));
2972 case TRUTH_ANDIF_EXPR:
2973 return build2 (TRUTH_ORIF_EXPR, type,
2974 invert_truthvalue (TREE_OPERAND (arg, 0)),
2975 invert_truthvalue (TREE_OPERAND (arg, 1)));
2977 case TRUTH_ORIF_EXPR:
2978 return build2 (TRUTH_ANDIF_EXPR, type,
2979 invert_truthvalue (TREE_OPERAND (arg, 0)),
2980 invert_truthvalue (TREE_OPERAND (arg, 1)));
2982 case TRUTH_NOT_EXPR:
2983 return TREE_OPERAND (arg, 0);
2985 case COND_EXPR:
2986 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2987 invert_truthvalue (TREE_OPERAND (arg, 1)),
2988 invert_truthvalue (TREE_OPERAND (arg, 2)));
2990 case COMPOUND_EXPR:
2991 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2992 invert_truthvalue (TREE_OPERAND (arg, 1)));
2994 case NON_LVALUE_EXPR:
2995 return invert_truthvalue (TREE_OPERAND (arg, 0));
2997 case NOP_EXPR:
2998 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2999 break;
/* FALLTHRU: a non-boolean NOP_EXPR is treated like CONVERT_EXPR,
   inverting inside the conversion.  A boolean NOP_EXPR breaks out
   to the generic TRUTH_NOT_EXPR code below.  */
3001 case CONVERT_EXPR:
3002 case FLOAT_EXPR:
3003 return build1 (TREE_CODE (arg), type,
3004 invert_truthvalue (TREE_OPERAND (arg, 0)));
3006 case BIT_AND_EXPR:
3007 if (!integer_onep (TREE_OPERAND (arg, 1)))
3008 break;
3009 return build2 (EQ_EXPR, type, arg,
3010 fold_convert (type, integer_zero_node));
3012 case SAVE_EXPR:
3013 return build1 (TRUTH_NOT_EXPR, type, arg);
3015 case CLEANUP_POINT_EXPR:
3016 return build1 (CLEANUP_POINT_EXPR, type,
3017 invert_truthvalue (TREE_OPERAND (arg, 0)));
3019 default:
3020 break;
/* Only boolean-typed values may reach here without a specific
   inversion rule; anything else indicates a caller bug.  */
3022 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
3023 abort ();
3024 return build1 (TRUTH_NOT_EXPR, type, arg);
3027 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3028 operands are another bit-wise operation with a common input. If so,
3029 distribute the bit operations to save an operation and possibly two if
3030 constants are involved. For example, convert
3031 (A | B) & (A | C) into A | (B & C)
3032 Further simplification will occur if B and C are constants.
3034 If this optimization cannot be done, 0 will be returned. */
3036 static tree
3037 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3039 tree common;
3040 tree left, right;
3042 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3043 || TREE_CODE (arg0) == code
3044 || (TREE_CODE (arg0) != BIT_AND_EXPR
3045 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3046 return 0;
3048 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3050 common = TREE_OPERAND (arg0, 0);
3051 left = TREE_OPERAND (arg0, 1);
3052 right = TREE_OPERAND (arg1, 1);
3054 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3056 common = TREE_OPERAND (arg0, 0);
3057 left = TREE_OPERAND (arg0, 1);
3058 right = TREE_OPERAND (arg1, 0);
3060 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3062 common = TREE_OPERAND (arg0, 1);
3063 left = TREE_OPERAND (arg0, 0);
3064 right = TREE_OPERAND (arg1, 1);
3066 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3068 common = TREE_OPERAND (arg0, 1);
3069 left = TREE_OPERAND (arg0, 0);
3070 right = TREE_OPERAND (arg1, 0);
3072 else
3073 return 0;
3075 return fold (build2 (TREE_CODE (arg0), type, common,
3076 fold (build2 (code, type, left, right))));
3079 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3080 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3082 static tree
3083 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3084 int unsignedp)
3086 tree result = build3 (BIT_FIELD_REF, type, inner,
3087 size_int (bitsize), bitsize_int (bitpos));
3089 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3091 return result;
3094 /* Optimize a bit-field compare.
3096 There are two cases: First is a compare against a constant and the
3097 second is a comparison of two items where the fields are at the same
3098 bit position relative to the start of a chunk (byte, halfword, word)
3099 large enough to contain it. In these cases we can avoid the shift
3100 implicit in bitfield extractions.
3102 For constants, we emit a compare of the shifted constant with the
3103 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3104 compared. For two fields at the same position, we do the ANDs with the
3105 similar mask and compare the result of the ANDs.
3107 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3108 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3109 are the left and right operands of the comparison, respectively.
3111 If the optimization described above can be done, we return the resulting
3112 tree. Otherwise we return zero. */
3114 static tree
3115 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3116 tree lhs, tree rhs)
3118 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3119 tree type = TREE_TYPE (lhs);
3120 tree signed_type, unsigned_type;
3121 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3122 enum machine_mode lmode, rmode, nmode;
3123 int lunsignedp, runsignedp;
3124 int lvolatilep = 0, rvolatilep = 0;
3125 tree linner, rinner = NULL_TREE;
3126 tree mask;
3127 tree offset;
3129 /* Get all the information about the extractions being done. If the bit size
3130 is the same as the size of the underlying object, we aren't doing an
3131 extraction at all and so can do nothing. We also don't want to
3132 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3133 then will no longer be able to replace it. */
3134 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3135 &lunsignedp, &lvolatilep);
3136 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3137 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3138 return 0;
3140 if (!const_p)
3142 /* If this is not a constant, we can only do something if bit positions,
3143 sizes, and signedness are the same. */
3144 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3145 &runsignedp, &rvolatilep);
3147 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3148 || lunsignedp != runsignedp || offset != 0
3149 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3150 return 0;
3153 /* See if we can find a mode to refer to this field. We should be able to,
3154 but fail if we can't. */
3155 nmode = get_best_mode (lbitsize, lbitpos,
3156 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3157 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3158 TYPE_ALIGN (TREE_TYPE (rinner))),
3159 word_mode, lvolatilep || rvolatilep);
3160 if (nmode == VOIDmode)
3161 return 0;
3163 /* Set signed and unsigned types of the precision of this mode for the
3164 shifts below. */
3165 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3166 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3168 /* Compute the bit position and size for the new reference and our offset
3169 within it. If the new reference is the same size as the original, we
3170 won't optimize anything, so return zero. */
3171 nbitsize = GET_MODE_BITSIZE (nmode);
3172 nbitpos = lbitpos & ~ (nbitsize - 1);
3173 lbitpos -= nbitpos;
3174 if (nbitsize == lbitsize)
3175 return 0;
/* Re-express the field position for big-endian bit numbering within
   the new NBITSIZE-wide reference.  */
3177 if (BYTES_BIG_ENDIAN)
3178 lbitpos = nbitsize - lbitsize - lbitpos;
3180 /* Make the mask to be used against the extracted field. */
3181 mask = build_int_cst (unsigned_type, ~0, ~0);
3182 mask = force_fit_type (mask, 0, false, false);
3183 mask = fold_convert (unsigned_type, mask);
/* Shift left then right to leave LBITSIZE one-bits positioned at
   LBITPOS within the NBITSIZE-wide word.  */
3184 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3185 mask = const_binop (RSHIFT_EXPR, mask,
3186 size_int (nbitsize - lbitsize - lbitpos), 0);
3188 if (! const_p)
3189 /* If not comparing with constant, just rework the comparison
3190 and return. */
3191 return build2 (code, compare_type,
3192 build2 (BIT_AND_EXPR, unsigned_type,
3193 make_bit_field_ref (linner, unsigned_type,
3194 nbitsize, nbitpos, 1),
3195 mask),
3196 build2 (BIT_AND_EXPR, unsigned_type,
3197 make_bit_field_ref (rinner, unsigned_type,
3198 nbitsize, nbitpos, 1),
3199 mask));
3201 /* Otherwise, we are handling the constant case. See if the constant is too
3202 big for the field. Warn and return a tree for 0 (false) if so. We do
3203 this not only for its own sake, but to avoid having to test for this
3204 error case below. If we didn't, we might generate wrong code.
3206 For unsigned fields, the constant shifted right by the field length should
3207 be all zero. For signed fields, the high-order bits should agree with
3208 the sign bit. */
3210 if (lunsignedp)
3212 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3213 fold_convert (unsigned_type, rhs),
3214 size_int (lbitsize), 0)))
3216 warning ("comparison is always %d due to width of bit-field",
3217 code == NE_EXPR);
3218 return constant_boolean_node (code == NE_EXPR, compare_type);
3221 else
3223 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3224 size_int (lbitsize - 1), 0);
3225 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3227 warning ("comparison is always %d due to width of bit-field",
3228 code == NE_EXPR);
3229 return constant_boolean_node (code == NE_EXPR, compare_type);
3233 /* Single-bit compares should always be against zero. */
3234 if (lbitsize == 1 && ! integer_zerop (rhs))
3236 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3237 rhs = fold_convert (type, integer_zero_node);
3240 /* Make a new bitfield reference, shift the constant over the
3241 appropriate number of bits and mask it with the computed mask
3242 (in case this was a signed field). If we changed it, make a new one. */
3243 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3244 if (lvolatilep)
3246 TREE_SIDE_EFFECTS (lhs) = 1;
3247 TREE_THIS_VOLATILE (lhs) = 1;
3250 rhs = fold (const_binop (BIT_AND_EXPR,
3251 const_binop (LSHIFT_EXPR,
3252 fold_convert (unsigned_type, rhs),
3253 size_int (lbitpos), 0),
3254 mask, 0));
3256 return build2 (code, compare_type,
3257 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3258 rhs);
3261 /* Subroutine for fold_truthop: decode a field reference.
3263 If EXP is a comparison reference, we return the innermost reference.
3265 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3266 set to the starting bit number.
3268 If the innermost field can be completely contained in a mode-sized
3269 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3271 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3272 otherwise it is not changed.
3274 *PUNSIGNEDP is set to the signedness of the field.
3276 *PMASK is set to the mask used. This is either contained in a
3277 BIT_AND_EXPR or derived from the width of the field.
3279 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3281 Return 0 if this is not a component reference or is one that we can't
3282 do anything with. */
3284 static tree
3285 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3286 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3287 int *punsignedp, int *pvolatilep,
3288 tree *pmask, tree *pand_mask)
3290 tree outer_type = 0;
3291 tree and_mask = 0;
3292 tree mask, inner, offset;
3293 tree unsigned_type;
3294 unsigned int precision;
3296 /* All the optimizations using this function assume integer fields.
3297 There are problems with FP fields since the type_for_size call
3298 below can fail for, e.g., XFmode. */
3299 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3300 return 0;
3302 /* We are interested in the bare arrangement of bits, so strip everything
3303 that doesn't affect the machine mode. However, record the type of the
3304 outermost expression if it may matter below. */
3305 if (TREE_CODE (exp) == NOP_EXPR
3306 || TREE_CODE (exp) == CONVERT_EXPR
3307 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3308 outer_type = TREE_TYPE (exp);
3309 STRIP_NOPS (exp);
 /* Peel off a constant mask applied with BIT_AND_EXPR; remember it so it
 can be merged into the field-width mask below. A non-constant mask
 cannot be handled, so give up in that case. */
3311 if (TREE_CODE (exp) == BIT_AND_EXPR)
3313 and_mask = TREE_OPERAND (exp, 1);
3314 exp = TREE_OPERAND (exp, 0);
3315 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3316 if (TREE_CODE (and_mask) != INTEGER_CST)
3317 return 0;
 /* Decompose the reference into its base object and bit geometry. Give up
 when there was nothing to decode (bare reference and no mask), when the
 size is unknown (negative), or when there is a variable offset that
 cannot be folded into a constant bit position. */
3320 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3321 punsignedp, pvolatilep);
3322 if ((inner == exp && and_mask == 0)
3323 || *pbitsize < 0 || offset != 0
3324 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3325 return 0;
3327 /* If the number of bits in the reference is the same as the bitsize of
3328 the outer type, then the outer type gives the signedness. Otherwise
3329 (in case of a small bitfield) the signedness is unchanged. */
3330 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3331 *punsignedp = TYPE_UNSIGNED (outer_type);
3333 /* Compute the mask to access the bitfield. */
3334 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3335 precision = TYPE_PRECISION (unsigned_type);
 /* Build an all-ones constant, then shift it left and back right by
 PRECISION - *PBITSIZE so exactly *PBITSIZE low-order one-bits remain. */
3337 mask = build_int_cst (unsigned_type, ~0, ~0);
3338 mask = force_fit_type (mask, 0, false, false);
3340 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3341 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3343 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3344 if (and_mask != 0)
3345 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3346 fold_convert (unsigned_type, and_mask), mask));
3348 *pmask = mask;
3349 *pand_mask = and_mask;
3350 return inner;
3353 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3354 bit positions. */
3356 static int
3357 all_ones_mask_p (tree mask, int size)
3359 tree type = TREE_TYPE (mask);
3360 unsigned int precision = TYPE_PRECISION (type);
3361 tree tmask;
 /* Build an all-ones constant in the signed variant of MASK's type. */
3363 tmask = build_int_cst (lang_hooks.types.signed_type (type), ~0, ~0);
3364 tmask = force_fit_type (tmask, 0, false, false);
 /* Shift the all-ones value left then right by PRECISION - SIZE so that
 only the low SIZE bits stay set, and compare the result against MASK.
 NOTE(review): the inner const_binop call looks truncated in this copy
 (its trailing `0' argument and closing paren appear to be missing
 between the two size_int lines) -- verify against upstream sources. */
3366 return
3367 tree_int_cst_equal (mask,
3368 const_binop (RSHIFT_EXPR,
3369 const_binop (LSHIFT_EXPR, tmask,
3370 size_int (precision - size),
3372 size_int (precision - size), 0));
3375 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3376 represents the sign bit of EXP's type. If EXP represents a sign
3377 or zero extension, also test VAL against the unextended type.
3378 The return value is the (sub)expression whose sign bit is VAL,
3379 or NULL_TREE otherwise. */
3381 static tree
3382 sign_bit_p (tree exp, tree val)
3384 unsigned HOST_WIDE_INT mask_lo, lo;
3385 HOST_WIDE_INT mask_hi, hi;
3386 int width;
3387 tree t;
3389 /* Tree EXP must have an integral type. */
3390 t = TREE_TYPE (exp);
3391 if (! INTEGRAL_TYPE_P (t))
3392 return NULL_TREE;
3394 /* Tree VAL must be an integer constant. */
3395 if (TREE_CODE (val) != INTEGER_CST
3396 || TREE_CONSTANT_OVERFLOW (val))
3397 return NULL_TREE;
 /* Compute, as HIGH/LOW word pairs, both the sign-bit value of the type
 (HI/LO) and a mask covering its WIDTH valid bits (MASK_HI/MASK_LO).
 When the precision exceeds one host word the sign bit lives in the
 high word; otherwise it lives in the low word. */
3399 width = TYPE_PRECISION (t);
3400 if (width > HOST_BITS_PER_WIDE_INT)
3402 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3403 lo = 0;
3405 mask_hi = ((unsigned HOST_WIDE_INT) -1
3406 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3407 mask_lo = -1;
3409 else
3411 hi = 0;
3412 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3414 mask_hi = 0;
3415 mask_lo = ((unsigned HOST_WIDE_INT) -1
3416 >> (HOST_BITS_PER_WIDE_INT - width));
3419 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3420 treat VAL as if it were unsigned. */
3421 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3422 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3423 return exp;
3425 /* Handle extension from a narrower type. */
3426 if (TREE_CODE (exp) == NOP_EXPR
3427 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3428 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3430 return NULL_TREE;
3433 /* Subroutine for fold_truthop: determine if an operand is simple enough
3434 to be evaluated unconditionally. */
3436 static int
3437 simple_operand_p (tree exp)
3439 /* Strip any conversions that don't change the machine mode. */
3440 while ((TREE_CODE (exp) == NOP_EXPR
3441 || TREE_CODE (exp) == CONVERT_EXPR)
3442 && (TYPE_MODE (TREE_TYPE (exp))
3443 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3444 exp = TREE_OPERAND (exp, 0);
 /* Constants (tree code class 'c') are always simple; otherwise only
 accept declarations whose load cannot trap, has no side effects,
 and is cheap. */
3446 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3447 || (DECL_P (exp)
3448 && ! TREE_ADDRESSABLE (exp)
3449 && ! TREE_THIS_VOLATILE (exp)
3450 && ! DECL_NONLOCAL (exp)
3451 /* Don't regard global variables as simple. They may be
3452 allocated in ways unknown to the compiler (shared memory,
3453 #pragma weak, etc). */
3454 && ! TREE_PUBLIC (exp)
3455 && ! DECL_EXTERNAL (exp)
3456 /* Loading a static variable is unduly expensive, but global
3457 registers aren't expensive. */
3458 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3461 /* The following functions are subroutines to fold_range_test and allow it to
3462 try to change a logical combination of comparisons into a range test.
3464 For example, both
3465 X == 2 || X == 3 || X == 4 || X == 5
 and
3467 X >= 2 && X <= 5
3468 are converted to
3469 (unsigned) (X - 2) <= 3
3471 We describe each set of comparisons as being either inside or outside
3472 a range, using a variable named like IN_P, and then describe the
3473 range with a lower and upper bound. If one of the bounds is omitted,
3474 it represents either the highest or lowest value of the type.
3476 In the comments below, we represent a range by two numbers in brackets
3477 preceded by a "+" to designate being inside that range, or a "-" to
3478 designate being outside that range, so the condition can be inverted by
3479 flipping the prefix. An omitted bound is represented by a "-". For
3480 example, "- [-, 10]" means being outside the range starting at the lowest
3481 possible value and ending at 10, in other words, being greater than 10.
3482 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3483 always false.
3485 We set up things so that the missing bounds are handled in a consistent
3486 manner so neither a missing bound nor "true" and "false" need to be
3487 handled using a special case. */
3489 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3490 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3491 and UPPER1_P are nonzero if the respective argument is an upper bound
3492 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3493 must be specified for a comparison. ARG1 will be converted to ARG0's
3494 type if both are specified. */
3496 static tree
3497 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3498 tree arg1, int upper1_p)
3500 tree tem;
3501 int result;
3502 int sgn0, sgn1;
3504 /* If neither arg represents infinity, do the normal operation.
3505 Else, if not a comparison, return infinity. Else handle the special
3506 comparison rules. Note that most of the cases below won't occur, but
3507 are handled for consistency. */
3509 if (arg0 != 0 && arg1 != 0)
 /* Both bounds are finite: simply fold the operation. If folding does
 not produce an INTEGER_CST, report failure by returning 0. */
3511 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3512 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3513 STRIP_NOPS (tem);
3514 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3517 if (TREE_CODE_CLASS (code) != '<')
3518 return 0;
3520 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3521 for neither. In real maths, we cannot assume open ended ranges are
3522 the same. But, this is computer arithmetic, where numbers are finite.
3523 We can therefore make the transformation of any unbounded range with
3524 the value Z, Z being greater than any representable number. This permits
3525 us to treat unbounded ranges as equal. */
3526 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3527 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
 /* Compare the bounds purely by their -1/0/1 "infinity class": a lower
 bound of an unbounded range is below every finite value, an upper
 bound is above every finite value. */
3528 switch (code)
3530 case EQ_EXPR:
3531 result = sgn0 == sgn1;
3532 break;
3533 case NE_EXPR:
3534 result = sgn0 != sgn1;
3535 break;
3536 case LT_EXPR:
3537 result = sgn0 < sgn1;
3538 break;
3539 case LE_EXPR:
3540 result = sgn0 <= sgn1;
3541 break;
3542 case GT_EXPR:
3543 result = sgn0 > sgn1;
3544 break;
3545 case GE_EXPR:
3546 result = sgn0 >= sgn1;
3547 break;
3548 default:
3549 abort ();
3552 return constant_boolean_node (result, type);
3555 /* Given EXP, a logical expression, set the range it is testing into
3556 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3557 actually being tested. *PLOW and *PHIGH will be made of the same type
3558 as the returned expression. If EXP is not a comparison, we will most
3559 likely not be returning a useful value and range. */
3561 static tree
3562 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3564 enum tree_code code;
3565 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3566 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3567 int in_p, n_in_p;
3568 tree low, high, n_low, n_high;
3570 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3571 and see if we can refine the range. Some of the cases below may not
3572 happen, but it doesn't seem worth worrying about this. We "continue"
3573 the outer loop when we've changed something; otherwise we "break"
3574 the switch, which will "break" the while. */
3576 in_p = 0;
3577 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3579 while (1)
3581 code = TREE_CODE (exp);
3582 exp_type = TREE_TYPE (exp);
 /* Fetch the operands for the classes that have them: ARG0 for any
 expression with at least one operand, ARG1 only for binary,
 comparison, and multi-operand 'e'-class codes. */
3584 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3586 if (first_rtl_op (code) > 0)
3587 arg0 = TREE_OPERAND (exp, 0);
3588 if (TREE_CODE_CLASS (code) == '<'
3589 || TREE_CODE_CLASS (code) == '1'
3590 || TREE_CODE_CLASS (code) == '2')
3591 arg0_type = TREE_TYPE (arg0);
3592 if (TREE_CODE_CLASS (code) == '2'
3593 || TREE_CODE_CLASS (code) == '<'
3594 || (TREE_CODE_CLASS (code) == 'e'
3595 && TREE_CODE_LENGTH (code) > 1))
3596 arg1 = TREE_OPERAND (exp, 1);
3599 switch (code)
3601 case TRUTH_NOT_EXPR:
3602 in_p = ! in_p, exp = arg0;
3603 continue;
3605 case EQ_EXPR: case NE_EXPR:
3606 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3607 /* We can only do something if the range is testing for zero
3608 and if the second operand is an integer constant. Note that
3609 saying something is "in" the range we make is done by
3610 complementing IN_P since it will set in the initial case of
3611 being not equal to zero; "out" is leaving it alone. */
3612 if (low == 0 || high == 0
3613 || ! integer_zerop (low) || ! integer_zerop (high)
3614 || TREE_CODE (arg1) != INTEGER_CST)
3615 break;
3617 switch (code)
3619 case NE_EXPR: /* - [c, c] */
3620 low = high = arg1;
3621 break;
3622 case EQ_EXPR: /* + [c, c] */
3623 in_p = ! in_p, low = high = arg1;
3624 break;
3625 case GT_EXPR: /* - [-, c] */
3626 low = 0, high = arg1;
3627 break;
3628 case GE_EXPR: /* + [c, -] */
3629 in_p = ! in_p, low = arg1, high = 0;
3630 break;
3631 case LT_EXPR: /* - [c, -] */
3632 low = arg1, high = 0;
3633 break;
3634 case LE_EXPR: /* + [-, c] */
3635 in_p = ! in_p, low = 0, high = arg1;
3636 break;
3637 default:
3638 abort ();
3641 /* If this is an unsigned comparison, we also know that EXP is
3642 greater than or equal to zero. We base the range tests we make
3643 on that fact, so we record it here so we can parse existing
3644 range tests. We test arg0_type since often the return type
3645 of, e.g. EQ_EXPR, is boolean. */
3646 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
 /* Intersect the half-open range with [0, +inf) so the unsigned
 lower bound of zero becomes explicit. */
3648 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3649 in_p, low, high, 1,
3650 fold_convert (arg0_type, integer_zero_node),
3651 NULL_TREE))
3652 break;
3654 in_p = n_in_p, low = n_low, high = n_high;
3656 /* If the high bound is missing, but we have a nonzero low
3657 bound, reverse the range so it goes from zero to the low bound
3658 minus 1. */
3659 if (high == 0 && low && ! integer_zerop (low))
3661 in_p = ! in_p;
3662 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3663 integer_one_node, 0);
3664 low = fold_convert (arg0_type, integer_zero_node);
3668 exp = arg0;
3669 continue;
3671 case NEGATE_EXPR:
3672 /* (-x) IN [a,b] -> x in [-b, -a] */
3673 n_low = range_binop (MINUS_EXPR, exp_type,
3674 fold_convert (exp_type, integer_zero_node),
3675 0, high, 1);
3676 n_high = range_binop (MINUS_EXPR, exp_type,
3677 fold_convert (exp_type, integer_zero_node),
3678 0, low, 0);
3679 low = n_low, high = n_high;
3680 exp = arg0;
3681 continue;
3683 case BIT_NOT_EXPR:
3684 /* ~ X -> -X - 1 */
3685 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3686 fold_convert (exp_type, integer_one_node));
3687 continue;
3689 case PLUS_EXPR: case MINUS_EXPR:
3690 if (TREE_CODE (arg1) != INTEGER_CST)
3691 break;
3693 /* If EXP is signed, any overflow in the computation is undefined,
3694 so we don't worry about it so long as our computations on
3695 the bounds don't overflow. For unsigned, overflow is defined
3696 and this is exactly the right thing. */
3697 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3698 arg0_type, low, 0, arg1, 0);
3699 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3700 arg0_type, high, 1, arg1, 0);
3701 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3702 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3703 break;
3705 /* Check for an unsigned range which has wrapped around the maximum
3706 value thus making n_high < n_low, and normalize it. */
3707 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
 /* A wrapped range [x+1, x-k] is the complement of [x-k+1, x],
 so swap the bounds (adjusted by one) and flip IN_P. */
3709 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3710 integer_one_node, 0);
3711 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3712 integer_one_node, 0);
3714 /* If the range is of the form +/- [ x+1, x ], we won't
3715 be able to normalize it. But then, it represents the
3716 whole range or the empty set, so make it
3717 +/- [ -, - ]. */
3718 if (tree_int_cst_equal (n_low, low)
3719 && tree_int_cst_equal (n_high, high))
3720 low = high = 0;
3721 else
3722 in_p = ! in_p;
3724 else
3725 low = n_low, high = n_high;
3727 exp = arg0;
3728 continue;
3730 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
 /* Only look through a conversion when the inner type is not wider
 than the outer one and the bounds fit the inner type; otherwise
 the bounds could not be represented after conversion. */
3731 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3732 break;
3734 if (! INTEGRAL_TYPE_P (arg0_type)
3735 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3736 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3737 break;
3739 n_low = low, n_high = high;
3741 if (n_low != 0)
3742 n_low = fold_convert (arg0_type, n_low);
3744 if (n_high != 0)
3745 n_high = fold_convert (arg0_type, n_high);
3748 /* If we're converting arg0 from an unsigned type, to exp,
3749 a signed type, we will be doing the comparison as unsigned.
3750 The tests above have already verified that LOW and HIGH
3751 are both positive.
3753 So we have to ensure that we will handle large unsigned
3754 values the same way that the current signed bounds treat
3755 negative values. */
3757 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3759 tree high_positive;
3760 tree equiv_type = lang_hooks.types.type_for_mode
3761 (TYPE_MODE (arg0_type), 1);
3763 /* A range without an upper bound is, naturally, unbounded.
3764 Since convert would have cropped a very large value, use
3765 the max value for the destination type. */
3766 high_positive
3767 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3768 : TYPE_MAX_VALUE (arg0_type);
 /* When the precisions match, the signed-positive cutoff is
 the unsigned maximum shifted right by one (i.e. the largest
 value that stays non-negative when reinterpreted signed). */
3770 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3771 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3772 fold_convert (arg0_type,
3773 high_positive),
3774 fold_convert (arg0_type,
3775 integer_one_node)));
3777 /* If the low bound is specified, "and" the range with the
3778 range for which the original unsigned value will be
3779 positive. */
3780 if (low != 0)
3782 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3783 1, n_low, n_high, 1,
3784 fold_convert (arg0_type,
3785 integer_zero_node),
3786 high_positive))
3787 break;
3789 in_p = (n_in_p == in_p);
3791 else
3793 /* Otherwise, "or" the range with the range of the input
3794 that will be interpreted as negative. */
3795 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3796 0, n_low, n_high, 1,
3797 fold_convert (arg0_type,
3798 integer_zero_node),
3799 high_positive))
3800 break;
3802 in_p = (in_p != n_in_p);
3806 exp = arg0;
3807 low = n_low, high = n_high;
3808 continue;
3810 default:
3811 break;
3814 break;
3817 /* If EXP is a constant, we can evaluate whether this is true or false. */
3818 if (TREE_CODE (exp) == INTEGER_CST)
3820 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3821 exp, 0, low, 0))
3822 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3823 exp, 1, high, 1)));
3824 low = high = 0;
3825 exp = 0;
3828 *pin_p = in_p, *plow = low, *phigh = high;
3829 return exp;
3832 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3833 type, TYPE, return an expression to test if EXP is in (or out of, depending
3834 on IN_P) the range. Return 0 if the test couldn't be created. */
3836 static tree
3837 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3839 tree etype = TREE_TYPE (exp);
3840 tree value;
 /* An "out of range" test is built as the inversion of the corresponding
 "in range" test. */
3842 if (! in_p)
3844 value = build_range_check (type, exp, 1, low, high);
3845 if (value != 0)
3846 return invert_truthvalue (value);
3848 return 0;
 /* No bounds at all means the range is unconditionally true. */
3851 if (low == 0 && high == 0)
3852 return fold_convert (type, integer_one_node);
3854 if (low == 0)
3855 return fold (build2 (LE_EXPR, type, exp, high));
3857 if (high == 0)
3858 return fold (build2 (GE_EXPR, type, exp, low));
3860 if (operand_equal_p (low, high, 0))
3861 return fold (build2 (EQ_EXPR, type, exp, low));
 /* [0, HIGH] can be tested with a single unsigned comparison. */
3863 if (integer_zerop (low))
3865 if (! TYPE_UNSIGNED (etype))
3867 etype = lang_hooks.types.unsigned_type (etype);
3868 high = fold_convert (etype, high);
3869 exp = fold_convert (etype, exp);
3871 return build_range_check (type, exp, 1, 0, high);
3874 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3875 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3877 unsigned HOST_WIDE_INT lo;
3878 HOST_WIDE_INT hi;
3879 int prec;
 /* Compute the HIGH/LOW word pair for the signed maximum of ETYPE
 (2**(prec-1) - 1) so we can recognize HIGH as that value. */
3881 prec = TYPE_PRECISION (etype);
3882 if (prec <= HOST_BITS_PER_WIDE_INT)
3884 hi = 0;
3885 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3887 else
3889 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3890 lo = (unsigned HOST_WIDE_INT) -1;
3893 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3895 if (TYPE_UNSIGNED (etype))
3897 etype = lang_hooks.types.signed_type (etype);
3898 exp = fold_convert (etype, exp);
3900 return fold (build2 (GT_EXPR, type, exp,
3901 fold_convert (etype, integer_zero_node)));
 /* General case: test (EXP - LOW) against [0, HIGH - LOW], provided the
 subtraction of the bounds does not overflow. */
3905 value = const_binop (MINUS_EXPR, high, low, 0);
3906 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3908 tree utype, minv, maxv;
3910 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3911 for the type in question, as we rely on this here. */
3912 switch (TREE_CODE (etype))
3914 case INTEGER_TYPE:
3915 case ENUMERAL_TYPE:
3916 case CHAR_TYPE:
3917 utype = lang_hooks.types.unsigned_type (etype);
3918 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3919 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3920 integer_one_node, 1);
3921 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3922 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3923 minv, 1, maxv, 1)))
 /* The wrap-around property holds, so redo the bound
 subtraction in the unsigned type where it is defined. */
3925 etype = utype;
3926 high = fold_convert (etype, high);
3927 low = fold_convert (etype, low);
3928 exp = fold_convert (etype, exp);
3929 value = const_binop (MINUS_EXPR, high, low, 0);
3931 break;
3932 default:
3933 break;
3937 if (value != 0 && ! TREE_OVERFLOW (value))
3938 return build_range_check (type,
3939 fold (build2 (MINUS_EXPR, etype, exp, low)),
3940 1, fold_convert (etype, integer_zero_node),
3941 value);
3943 return 0;
3946 /* Given two ranges, see if we can merge them into one. Return 1 if we
3947 can, 0 if we can't. Set the output range into the specified parameters. */
3949 static int
3950 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3951 tree high0, int in1_p, tree low1, tree high1)
3953 int no_overlap;
3954 int subset;
3955 int temp;
3956 tree tem;
3957 int in_p;
3958 tree low, high;
 /* LOWEQUAL / HIGHEQUAL record whether the two ranges share their lower
 resp. upper bound (a missing bound only equals another missing bound). */
3959 int lowequal = ((low0 == 0 && low1 == 0)
3960 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3961 low0, 0, low1, 0)));
3962 int highequal = ((high0 == 0 && high1 == 0)
3963 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3964 high0, 1, high1, 1)));
3966 /* Make range 0 be the range that starts first, or ends last if they
3967 start at the same value. Swap them if it isn't. */
3968 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3969 low0, 0, low1, 0))
3970 || (lowequal
3971 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3972 high1, 1, high0, 1))))
3974 temp = in0_p, in0_p = in1_p, in1_p = temp;
3975 tem = low0, low0 = low1, low1 = tem;
3976 tem = high0, high0 = high1, high1 = tem;
3979 /* Now flag two cases, whether the ranges are disjoint or whether the
3980 second range is totally subsumed in the first. Note that the tests
3981 below are simplified by the ones above. */
3982 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3983 high0, 1, low1, 0));
3984 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3985 high1, 1, high0, 1));
3987 /* We now have four cases, depending on whether we are including or
3988 excluding the two ranges. */
3989 if (in0_p && in1_p)
3991 /* If they don't overlap, the result is false. If the second range
3992 is a subset it is the result. Otherwise, the range is from the start
3993 of the second to the end of the first. */
3994 if (no_overlap)
3995 in_p = 0, low = high = 0;
3996 else if (subset)
3997 in_p = 1, low = low1, high = high1;
3998 else
3999 in_p = 1, low = low1, high = high0;
4002 else if (in0_p && ! in1_p)
4004 /* If they don't overlap, the result is the first range. If they are
4005 equal, the result is false. If the second range is a subset of the
4006 first, and the ranges begin at the same place, we go from just after
4007 the end of the first range to the end of the second. If the second
4008 range is not a subset of the first, or if it is a subset and both
4009 ranges end at the same place, the range starts at the start of the
4010 first range and ends just before the second range.
4011 Otherwise, we can't describe this as a single range. */
4012 if (no_overlap)
4013 in_p = 1, low = low0, high = high0;
4014 else if (lowequal && highequal)
4015 in_p = 0, low = high = 0;
4016 else if (subset && lowequal)
4018 in_p = 1, high = high0;
4019 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4020 integer_one_node, 0);
4022 else if (! subset || highequal)
4024 in_p = 1, low = low0;
4025 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4026 integer_one_node, 0);
4028 else
4029 return 0;
4032 else if (! in0_p && in1_p)
4034 /* If they don't overlap, the result is the second range. If the second
4035 is a subset of the first, the result is false. Otherwise,
4036 the range starts just after the first range and ends at the
4037 end of the second. */
4038 if (no_overlap)
4039 in_p = 1, low = low1, high = high1;
4040 else if (subset || highequal)
4041 in_p = 0, low = high = 0;
4042 else
4044 in_p = 1, high = high1;
4045 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4046 integer_one_node, 0);
4050 else
4052 /* The case where we are excluding both ranges. Here the complex case
4053 is if they don't overlap. In that case, the only time we have a
4054 range is if they are adjacent. If the second is a subset of the
4055 first, the result is the first. Otherwise, the range to exclude
4056 starts at the beginning of the first range and ends at the end of the
4057 second. */
4058 if (no_overlap)
 /* Adjacent ranges (high0 + 1 == low1) can be merged into one
 excluded range [low0, high1]. */
4060 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4061 range_binop (PLUS_EXPR, NULL_TREE,
4062 high0, 1,
4063 integer_one_node, 1),
4064 1, low1, 0)))
4065 in_p = 0, low = low0, high = high1;
4066 else
4068 /* Canonicalize - [min, x] into - [-, x]. */
4069 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4070 switch (TREE_CODE (TREE_TYPE (low0)))
4072 case ENUMERAL_TYPE:
 /* Only canonicalize an enum if its precision fills its
 mode; otherwise values below TYPE_MIN_VALUE exist. */
4073 if (TYPE_PRECISION (TREE_TYPE (low0))
4074 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4075 break;
4076 /* FALLTHROUGH */
4077 case INTEGER_TYPE:
4078 case CHAR_TYPE:
4079 if (tree_int_cst_equal (low0,
4080 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4081 low0 = 0;
4082 break;
4083 case POINTER_TYPE:
4084 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4085 && integer_zerop (low0))
4086 low0 = 0;
4087 break;
4088 default:
4089 break;
4092 /* Canonicalize - [x, max] into - [x, -]. */
4093 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4094 switch (TREE_CODE (TREE_TYPE (high1)))
4096 case ENUMERAL_TYPE:
4097 if (TYPE_PRECISION (TREE_TYPE (high1))
4098 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4099 break;
4100 /* FALLTHROUGH */
4101 case INTEGER_TYPE:
4102 case CHAR_TYPE:
4103 if (tree_int_cst_equal (high1,
4104 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4105 high1 = 0;
4106 break;
4107 case POINTER_TYPE:
4108 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4109 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4110 high1, 1,
4111 integer_one_node, 1)))
4112 high1 = 0;
4113 break;
4114 default:
4115 break;
4118 /* The ranges might be also adjacent between the maximum and
4119 minimum values of the given type. For
4120 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4121 return + [x + 1, y - 1]. */
4122 if (low0 == 0 && high1 == 0)
4124 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4125 integer_one_node, 1);
4126 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4127 integer_one_node, 0);
4128 if (low == 0 || high == 0)
4129 return 0;
4131 in_p = 1;
4133 else
4134 return 0;
4137 else if (subset)
4138 in_p = 0, low = low0, high = high0;
4139 else
4140 in_p = 0, low = low0, high = high1;
4143 *pin_p = in_p, *plow = low, *phigh = high;
4144 return 1;
4148 /* Subroutine of fold, looking inside expressions of the form
4149 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4150 of the COND_EXPR. This function is being used also to optimize
4151 A op B ? C : A, by reversing the comparison first.
4153 Return a folded expression whose code is not a COND_EXPR
4154 anymore, or NULL_TREE if no folding opportunity is found. */
4156 static tree
4157 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4159 enum tree_code comp_code = TREE_CODE (arg0);
4160 tree arg00 = TREE_OPERAND (arg0, 0);
4161 tree arg01 = TREE_OPERAND (arg0, 1);
4162 tree arg1_type = TREE_TYPE (arg1);
4163 tree tem;
4165 STRIP_NOPS (arg1);
4166 STRIP_NOPS (arg2);
4168 /* If we have A op 0 ? A : -A, consider applying the following
4169 transformations:
4171 A == 0? A : -A same as -A
4172 A != 0? A : -A same as A
4173 A >= 0? A : -A same as abs (A)
4174 A > 0? A : -A same as abs (A)
4175 A <= 0? A : -A same as -abs (A)
4176 A < 0? A : -A same as -abs (A)
4178 None of these transformations work for modes with signed
4179 zeros. If A is +/-0, the first two transformations will
4180 change the sign of the result (from +0 to -0, or vice
4181 versa). The last four will fix the sign of the result,
4182 even though the original expressions could be positive or
4183 negative, depending on the sign of A.
4185 Note that all these transformations are correct if A is
4186 NaN, since the two alternatives (A and -A) are also NaNs. */
4187 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4188 ? real_zerop (arg01)
4189 : integer_zerop (arg01))
4190 && TREE_CODE (arg2) == NEGATE_EXPR
4191 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4192 switch (comp_code)
4194 case EQ_EXPR:
4195 tem = fold_convert (arg1_type, arg1);
4196 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4197 case NE_EXPR:
4198 return pedantic_non_lvalue (fold_convert (type, arg1));
4199 case GE_EXPR:
4200 case GT_EXPR:
4201 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4202 arg1 = fold_convert (lang_hooks.types.signed_type
4203 (TREE_TYPE (arg1)), arg1);
4204 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4205 return pedantic_non_lvalue (fold_convert (type, tem));
4206 case LE_EXPR:
4207 case LT_EXPR:
4208 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4209 arg1 = fold_convert (lang_hooks.types.signed_type
4210 (TREE_TYPE (arg1)), arg1);
4211 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4212 return negate_expr (fold_convert (type, tem));
4213 default:
4214 abort ();
4217 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4218 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4219 both transformations are correct when A is NaN: A != 0
4220 is then true, and A == 0 is false. */
4222 if (integer_zerop (arg01) && integer_zerop (arg2))
4224 if (comp_code == NE_EXPR)
4225 return pedantic_non_lvalue (fold_convert (type, arg1));
4226 else if (comp_code == EQ_EXPR)
4227 return fold_convert (type, integer_zero_node);
4230 /* Try some transformations of A op B ? A : B.
4232 A == B? A : B same as B
4233 A != B? A : B same as A
4234 A >= B? A : B same as max (A, B)
4235 A > B? A : B same as max (B, A)
4236 A <= B? A : B same as min (A, B)
4237 A < B? A : B same as min (B, A)
4239 As above, these transformations don't work in the presence
4240 of signed zeros. For example, if A and B are zeros of
4241 opposite sign, the first two transformations will change
4242 the sign of the result. In the last four, the original
4243 expressions give different results for (A=+0, B=-0) and
4244 (A=-0, B=+0), but the transformed expressions do not.
4246 The first two transformations are correct if either A or B
4247 is a NaN. In the first transformation, the condition will
4248 be false, and B will indeed be chosen. In the case of the
4249 second transformation, the condition A != B will be true,
4250 and A will be chosen.
4252 The conversions to max() and min() are not correct if B is
4253 a number and A is not. The conditions in the original
4254 expressions will be false, so all four give B. The min()
4255 and max() versions would give a NaN instead. */
4256 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4258 tree comp_op0 = arg00;
4259 tree comp_op1 = arg01;
4260 tree comp_type = TREE_TYPE (comp_op0);
4262 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4263 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4265 comp_type = type;
4266 comp_op0 = arg1;
4267 comp_op1 = arg2;
4270 switch (comp_code)
4272 case EQ_EXPR:
4273 return pedantic_non_lvalue (fold_convert (type, arg2));
4274 case NE_EXPR:
4275 return pedantic_non_lvalue (fold_convert (type, arg1));
4276 case LE_EXPR:
4277 case LT_EXPR:
4278 /* In C++ a ?: expression can be an lvalue, so put the
4279 operand which will be used if they are equal first
4280 so that we can convert this back to the
4281 corresponding COND_EXPR. */
4282 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4284 comp_op0 = fold_convert (comp_type, comp_op0);
4285 comp_op1 = fold_convert (comp_type, comp_op1);
4286 tem = fold (build2 (MIN_EXPR, comp_type,
4287 (comp_code == LE_EXPR
4288 ? comp_op0 : comp_op1),
4289 (comp_code == LE_EXPR
4290 ? comp_op1 : comp_op0)));
4291 return pedantic_non_lvalue (fold_convert (type, tem));
4293 break;
4294 case GE_EXPR:
4295 case GT_EXPR:
4296 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4298 comp_op0 = fold_convert (comp_type, comp_op0);
4299 comp_op1 = fold_convert (comp_type, comp_op1);
4300 tem = fold (build2 (MAX_EXPR, comp_type,
4301 (comp_code == GE_EXPR
4302 ? comp_op0 : comp_op1),
4303 (comp_code == GE_EXPR
4304 ? comp_op1 : comp_op0)));
4305 tem = fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1));
4306 return pedantic_non_lvalue (fold_convert (type, tem));
4308 break;
4309 default:
4310 abort ();
4314 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4315 we might still be able to simplify this. For example,
4316 if C1 is one less or one more than C2, this might have started
4317 out as a MIN or MAX and been transformed by this function.
4318 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4320 if (INTEGRAL_TYPE_P (type)
4321 && TREE_CODE (arg01) == INTEGER_CST
4322 && TREE_CODE (arg2) == INTEGER_CST)
4323 switch (comp_code)
4325 case EQ_EXPR:
4326 /* We can replace A with C1 in this case. */
4327 arg1 = fold_convert (type, arg01);
4328 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4330 case LT_EXPR:
4331 /* If C1 is C2 + 1, this is min(A, C2). */
4332 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4333 OEP_ONLY_CONST)
4334 && operand_equal_p (arg01,
4335 const_binop (PLUS_EXPR, arg2,
4336 integer_one_node, 0),
4337 OEP_ONLY_CONST))
4338 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4339 type, arg1, arg2)));
4340 break;
4342 case LE_EXPR:
4343 /* If C1 is C2 - 1, this is min(A, C2). */
4344 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4345 OEP_ONLY_CONST)
4346 && operand_equal_p (arg01,
4347 const_binop (MINUS_EXPR, arg2,
4348 integer_one_node, 0),
4349 OEP_ONLY_CONST))
4350 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4351 type, arg1, arg2)));
4352 break;
4354 case GT_EXPR:
4355 /* If C1 is C2 - 1, this is max(A, C2). */
4356 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4357 OEP_ONLY_CONST)
4358 && operand_equal_p (arg01,
4359 const_binop (MINUS_EXPR, arg2,
4360 integer_one_node, 0),
4361 OEP_ONLY_CONST))
4362 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4363 type, arg1, arg2)));
4364 break;
4366 case GE_EXPR:
4367 /* If C1 is C2 + 1, this is max(A, C2). */
4368 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4369 OEP_ONLY_CONST)
4370 && operand_equal_p (arg01,
4371 const_binop (PLUS_EXPR, arg2,
4372 integer_one_node, 0),
4373 OEP_ONLY_CONST))
4374 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4375 type, arg1, arg2)));
4376 break;
4377 case NE_EXPR:
4378 break;
4379 default:
4380 abort ();
4383 return NULL_TREE;
4388 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4389 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4390 #endif
4392 /* EXP is some logical combination of boolean tests. See if we can
4393 merge it into some range test. Return the new tree if so. */
4395 static tree
4396 fold_range_test (tree exp)
4398 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4399 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4400 int in0_p, in1_p, in_p;
4401 tree low0, low1, low, high0, high1, high;
4402 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4403 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4404 tree tem;
4406 /* If this is an OR operation, invert both sides; we will invert
4407 again at the end. */
4408 if (or_op)
4409 in0_p = ! in0_p, in1_p = ! in1_p;
4411 /* If both expressions are the same, if we can merge the ranges, and we
4412 can build the range test, return it or it inverted. If one of the
4413 ranges is always true or always false, consider it to be the same
4414 expression as the other. */
4415 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4416 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4417 in1_p, low1, high1)
4418 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4419 lhs != 0 ? lhs
4420 : rhs != 0 ? rhs : integer_zero_node,
4421 in_p, low, high))))
4422 return or_op ? invert_truthvalue (tem) : tem;
4424 /* On machines where the branch cost is expensive, if this is a
4425 short-circuited branch and the underlying object on both sides
4426 is the same, make a non-short-circuit operation. */
4427 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4428 && lhs != 0 && rhs != 0
4429 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4430 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4431 && operand_equal_p (lhs, rhs, 0))
4433 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4434 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4435 which cases we can't do this. */
4436 if (simple_operand_p (lhs))
4437 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4438 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4439 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4440 TREE_OPERAND (exp, 1));
4442 else if (lang_hooks.decls.global_bindings_p () == 0
4443 && ! CONTAINS_PLACEHOLDER_P (lhs))
4445 tree common = save_expr (lhs);
4447 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4448 or_op ? ! in0_p : in0_p,
4449 low0, high0))
4450 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4451 or_op ? ! in1_p : in1_p,
4452 low1, high1))))
4453 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4454 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4455 TREE_TYPE (exp), lhs, rhs);
4459 return 0;
4462 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4463 bit value. Arrange things so the extra bits will be set to zero if and
4464 only if C is signed-extended to its full width. If MASK is nonzero,
4465 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4467 static tree
4468 unextend (tree c, int p, int unsignedp, tree mask)
4470 tree type = TREE_TYPE (c);
4471 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4472 tree temp;
4474 if (p == modesize || unsignedp)
4475 return c;
4477 /* We work by getting just the sign bit into the low-order bit, then
4478 into the high-order bit, then sign-extend. We then XOR that value
4479 with C. */
4480 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4481 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4483 /* We must use a signed type in order to get an arithmetic right shift.
4484 However, we must also avoid introducing accidental overflows, so that
4485 a subsequent call to integer_zerop will work. Hence we must
4486 do the type conversion here. At this point, the constant is either
4487 zero or one, and the conversion to a signed type can never overflow.
4488 We could get an overflow if this conversion is done anywhere else. */
4489 if (TYPE_UNSIGNED (type))
4490 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4492 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4493 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4494 if (mask != 0)
4495 temp = const_binop (BIT_AND_EXPR, temp,
4496 fold_convert (TREE_TYPE (c), mask), 0);
4497 /* If necessary, convert the type back to match the type of C. */
4498 if (TYPE_UNSIGNED (type))
4499 temp = fold_convert (type, temp);
4501 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4504 /* Find ways of folding logical expressions of LHS and RHS:
4505 Try to merge two comparisons to the same innermost item.
4506 Look for range tests like "ch >= '0' && ch <= '9'".
4507 Look for combinations of simple terms on machines with expensive branches
4508 and evaluate the RHS unconditionally.
4510 For example, if we have p->a == 2 && p->b == 4 and we can make an
4511 object large enough to span both A and B, we can do this with a comparison
4512 against the object ANDed with the a mask.
4514 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4515 operations to do this with one comparison.
4517 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4518 function and the one above.
4520 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4521 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4523 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4524 two operands.
4526 We return the simplified tree or 0 if no optimization is possible. */
4528 static tree
4529 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4531 /* If this is the "or" of two comparisons, we can do something if
4532 the comparisons are NE_EXPR. If this is the "and", we can do something
4533 if the comparisons are EQ_EXPR. I.e.,
4534 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4536 WANTED_CODE is this operation code. For single bit fields, we can
4537 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4538 comparison for one-bit fields. */
4540 enum tree_code wanted_code;
4541 enum tree_code lcode, rcode;
4542 tree ll_arg, lr_arg, rl_arg, rr_arg;
4543 tree ll_inner, lr_inner, rl_inner, rr_inner;
4544 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4545 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4546 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4547 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4548 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4549 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4550 enum machine_mode lnmode, rnmode;
4551 tree ll_mask, lr_mask, rl_mask, rr_mask;
4552 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4553 tree l_const, r_const;
4554 tree lntype, rntype, result;
4555 int first_bit, end_bit;
4556 int volatilep;
4558 /* Start by getting the comparison codes. Fail if anything is volatile.
4559 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4560 it were surrounded with a NE_EXPR. */
4562 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4563 return 0;
4565 lcode = TREE_CODE (lhs);
4566 rcode = TREE_CODE (rhs);
4568 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4570 lhs = build2 (NE_EXPR, truth_type, lhs,
4571 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4572 lcode = NE_EXPR;
4575 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4577 rhs = build2 (NE_EXPR, truth_type, rhs,
4578 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4579 rcode = NE_EXPR;
4582 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4583 return 0;
4585 ll_arg = TREE_OPERAND (lhs, 0);
4586 lr_arg = TREE_OPERAND (lhs, 1);
4587 rl_arg = TREE_OPERAND (rhs, 0);
4588 rr_arg = TREE_OPERAND (rhs, 1);
4590 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4591 if (simple_operand_p (ll_arg)
4592 && simple_operand_p (lr_arg))
4594 tree result;
4595 if (operand_equal_p (ll_arg, rl_arg, 0)
4596 && operand_equal_p (lr_arg, rr_arg, 0))
4598 result = combine_comparisons (code, lcode, rcode,
4599 truth_type, ll_arg, lr_arg);
4600 if (result)
4601 return result;
4603 else if (operand_equal_p (ll_arg, rr_arg, 0)
4604 && operand_equal_p (lr_arg, rl_arg, 0))
4606 result = combine_comparisons (code, lcode,
4607 swap_tree_comparison (rcode),
4608 truth_type, ll_arg, lr_arg);
4609 if (result)
4610 return result;
4614 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4615 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4617 /* If the RHS can be evaluated unconditionally and its operands are
4618 simple, it wins to evaluate the RHS unconditionally on machines
4619 with expensive branches. In this case, this isn't a comparison
4620 that can be merged. Avoid doing this if the RHS is a floating-point
4621 comparison since those can trap. */
4623 if (BRANCH_COST >= 2
4624 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4625 && simple_operand_p (rl_arg)
4626 && simple_operand_p (rr_arg))
4628 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4629 if (code == TRUTH_OR_EXPR
4630 && lcode == NE_EXPR && integer_zerop (lr_arg)
4631 && rcode == NE_EXPR && integer_zerop (rr_arg)
4632 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4633 return build2 (NE_EXPR, truth_type,
4634 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4635 ll_arg, rl_arg),
4636 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4638 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4639 if (code == TRUTH_AND_EXPR
4640 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4641 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4642 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4643 return build2 (EQ_EXPR, truth_type,
4644 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4645 ll_arg, rl_arg),
4646 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4648 return build2 (code, truth_type, lhs, rhs);
4651 /* See if the comparisons can be merged. Then get all the parameters for
4652 each side. */
4654 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4655 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4656 return 0;
4658 volatilep = 0;
4659 ll_inner = decode_field_reference (ll_arg,
4660 &ll_bitsize, &ll_bitpos, &ll_mode,
4661 &ll_unsignedp, &volatilep, &ll_mask,
4662 &ll_and_mask);
4663 lr_inner = decode_field_reference (lr_arg,
4664 &lr_bitsize, &lr_bitpos, &lr_mode,
4665 &lr_unsignedp, &volatilep, &lr_mask,
4666 &lr_and_mask);
4667 rl_inner = decode_field_reference (rl_arg,
4668 &rl_bitsize, &rl_bitpos, &rl_mode,
4669 &rl_unsignedp, &volatilep, &rl_mask,
4670 &rl_and_mask);
4671 rr_inner = decode_field_reference (rr_arg,
4672 &rr_bitsize, &rr_bitpos, &rr_mode,
4673 &rr_unsignedp, &volatilep, &rr_mask,
4674 &rr_and_mask);
4676 /* It must be true that the inner operation on the lhs of each
4677 comparison must be the same if we are to be able to do anything.
4678 Then see if we have constants. If not, the same must be true for
4679 the rhs's. */
4680 if (volatilep || ll_inner == 0 || rl_inner == 0
4681 || ! operand_equal_p (ll_inner, rl_inner, 0))
4682 return 0;
4684 if (TREE_CODE (lr_arg) == INTEGER_CST
4685 && TREE_CODE (rr_arg) == INTEGER_CST)
4686 l_const = lr_arg, r_const = rr_arg;
4687 else if (lr_inner == 0 || rr_inner == 0
4688 || ! operand_equal_p (lr_inner, rr_inner, 0))
4689 return 0;
4690 else
4691 l_const = r_const = 0;
4693 /* If either comparison code is not correct for our logical operation,
4694 fail. However, we can convert a one-bit comparison against zero into
4695 the opposite comparison against that bit being set in the field. */
4697 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4698 if (lcode != wanted_code)
4700 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4702 /* Make the left operand unsigned, since we are only interested
4703 in the value of one bit. Otherwise we are doing the wrong
4704 thing below. */
4705 ll_unsignedp = 1;
4706 l_const = ll_mask;
4708 else
4709 return 0;
4712 /* This is analogous to the code for l_const above. */
4713 if (rcode != wanted_code)
4715 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4717 rl_unsignedp = 1;
4718 r_const = rl_mask;
4720 else
4721 return 0;
4724 /* After this point all optimizations will generate bit-field
4725 references, which we might not want. */
4726 if (! lang_hooks.can_use_bit_fields_p ())
4727 return 0;
4729 /* See if we can find a mode that contains both fields being compared on
4730 the left. If we can't, fail. Otherwise, update all constants and masks
4731 to be relative to a field of that size. */
4732 first_bit = MIN (ll_bitpos, rl_bitpos);
4733 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4734 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4735 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4736 volatilep);
4737 if (lnmode == VOIDmode)
4738 return 0;
4740 lnbitsize = GET_MODE_BITSIZE (lnmode);
4741 lnbitpos = first_bit & ~ (lnbitsize - 1);
4742 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4743 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4745 if (BYTES_BIG_ENDIAN)
4747 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4748 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4751 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4752 size_int (xll_bitpos), 0);
4753 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4754 size_int (xrl_bitpos), 0);
4756 if (l_const)
4758 l_const = fold_convert (lntype, l_const);
4759 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4760 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4761 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4762 fold (build1 (BIT_NOT_EXPR,
4763 lntype, ll_mask)),
4764 0)))
4766 warning ("comparison is always %d", wanted_code == NE_EXPR);
4768 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4771 if (r_const)
4773 r_const = fold_convert (lntype, r_const);
4774 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4775 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4776 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4777 fold (build1 (BIT_NOT_EXPR,
4778 lntype, rl_mask)),
4779 0)))
4781 warning ("comparison is always %d", wanted_code == NE_EXPR);
4783 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4787 /* If the right sides are not constant, do the same for it. Also,
4788 disallow this optimization if a size or signedness mismatch occurs
4789 between the left and right sides. */
4790 if (l_const == 0)
4792 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4793 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4794 /* Make sure the two fields on the right
4795 correspond to the left without being swapped. */
4796 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4797 return 0;
4799 first_bit = MIN (lr_bitpos, rr_bitpos);
4800 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4801 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4802 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4803 volatilep);
4804 if (rnmode == VOIDmode)
4805 return 0;
4807 rnbitsize = GET_MODE_BITSIZE (rnmode);
4808 rnbitpos = first_bit & ~ (rnbitsize - 1);
4809 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4810 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4812 if (BYTES_BIG_ENDIAN)
4814 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4815 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4818 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4819 size_int (xlr_bitpos), 0);
4820 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4821 size_int (xrr_bitpos), 0);
4823 /* Make a mask that corresponds to both fields being compared.
4824 Do this for both items being compared. If the operands are the
4825 same size and the bits being compared are in the same position
4826 then we can do this by masking both and comparing the masked
4827 results. */
4828 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4829 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4830 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4832 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4833 ll_unsignedp || rl_unsignedp);
4834 if (! all_ones_mask_p (ll_mask, lnbitsize))
4835 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4837 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4838 lr_unsignedp || rr_unsignedp);
4839 if (! all_ones_mask_p (lr_mask, rnbitsize))
4840 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4842 return build2 (wanted_code, truth_type, lhs, rhs);
4845 /* There is still another way we can do something: If both pairs of
4846 fields being compared are adjacent, we may be able to make a wider
4847 field containing them both.
4849 Note that we still must mask the lhs/rhs expressions. Furthermore,
4850 the mask must be shifted to account for the shift done by
4851 make_bit_field_ref. */
4852 if ((ll_bitsize + ll_bitpos == rl_bitpos
4853 && lr_bitsize + lr_bitpos == rr_bitpos)
4854 || (ll_bitpos == rl_bitpos + rl_bitsize
4855 && lr_bitpos == rr_bitpos + rr_bitsize))
4857 tree type;
4859 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4860 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4861 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4862 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4864 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4865 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4866 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4867 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4869 /* Convert to the smaller type before masking out unwanted bits. */
4870 type = lntype;
4871 if (lntype != rntype)
4873 if (lnbitsize > rnbitsize)
4875 lhs = fold_convert (rntype, lhs);
4876 ll_mask = fold_convert (rntype, ll_mask);
4877 type = rntype;
4879 else if (lnbitsize < rnbitsize)
4881 rhs = fold_convert (lntype, rhs);
4882 lr_mask = fold_convert (lntype, lr_mask);
4883 type = lntype;
4887 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4888 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4890 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4891 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4893 return build2 (wanted_code, truth_type, lhs, rhs);
4896 return 0;
4899 /* Handle the case of comparisons with constants. If there is something in
4900 common between the masks, those bits of the constants must be the same.
4901 If not, the condition is always false. Test for this to avoid generating
4902 incorrect code below. */
4903 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4904 if (! integer_zerop (result)
4905 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4906 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4908 if (wanted_code == NE_EXPR)
4910 warning ("`or' of unmatched not-equal tests is always 1");
4911 return constant_boolean_node (true, truth_type);
4913 else
4915 warning ("`and' of mutually exclusive equal-tests is always 0");
4916 return constant_boolean_node (false, truth_type);
4920 /* Construct the expression we will return. First get the component
4921 reference we will make. Unless the mask is all ones the width of
4922 that field, perform the mask operation. Then compare with the
4923 merged constant. */
4924 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4925 ll_unsignedp || rl_unsignedp);
4927 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4928 if (! all_ones_mask_p (ll_mask, lnbitsize))
4929 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4931 return build2 (wanted_code, truth_type, result,
4932 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4935 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4936 constant. */
4938 static tree
4939 optimize_minmax_comparison (tree t)
4941 tree type = TREE_TYPE (t);
4942 tree arg0 = TREE_OPERAND (t, 0);
4943 enum tree_code op_code;
4944 tree comp_const = TREE_OPERAND (t, 1);
4945 tree minmax_const;
4946 int consts_equal, consts_lt;
4947 tree inner;
4949 STRIP_SIGN_NOPS (arg0);
4951 op_code = TREE_CODE (arg0);
4952 minmax_const = TREE_OPERAND (arg0, 1);
4953 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4954 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4955 inner = TREE_OPERAND (arg0, 0);
4957 /* If something does not permit us to optimize, return the original tree. */
4958 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4959 || TREE_CODE (comp_const) != INTEGER_CST
4960 || TREE_CONSTANT_OVERFLOW (comp_const)
4961 || TREE_CODE (minmax_const) != INTEGER_CST
4962 || TREE_CONSTANT_OVERFLOW (minmax_const))
4963 return t;
4965 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4966 and GT_EXPR, doing the rest with recursive calls using logical
4967 simplifications. */
4968 switch (TREE_CODE (t))
4970 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4971 return
4972 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4974 case GE_EXPR:
4975 return
4976 fold (build2 (TRUTH_ORIF_EXPR, type,
4977 optimize_minmax_comparison
4978 (build2 (EQ_EXPR, type, arg0, comp_const)),
4979 optimize_minmax_comparison
4980 (build2 (GT_EXPR, type, arg0, comp_const))));
4982 case EQ_EXPR:
4983 if (op_code == MAX_EXPR && consts_equal)
4984 /* MAX (X, 0) == 0 -> X <= 0 */
4985 return fold (build2 (LE_EXPR, type, inner, comp_const));
4987 else if (op_code == MAX_EXPR && consts_lt)
4988 /* MAX (X, 0) == 5 -> X == 5 */
4989 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4991 else if (op_code == MAX_EXPR)
4992 /* MAX (X, 0) == -1 -> false */
4993 return omit_one_operand (type, integer_zero_node, inner);
4995 else if (consts_equal)
4996 /* MIN (X, 0) == 0 -> X >= 0 */
4997 return fold (build2 (GE_EXPR, type, inner, comp_const));
4999 else if (consts_lt)
5000 /* MIN (X, 0) == 5 -> false */
5001 return omit_one_operand (type, integer_zero_node, inner);
5003 else
5004 /* MIN (X, 0) == -1 -> X == -1 */
5005 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5007 case GT_EXPR:
5008 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5009 /* MAX (X, 0) > 0 -> X > 0
5010 MAX (X, 0) > 5 -> X > 5 */
5011 return fold (build2 (GT_EXPR, type, inner, comp_const));
5013 else if (op_code == MAX_EXPR)
5014 /* MAX (X, 0) > -1 -> true */
5015 return omit_one_operand (type, integer_one_node, inner);
5017 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5018 /* MIN (X, 0) > 0 -> false
5019 MIN (X, 0) > 5 -> false */
5020 return omit_one_operand (type, integer_zero_node, inner);
5022 else
5023 /* MIN (X, 0) > -1 -> X > -1 */
5024 return fold (build2 (GT_EXPR, type, inner, comp_const));
5026 default:
5027 return t;
5031 /* T is an integer expression that is being multiplied, divided, or taken a
5032 modulus (CODE says which and what kind of divide or modulus) by a
5033 constant C. See if we can eliminate that operation by folding it with
5034 other operations already in T. WIDE_TYPE, if non-null, is a type that
5035 should be used for the computation if wider than our type.
5037 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5038 (X * 2) + (Y * 4). We must, however, be assured that either the original
5039 expression would not overflow or that overflow is undefined for the type
5040 in the language in question.
5042 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5043 the machine has a multiply-accumulate insn or that this is part of an
5044 addressing calculation.
5046 If we return a non-null expression, it is an equivalent form of the
5047 original computation, but need not be in the original type. */
5049 static tree
5050 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5052 /* To avoid exponential search depth, refuse to allow recursion past
5053 three levels. Beyond that (1) it's highly unlikely that we'll find
5054 something interesting and (2) we've probably processed it before
5055 when we built the inner expression. */
5057 static int depth;
5058 tree ret;
5060 if (depth > 3)
5061 return NULL;
5063 depth++;
5064 ret = extract_muldiv_1 (t, c, code, wide_type);
5065 depth--;
5067 return ret;
5070 static tree
5071 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5073 tree type = TREE_TYPE (t);
5074 enum tree_code tcode = TREE_CODE (t);
5075 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5076 > GET_MODE_SIZE (TYPE_MODE (type)))
5077 ? wide_type : type);
5078 tree t1, t2;
5079 int same_p = tcode == code;
5080 tree op0 = NULL_TREE, op1 = NULL_TREE;
5082 /* Don't deal with constants of zero here; they confuse the code below. */
5083 if (integer_zerop (c))
5084 return NULL_TREE;
5086 if (TREE_CODE_CLASS (tcode) == '1')
5087 op0 = TREE_OPERAND (t, 0);
5089 if (TREE_CODE_CLASS (tcode) == '2')
5090 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5092 /* Note that we need not handle conditional operations here since fold
5093 already handles those cases. So just do arithmetic here. */
5094 switch (tcode)
5096 case INTEGER_CST:
5097 /* For a constant, we can always simplify if we are a multiply
5098 or (for divide and modulus) if it is a multiple of our constant. */
5099 if (code == MULT_EXPR
5100 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5101 return const_binop (code, fold_convert (ctype, t),
5102 fold_convert (ctype, c), 0);
5103 break;
5105 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5106 /* If op0 is an expression ... */
5107 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5108 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5109 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5110 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5111 /* ... and is unsigned, and its type is smaller than ctype,
5112 then we cannot pass through as widening. */
5113 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5114 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5115 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5116 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5117 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5118 /* ... or this is a truncation (t is narrower than op0),
5119 then we cannot pass through this narrowing. */
5120 || (GET_MODE_SIZE (TYPE_MODE (type))
5121 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5122 /* ... or signedness changes for division or modulus,
5123 then we cannot pass through this conversion. */
5124 || (code != MULT_EXPR
5125 && (TYPE_UNSIGNED (ctype)
5126 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5127 break;
5129 /* Pass the constant down and see if we can make a simplification. If
5130 we can, replace this expression with the inner simplification for
5131 possible later conversion to our or some other type. */
5132 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5133 && TREE_CODE (t2) == INTEGER_CST
5134 && ! TREE_CONSTANT_OVERFLOW (t2)
5135 && (0 != (t1 = extract_muldiv (op0, t2, code,
5136 code == MULT_EXPR
5137 ? ctype : NULL_TREE))))
5138 return t1;
5139 break;
5141 case NEGATE_EXPR: case ABS_EXPR:
5142 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5143 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5144 break;
5146 case MIN_EXPR: case MAX_EXPR:
5147 /* If widening the type changes the signedness, then we can't perform
5148 this optimization as that changes the result. */
5149 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5150 break;
5152 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5153 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5154 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5156 if (tree_int_cst_sgn (c) < 0)
5157 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5159 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5160 fold_convert (ctype, t2)));
5162 break;
5164 case LSHIFT_EXPR: case RSHIFT_EXPR:
5165 /* If the second operand is constant, this is a multiplication
5166 or floor division, by a power of two, so we can treat it that
5167 way unless the multiplier or divisor overflows. Signed
5168 left-shift overflow is implementation-defined rather than
5169 undefined in C90, so do not convert signed left shift into
5170 multiplication. */
5171 if (TREE_CODE (op1) == INTEGER_CST
5172 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5173 /* const_binop may not detect overflow correctly,
5174 so check for it explicitly here. */
5175 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5176 && TREE_INT_CST_HIGH (op1) == 0
5177 && 0 != (t1 = fold_convert (ctype,
5178 const_binop (LSHIFT_EXPR,
5179 size_one_node,
5180 op1, 0)))
5181 && ! TREE_OVERFLOW (t1))
5182 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5183 ? MULT_EXPR : FLOOR_DIV_EXPR,
5184 ctype, fold_convert (ctype, op0), t1),
5185 c, code, wide_type);
5186 break;
5188 case PLUS_EXPR: case MINUS_EXPR:
5189 /* See if we can eliminate the operation on both sides. If we can, we
5190 can return a new PLUS or MINUS. If we can't, the only remaining
5191 cases where we can do anything are if the second operand is a
5192 constant. */
5193 t1 = extract_muldiv (op0, c, code, wide_type);
5194 t2 = extract_muldiv (op1, c, code, wide_type);
5195 if (t1 != 0 && t2 != 0
5196 && (code == MULT_EXPR
5197 /* If not multiplication, we can only do this if both operands
5198 are divisible by c. */
5199 || (multiple_of_p (ctype, op0, c)
5200 && multiple_of_p (ctype, op1, c))))
5201 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5202 fold_convert (ctype, t2)));
5204 /* If this was a subtraction, negate OP1 and set it to be an addition.
5205 This simplifies the logic below. */
5206 if (tcode == MINUS_EXPR)
5207 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5209 if (TREE_CODE (op1) != INTEGER_CST)
5210 break;
5212 /* If either OP1 or C are negative, this optimization is not safe for
5213 some of the division and remainder types while for others we need
5214 to change the code. */
5215 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5217 if (code == CEIL_DIV_EXPR)
5218 code = FLOOR_DIV_EXPR;
5219 else if (code == FLOOR_DIV_EXPR)
5220 code = CEIL_DIV_EXPR;
5221 else if (code != MULT_EXPR
5222 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5223 break;
5226 /* If it's a multiply or a division/modulus operation of a multiple
5227 of our constant, do the operation and verify it doesn't overflow. */
5228 if (code == MULT_EXPR
5229 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5231 op1 = const_binop (code, fold_convert (ctype, op1),
5232 fold_convert (ctype, c), 0);
5233 /* We allow the constant to overflow with wrapping semantics. */
5234 if (op1 == 0
5235 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5236 break;
5238 else
5239 break;
5241	 /* If we have an unsigned type that is not a sizetype, we cannot widen
5242 the operation since it will change the result if the original
5243 computation overflowed. */
5244 if (TYPE_UNSIGNED (ctype)
5245 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5246 && ctype != type)
5247 break;
5249 /* If we were able to eliminate our operation from the first side,
5250 apply our operation to the second side and reform the PLUS. */
5251 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5252 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5254 /* The last case is if we are a multiply. In that case, we can
5255 apply the distributive law to commute the multiply and addition
5256 if the multiplication of the constants doesn't overflow. */
5257 if (code == MULT_EXPR)
5258 return fold (build2 (tcode, ctype,
5259 fold (build2 (code, ctype,
5260 fold_convert (ctype, op0),
5261 fold_convert (ctype, c))),
5262 op1));
5264 break;
5266 case MULT_EXPR:
5267 /* We have a special case here if we are doing something like
5268 (C * 8) % 4 since we know that's zero. */
5269 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5270 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5271 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5272 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5273 return omit_one_operand (type, integer_zero_node, op0);
5275 /* ... fall through ... */
5277 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5278 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5279 /* If we can extract our operation from the LHS, do so and return a
5280 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5281 do something only if the second operand is a constant. */
5282 if (same_p
5283 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5284 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5285 fold_convert (ctype, op1)));
5286 else if (tcode == MULT_EXPR && code == MULT_EXPR
5287 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5288 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5289 fold_convert (ctype, t1)));
5290 else if (TREE_CODE (op1) != INTEGER_CST)
5291 return 0;
5293 /* If these are the same operation types, we can associate them
5294 assuming no overflow. */
5295 if (tcode == code
5296 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5297 fold_convert (ctype, c), 0))
5298 && ! TREE_OVERFLOW (t1))
5299 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5301 /* If these operations "cancel" each other, we have the main
5302 optimizations of this pass, which occur when either constant is a
5303 multiple of the other, in which case we replace this with either an
5304 operation or CODE or TCODE.
5306 If we have an unsigned type that is not a sizetype, we cannot do
5307 this since it will change the result if the original computation
5308 overflowed. */
5309 if ((! TYPE_UNSIGNED (ctype)
5310 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5311 && ! flag_wrapv
5312 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5313 || (tcode == MULT_EXPR
5314 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5315 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5317 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5318 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5319 fold_convert (ctype,
5320 const_binop (TRUNC_DIV_EXPR,
5321 op1, c, 0))));
5322 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5323 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5324 fold_convert (ctype,
5325 const_binop (TRUNC_DIV_EXPR,
5326 c, op1, 0))));
5328 break;
5330 default:
5331 break;
5334 return 0;
5337 /* Return a node which has the indicated constant VALUE (either 0 or
5338 1), and is of the indicated TYPE. */
5340 tree
5341 constant_boolean_node (int value, tree type)
5343 if (type == integer_type_node)
5344 return value ? integer_one_node : integer_zero_node;
5345 else if (type == boolean_type_node)
5346 return value ? boolean_true_node : boolean_false_node;
5347 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5348 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5349 : integer_zero_node);
5350 else
5351 return build_int_cst (type, value, 0);
5354 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5355 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5356 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5357 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5358 COND is the first argument to CODE; otherwise (as in the example
5359 given here), it is the second argument. TYPE is the type of the
5360 original expression. Return NULL_TREE if no simplification is
5361 possible. */
static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception (its type is void), then
	 it does not make sense to try to perform a logical or
	 arithmetic operation involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      /* COND is a bare comparison: treat it as COND ? true : false
	 in the comparison's own type.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* Build CODE applied to ARG and each arm, honoring which side of
     CODE the conditional originally occupied.  */
  if (lhs == 0)
    lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
			     : build2 (code, type, arg, true_value));
  if (rhs == 0)
    rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
			     : build2 (code, type, arg, false_value));

  test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
  return fold_convert (type, test);
}
5410 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5412 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5413 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5414 ADDEND is the same as X.
5416 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5417 and finite. The problematic cases are when X is zero, and its mode
5418 has signed zeros. In the case of rounding towards -infinity,
5419 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5420 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5422 static bool
5423 fold_real_zero_addition_p (tree type, tree addend, int negate)
5425 if (!real_zerop (addend))
5426 return false;
5428 /* Don't allow the fold with -fsignaling-nans. */
5429 if (HONOR_SNANS (TYPE_MODE (type)))
5430 return false;
5432 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5433 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5434 return true;
5436 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5437 if (TREE_CODE (addend) == REAL_CST
5438 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5439 negate = !negate;
5441 /* The mode has signed zeros, and we have to honor their sign.
5442 In this situation, there is only one case we can return true for.
5443 X - 0 is the same as X unless rounding towards -infinity is
5444 supported. */
5445 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5448 /* Subroutine of fold() that checks comparisons of built-in math
5449 functions against real constants.
5451 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5452 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5453 is the type of the result and ARG0 and ARG1 are the operands of the
5454 comparison. ARG1 must be a TREE_REAL_CST.
5456 The function returns the constant folded tree if a simplification
5457 can be made, and NULL_TREE otherwise. */
static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      /* ARG0 is a call to sqrt; ARG is the argument of that call.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold (build2 (GE_EXPR, type, arg,
			       build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the constant: c2 = c*c, rounded into MODE.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold (build2 (EQ_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold (build2 (code, type, arg,
			       build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the constant: c2 = c*c, rounded into MODE.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold (build2 (NE_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold (build2 (GE_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), dconst0)));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
		 ARG is used twice below, so it must be safe to wrap in
		 a SAVE_EXPR; bail out when that is not possible.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold (build2 (TRUTH_ANDIF_EXPR, type,
				   fold (build2 (GE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     dconst0))),
				   fold (build2 (NE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     c2)))));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold (build2 (code, type, arg,
				 build_real (TREE_TYPE (arg), c2)));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build2 (TRUTH_ANDIF_EXPR, type,
				   fold (build2 (GE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     dconst0))),
				   fold (build2 (code, type, arg,
						 build_real (TREE_TYPE (arg),
							     c2)))));
	    }
	}
    }

  return NULL_TREE;
}
5574 /* Subroutine of fold() that optimizes comparisons against Infinities,
5575 either +Inf or -Inf.
5577 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5578 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5579 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5581 The function returns the constant folded tree if a simplification
5582 can be made, and NULL_TREE otherwise. */
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).
	 ARG0 is used twice, so it must be wrappable in a SAVE_EXPR.  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold (build2 (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
			     arg0, build_real (TREE_TYPE (arg0), max)));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
5657 /* Subroutine of fold() that optimizes comparisons of a division by
5658 a nonzero integer constant against an integer constant, i.e.
5659 X/C1 op C2.
5661 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5662 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5663	 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5665 The function returns the constant folded tree if a simplification
5666 can be made, and NULL_TREE otherwise. */
static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);	/* X in X/C1 op C2.  */
  tree arg01 = TREE_OPERAND (arg0, 1);	/* C1, the divisor.  */
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);

  /* Compute the range [LO, HI] of values of X for which X/C1 == C2,
     recording overflow of either bound via TREE_OVERFLOW.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      /* Unsigned division truncates towards zero: the range is
	 [C1*C2, C1*C2 + (C1-1)].  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0),
	 done by hand to detect unsigned overflow.  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      /* Signed division with a non-negative divisor: the range around
	 PROD depends on the sign of C2.  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case 0:
	  /* X/C1 == 0 for X in [-(C1-1), C1-1].  */
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  abort ();
	}
    }
  else
    {
      /* Signed division with a negative divisor; the bounds are the
	 mirror image of the non-negative-divisor case.  */
      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  abort ();
	}
    }

  /* Rewrite the comparison as a range check on X, degenerating to a
     single bound (or a constant result) where a bound overflowed.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (GE_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (LE_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (LT_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (GT_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (LT_EXPR, type, arg00, lo));

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (LE_EXPR, type, arg00, hi));

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (GT_EXPR, type, arg00, hi));

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (GE_EXPR, type, arg00, lo));

    default:
      break;
    }

  return NULL_TREE;
}
5799 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5800 equality/inequality test, then return a simplified form of
5801 the test using shifts and logical operations. Otherwise return
5802 NULL. TYPE is the desired result type. */
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			       result_type, fold_convert (stype, arg00),
			       fold_convert (stype, integer_zero_node)));
	}

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      /* Shift the tested bit down to bit position 0.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      /* For == 0, flip the low bit so the result is inverted.  */
      if (code == EQ_EXPR)
	inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
			      inner, integer_one_node));

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
5903 /* Check whether we are allowed to reorder operands arg0 and arg1,
5904 such that the evaluation of arg1 occurs before arg0. */
5906 static bool
5907 reorder_operands_p (tree arg0, tree arg1)
5909 if (! flag_evaluation_order)
5910 return true;
5911 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5912 return true;
5913 return ! TREE_SIDE_EFFECTS (arg0)
5914 && ! TREE_SIDE_EFFECTS (arg1);
5917 /* Test whether it is preferable two swap two operands, ARG0 and
5918 ARG1, for example because ARG0 is an integer constant and ARG1
5919 isn't. If REORDER is true, only recommend swapping if we can
5920 evaluate the operands in reverse order. */
5922 bool
5923 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5925 STRIP_SIGN_NOPS (arg0);
5926 STRIP_SIGN_NOPS (arg1);
5928 if (TREE_CODE (arg1) == INTEGER_CST)
5929 return 0;
5930 if (TREE_CODE (arg0) == INTEGER_CST)
5931 return 1;
5933 if (TREE_CODE (arg1) == REAL_CST)
5934 return 0;
5935 if (TREE_CODE (arg0) == REAL_CST)
5936 return 1;
5938 if (TREE_CODE (arg1) == COMPLEX_CST)
5939 return 0;
5940 if (TREE_CODE (arg0) == COMPLEX_CST)
5941 return 1;
5943 if (TREE_CONSTANT (arg1))
5944 return 0;
5945 if (TREE_CONSTANT (arg0))
5946 return 1;
5948 if (optimize_size)
5949 return 0;
5951 if (reorder && flag_evaluation_order
5952 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5953 return 0;
5955 if (DECL_P (arg1))
5956 return 0;
5957 if (DECL_P (arg0))
5958 return 1;
5960 if (reorder && flag_evaluation_order
5961 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5962 return 0;
5964 if (DECL_P (arg1))
5965 return 0;
5966 if (DECL_P (arg0))
5967 return 1;
5969 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5970 for commutative and comparison operators. Ensuring a canonical
5971 form allows the optimizers to find additional redundancies without
5972 having to explicitly check for both orderings. */
5973 if (TREE_CODE (arg0) == SSA_NAME
5974 && TREE_CODE (arg1) == SSA_NAME
5975 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5976 return 1;
5978 return 0;
5981 /* Perform constant folding and related simplification of EXPR.
5982 The related simplifications include x*1 => x, x*0 => 0, etc.,
5983 and application of the associative law.
5984 NOP_EXPR conversions may be removed freely (as long as we
5985 are careful not to change the type of the overall expression).
5986 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5987 but we can constant-fold them if they have constant operands. */
5989 #ifdef ENABLE_FOLD_CHECKING
5990 # define fold(x) fold_1 (x)
5991 static tree fold_1 (tree);
5992 static
5993 #endif
5994 tree
5995 fold (tree expr)
5997 const tree t = expr;
5998 const tree type = TREE_TYPE (expr);
5999 tree t1 = NULL_TREE;
6000 tree tem;
6001 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6002 enum tree_code code = TREE_CODE (t);
6003 int kind = TREE_CODE_CLASS (code);
6005 /* WINS will be nonzero when the switch is done
6006 if all operands are constant. */
6007 int wins = 1;
6009 /* Return right away if a constant. */
6010 if (kind == 'c')
6011 return t;
6013 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6015 tree subop;
6017 /* Special case for conversion ops that can have fixed point args. */
6018 arg0 = TREE_OPERAND (t, 0);
6020 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6021 if (arg0 != 0)
6022 STRIP_SIGN_NOPS (arg0);
6024 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6025 subop = TREE_REALPART (arg0);
6026 else
6027 subop = arg0;
6029 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6030 && TREE_CODE (subop) != REAL_CST)
6031 /* Note that TREE_CONSTANT isn't enough:
6032 static var addresses are constant but we can't
6033 do arithmetic on them. */
6034 wins = 0;
6036 else if (IS_EXPR_CODE_CLASS (kind))
6038 int len = first_rtl_op (code);
6039 int i;
6040 for (i = 0; i < len; i++)
6042 tree op = TREE_OPERAND (t, i);
6043 tree subop;
6045 if (op == 0)
6046 continue; /* Valid for CALL_EXPR, at least. */
6048 /* Strip any conversions that don't change the mode. This is
6049 safe for every expression, except for a comparison expression
6050 because its signedness is derived from its operands. So, in
6051 the latter case, only strip conversions that don't change the
6052 signedness.
6054 Note that this is done as an internal manipulation within the
6055 constant folder, in order to find the simplest representation
6056 of the arguments so that their form can be studied. In any
6057 cases, the appropriate type conversions should be put back in
6058 the tree that will get out of the constant folder. */
6059 if (kind == '<')
6060 STRIP_SIGN_NOPS (op);
6061 else
6062 STRIP_NOPS (op);
6064 if (TREE_CODE (op) == COMPLEX_CST)
6065 subop = TREE_REALPART (op);
6066 else
6067 subop = op;
6069 if (TREE_CODE (subop) != INTEGER_CST
6070 && TREE_CODE (subop) != REAL_CST)
6071 /* Note that TREE_CONSTANT isn't enough:
6072 static var addresses are constant but we can't
6073 do arithmetic on them. */
6074 wins = 0;
6076 if (i == 0)
6077 arg0 = op;
6078 else if (i == 1)
6079 arg1 = op;
6083 /* If this is a commutative operation, and ARG0 is a constant, move it
6084 to ARG1 to reduce the number of tests below. */
6085 if (commutative_tree_code (code)
6086 && tree_swap_operands_p (arg0, arg1, true))
6087 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6088 TREE_OPERAND (t, 0)));
6090 /* Now WINS is set as described above,
6091 ARG0 is the first operand of EXPR,
6092 and ARG1 is the second operand (if it has more than one operand).
6094 First check for cases where an arithmetic operation is applied to a
6095 compound, conditional, or comparison operation. Push the arithmetic
6096 operation inside the compound or conditional to see if any folding
6097 can then be done. Convert comparison to conditional for this purpose.
6098	 This also optimizes non-constant cases that used to be done in
6099 expand_expr.
6101 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6102 one of the operands is a comparison and the other is a comparison, a
6103 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6104 code below would make the expression more complex. Change it to a
6105 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6106 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6108 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6109 || code == EQ_EXPR || code == NE_EXPR)
6110 && ((truth_value_p (TREE_CODE (arg0))
6111 && (truth_value_p (TREE_CODE (arg1))
6112 || (TREE_CODE (arg1) == BIT_AND_EXPR
6113 && integer_onep (TREE_OPERAND (arg1, 1)))))
6114 || (truth_value_p (TREE_CODE (arg1))
6115 && (truth_value_p (TREE_CODE (arg0))
6116 || (TREE_CODE (arg0) == BIT_AND_EXPR
6117 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6119 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6120 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6121 : TRUTH_XOR_EXPR,
6122 type, fold_convert (boolean_type_node, arg0),
6123 fold_convert (boolean_type_node, arg1)));
6125 if (code == EQ_EXPR)
6126 tem = invert_truthvalue (tem);
6128 return tem;
6131 if (TREE_CODE_CLASS (code) == '1')
6133 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6134 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6135 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6136 else if (TREE_CODE (arg0) == COND_EXPR)
6138 tree arg01 = TREE_OPERAND (arg0, 1);
6139 tree arg02 = TREE_OPERAND (arg0, 2);
6140 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6141 arg01 = fold (build1 (code, type, arg01));
6142 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6143 arg02 = fold (build1 (code, type, arg02));
6144 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6145 arg01, arg02));
6147 /* If this was a conversion, and all we did was to move into
6148 inside the COND_EXPR, bring it back out. But leave it if
6149 it is a conversion from integer to integer and the
6150 result precision is no wider than a word since such a
6151 conversion is cheap and may be optimized away by combine,
6152 while it couldn't if it were outside the COND_EXPR. Then return
6153 so we don't get into an infinite recursion loop taking the
6154 conversion out and then back in. */
6156 if ((code == NOP_EXPR || code == CONVERT_EXPR
6157 || code == NON_LVALUE_EXPR)
6158 && TREE_CODE (tem) == COND_EXPR
6159 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6160 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6161 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6162 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6163 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6164 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6165 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6166 && (INTEGRAL_TYPE_P
6167 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6168 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6169 tem = build1 (code, type,
6170 build3 (COND_EXPR,
6171 TREE_TYPE (TREE_OPERAND
6172 (TREE_OPERAND (tem, 1), 0)),
6173 TREE_OPERAND (tem, 0),
6174 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6175 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6176 return tem;
6178 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6180 if (TREE_CODE (type) == BOOLEAN_TYPE)
6182 arg0 = copy_node (arg0);
6183 TREE_TYPE (arg0) = type;
6184 return arg0;
6186 else if (TREE_CODE (type) != INTEGER_TYPE)
6187 return fold (build3 (COND_EXPR, type, arg0,
6188 fold (build1 (code, type,
6189 integer_one_node)),
6190 fold (build1 (code, type,
6191 integer_zero_node))));
6194 else if (TREE_CODE_CLASS (code) == '<'
6195 && TREE_CODE (arg0) == COMPOUND_EXPR)
6196 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6197 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6198 else if (TREE_CODE_CLASS (code) == '<'
6199 && TREE_CODE (arg1) == COMPOUND_EXPR)
6200 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6201 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6202 else if (TREE_CODE_CLASS (code) == '2'
6203 || TREE_CODE_CLASS (code) == '<')
6205 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6206 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6207 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6208 arg1)));
6209 if (TREE_CODE (arg1) == COMPOUND_EXPR
6210 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6211 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6212 fold (build2 (code, type,
6213 arg0, TREE_OPERAND (arg1, 1))));
6215 if (TREE_CODE (arg0) == COND_EXPR
6216 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6218 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6219 /*cond_first_p=*/1);
6220 if (tem != NULL_TREE)
6221 return tem;
6224 if (TREE_CODE (arg1) == COND_EXPR
6225 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6227 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6228 /*cond_first_p=*/0);
6229 if (tem != NULL_TREE)
6230 return tem;
6234 switch (code)
6236 case CONST_DECL:
6237 return fold (DECL_INITIAL (t));
6239 case NOP_EXPR:
6240 case FLOAT_EXPR:
6241 case CONVERT_EXPR:
6242 case FIX_TRUNC_EXPR:
6243 case FIX_CEIL_EXPR:
6244 case FIX_FLOOR_EXPR:
6245 case FIX_ROUND_EXPR:
6246 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6247 return TREE_OPERAND (t, 0);
6249 /* Handle cases of two conversions in a row. */
6250 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6251 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6253 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6254 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6255 int inside_int = INTEGRAL_TYPE_P (inside_type);
6256 int inside_ptr = POINTER_TYPE_P (inside_type);
6257 int inside_float = FLOAT_TYPE_P (inside_type);
6258 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6259 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6260 int inter_int = INTEGRAL_TYPE_P (inter_type);
6261 int inter_ptr = POINTER_TYPE_P (inter_type);
6262 int inter_float = FLOAT_TYPE_P (inter_type);
6263 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6264 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6265 int final_int = INTEGRAL_TYPE_P (type);
6266 int final_ptr = POINTER_TYPE_P (type);
6267 int final_float = FLOAT_TYPE_P (type);
6268 unsigned int final_prec = TYPE_PRECISION (type);
6269 int final_unsignedp = TYPE_UNSIGNED (type);
6271 /* In addition to the cases of two conversions in a row
6272 handled below, if we are converting something to its own
6273 type via an object of identical or wider precision, neither
6274 conversion is needed. */
6275 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6276 && ((inter_int && final_int) || (inter_float && final_float))
6277 && inter_prec >= final_prec)
6278 return fold (build1 (code, type,
6279 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6281 /* Likewise, if the intermediate and final types are either both
6282 float or both integer, we don't need the middle conversion if
6283 it is wider than the final type and doesn't change the signedness
6284 (for integers). Avoid this if the final type is a pointer
6285 since then we sometimes need the inner conversion. Likewise if
6286 the outer has a precision not equal to the size of its mode. */
6287 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6288 || (inter_float && inside_float))
6289 && inter_prec >= inside_prec
6290 && (inter_float || inter_unsignedp == inside_unsignedp)
6291 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6292 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6293 && ! final_ptr)
6294 return fold (build1 (code, type,
6295 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6297 /* If we have a sign-extension of a zero-extended value, we can
6298 replace that by a single zero-extension. */
6299 if (inside_int && inter_int && final_int
6300 && inside_prec < inter_prec && inter_prec < final_prec
6301 && inside_unsignedp && !inter_unsignedp)
6302 return fold (build1 (code, type,
6303 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6305 /* Two conversions in a row are not needed unless:
6306 - some conversion is floating-point (overstrict for now), or
6307 - the intermediate type is narrower than both initial and
6308 final, or
6309 - the intermediate type and innermost type differ in signedness,
6310 and the outermost type is wider than the intermediate, or
6311 - the initial type is a pointer type and the precisions of the
6312 intermediate and final types differ, or
6313 - the final type is a pointer type and the precisions of the
6314 initial and intermediate types differ. */
6315 if (! inside_float && ! inter_float && ! final_float
6316 && (inter_prec > inside_prec || inter_prec > final_prec)
6317 && ! (inside_int && inter_int
6318 && inter_unsignedp != inside_unsignedp
6319 && inter_prec < final_prec)
6320 && ((inter_unsignedp && inter_prec > inside_prec)
6321 == (final_unsignedp && final_prec > inter_prec))
6322 && ! (inside_ptr && inter_prec != final_prec)
6323 && ! (final_ptr && inside_prec != inter_prec)
6324 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6325 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6326 && ! final_ptr)
6327 return fold (build1 (code, type,
6328 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6331 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6332 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6333 /* Detect assigning a bitfield. */
6334 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6335 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6337 /* Don't leave an assignment inside a conversion
6338 unless assigning a bitfield. */
6339 tree prev = TREE_OPERAND (t, 0);
6340 tem = copy_node (t);
6341 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6342 /* First do the assignment, then return converted constant. */
6343 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6344 TREE_NO_WARNING (tem) = 1;
6345 TREE_USED (tem) = 1;
6346 return tem;
6349 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6350 constant (if x has signed type, the sign bit cannot be set
6351 in c). This folds extension into the BIT_AND_EXPR. */
6352 if (INTEGRAL_TYPE_P (type)
6353 && TREE_CODE (type) != BOOLEAN_TYPE
6354 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6355 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6357 tree and = TREE_OPERAND (t, 0);
6358 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6359 int change = 0;
6361 if (TYPE_UNSIGNED (TREE_TYPE (and))
6362 || (TYPE_PRECISION (type)
6363 <= TYPE_PRECISION (TREE_TYPE (and))))
6364 change = 1;
6365 else if (TYPE_PRECISION (TREE_TYPE (and1))
6366 <= HOST_BITS_PER_WIDE_INT
6367 && host_integerp (and1, 1))
6369 unsigned HOST_WIDE_INT cst;
6371 cst = tree_low_cst (and1, 1);
6372 cst &= (HOST_WIDE_INT) -1
6373 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6374 change = (cst == 0);
6375 #ifdef LOAD_EXTEND_OP
6376 if (change
6377 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6378 == ZERO_EXTEND))
6380 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6381 and0 = fold_convert (uns, and0);
6382 and1 = fold_convert (uns, and1);
6384 #endif
6386 if (change)
6387 return fold (build2 (BIT_AND_EXPR, type,
6388 fold_convert (type, and0),
6389 fold_convert (type, and1)));
6392 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6393 T2 being pointers to types of the same size. */
6394 if (POINTER_TYPE_P (TREE_TYPE (t))
6395 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6396 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6397 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6399 tree arg00 = TREE_OPERAND (arg0, 0);
6400 tree t0 = TREE_TYPE (t);
6401 tree t1 = TREE_TYPE (arg00);
6402 tree tt0 = TREE_TYPE (t0);
6403 tree tt1 = TREE_TYPE (t1);
6404 tree s0 = TYPE_SIZE (tt0);
6405 tree s1 = TYPE_SIZE (tt1);
6407 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6408 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6409 TREE_OPERAND (arg0, 1));
6412 tem = fold_convert_const (code, type, arg0);
6413 return tem ? tem : t;
6415 case VIEW_CONVERT_EXPR:
6416 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6417 return build1 (VIEW_CONVERT_EXPR, type,
6418 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6419 return t;
6421 case COMPONENT_REF:
6422 if (TREE_CODE (arg0) == CONSTRUCTOR
6423 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6425 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6426 if (m)
6427 return TREE_VALUE (m);
6429 return t;
6431 case RANGE_EXPR:
6432 if (TREE_CONSTANT (t) != wins)
6434 tem = copy_node (t);
6435 TREE_CONSTANT (tem) = wins;
6436 TREE_INVARIANT (tem) = wins;
6437 return tem;
6439 return t;
6441 case NEGATE_EXPR:
6442 if (negate_expr_p (arg0))
6443 return fold_convert (type, negate_expr (arg0));
6444 return t;
6446 case ABS_EXPR:
6447 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6448 return fold_abs_const (arg0, type);
6449 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6450 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6451 /* Convert fabs((double)float) into (double)fabsf(float). */
6452 else if (TREE_CODE (arg0) == NOP_EXPR
6453 && TREE_CODE (type) == REAL_TYPE)
6455 tree targ0 = strip_float_extensions (arg0);
6456 if (targ0 != arg0)
6457 return fold_convert (type, fold (build1 (ABS_EXPR,
6458 TREE_TYPE (targ0),
6459 targ0)));
6461 else if (tree_expr_nonnegative_p (arg0))
6462 return arg0;
6463 return t;
6465 case CONJ_EXPR:
6466 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6467 return fold_convert (type, arg0);
6468 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6469 return build2 (COMPLEX_EXPR, type,
6470 TREE_OPERAND (arg0, 0),
6471 negate_expr (TREE_OPERAND (arg0, 1)));
6472 else if (TREE_CODE (arg0) == COMPLEX_CST)
6473 return build_complex (type, TREE_REALPART (arg0),
6474 negate_expr (TREE_IMAGPART (arg0)));
6475 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6476 return fold (build2 (TREE_CODE (arg0), type,
6477 fold (build1 (CONJ_EXPR, type,
6478 TREE_OPERAND (arg0, 0))),
6479 fold (build1 (CONJ_EXPR, type,
6480 TREE_OPERAND (arg0, 1)))));
6481 else if (TREE_CODE (arg0) == CONJ_EXPR)
6482 return TREE_OPERAND (arg0, 0);
6483 return t;
6485 case BIT_NOT_EXPR:
6486 if (TREE_CODE (arg0) == INTEGER_CST)
6487 return fold_not_const (arg0, type);
6488 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6489 return TREE_OPERAND (arg0, 0);
6490 return t;
6492 case PLUS_EXPR:
6493 /* A + (-B) -> A - B */
6494 if (TREE_CODE (arg1) == NEGATE_EXPR)
6495 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6496 /* (-A) + B -> B - A */
6497 if (TREE_CODE (arg0) == NEGATE_EXPR
6498 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6499 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6500 if (! FLOAT_TYPE_P (type))
6502 if (integer_zerop (arg1))
6503 return non_lvalue (fold_convert (type, arg0));
6505 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6506 with a constant, and the two constants have no bits in common,
6507 we should treat this as a BIT_IOR_EXPR since this may produce more
6508 simplifications. */
6509 if (TREE_CODE (arg0) == BIT_AND_EXPR
6510 && TREE_CODE (arg1) == BIT_AND_EXPR
6511 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6512 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6513 && integer_zerop (const_binop (BIT_AND_EXPR,
6514 TREE_OPERAND (arg0, 1),
6515 TREE_OPERAND (arg1, 1), 0)))
6517 code = BIT_IOR_EXPR;
6518 goto bit_ior;
6521 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6522 (plus (plus (mult) (mult)) (foo)) so that we can
6523 take advantage of the factoring cases below. */
6524 if ((TREE_CODE (arg0) == PLUS_EXPR
6525 && TREE_CODE (arg1) == MULT_EXPR)
6526 || (TREE_CODE (arg1) == PLUS_EXPR
6527 && TREE_CODE (arg0) == MULT_EXPR))
6529 tree parg0, parg1, parg, marg;
6531 if (TREE_CODE (arg0) == PLUS_EXPR)
6532 parg = arg0, marg = arg1;
6533 else
6534 parg = arg1, marg = arg0;
6535 parg0 = TREE_OPERAND (parg, 0);
6536 parg1 = TREE_OPERAND (parg, 1);
6537 STRIP_NOPS (parg0);
6538 STRIP_NOPS (parg1);
6540 if (TREE_CODE (parg0) == MULT_EXPR
6541 && TREE_CODE (parg1) != MULT_EXPR)
6542 return fold (build2 (PLUS_EXPR, type,
6543 fold (build2 (PLUS_EXPR, type,
6544 fold_convert (type, parg0),
6545 fold_convert (type, marg))),
6546 fold_convert (type, parg1)));
6547 if (TREE_CODE (parg0) != MULT_EXPR
6548 && TREE_CODE (parg1) == MULT_EXPR)
6549 return fold (build2 (PLUS_EXPR, type,
6550 fold (build2 (PLUS_EXPR, type,
6551 fold_convert (type, parg1),
6552 fold_convert (type, marg))),
6553 fold_convert (type, parg0)));
6556 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6558 tree arg00, arg01, arg10, arg11;
6559 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6561 /* (A * C) + (B * C) -> (A+B) * C.
6562 We are most concerned about the case where C is a constant,
6563 but other combinations show up during loop reduction. Since
6564 it is not difficult, try all four possibilities. */
6566 arg00 = TREE_OPERAND (arg0, 0);
6567 arg01 = TREE_OPERAND (arg0, 1);
6568 arg10 = TREE_OPERAND (arg1, 0);
6569 arg11 = TREE_OPERAND (arg1, 1);
6570 same = NULL_TREE;
6572 if (operand_equal_p (arg01, arg11, 0))
6573 same = arg01, alt0 = arg00, alt1 = arg10;
6574 else if (operand_equal_p (arg00, arg10, 0))
6575 same = arg00, alt0 = arg01, alt1 = arg11;
6576 else if (operand_equal_p (arg00, arg11, 0))
6577 same = arg00, alt0 = arg01, alt1 = arg10;
6578 else if (operand_equal_p (arg01, arg10, 0))
6579 same = arg01, alt0 = arg00, alt1 = arg11;
6581 /* No identical multiplicands; see if we can find a common
6582 power-of-two factor in non-power-of-two multiplies. This
6583 can help in multi-dimensional array access. */
6584 else if (TREE_CODE (arg01) == INTEGER_CST
6585 && TREE_CODE (arg11) == INTEGER_CST
6586 && TREE_INT_CST_HIGH (arg01) == 0
6587 && TREE_INT_CST_HIGH (arg11) == 0)
6589 HOST_WIDE_INT int01, int11, tmp;
6590 int01 = TREE_INT_CST_LOW (arg01);
6591 int11 = TREE_INT_CST_LOW (arg11);
6593 /* Move min of absolute values to int11. */
6594 if ((int01 >= 0 ? int01 : -int01)
6595 < (int11 >= 0 ? int11 : -int11))
6597 tmp = int01, int01 = int11, int11 = tmp;
6598 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6599 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6602 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6604 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6605 build_int_cst (NULL_TREE,
6606 int01 / int11, 0)));
6607 alt1 = arg10;
6608 same = arg11;
6612 if (same)
6613 return fold (build2 (MULT_EXPR, type,
6614 fold (build2 (PLUS_EXPR, type,
6615 alt0, alt1)),
6616 same));
6619 else
6621 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6622 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6623 return non_lvalue (fold_convert (type, arg0));
6625 /* Likewise if the operands are reversed. */
6626 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6627 return non_lvalue (fold_convert (type, arg1));
6629 /* Convert X + -C into X - C. */
6630 if (TREE_CODE (arg1) == REAL_CST
6631 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6633 tem = fold_negate_const (arg1, type);
6634 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6635 return fold (build2 (MINUS_EXPR, type,
6636 fold_convert (type, arg0),
6637 fold_convert (type, tem)));
6640 /* Convert x+x into x*2.0. */
6641 if (operand_equal_p (arg0, arg1, 0)
6642 && SCALAR_FLOAT_TYPE_P (type))
6643 return fold (build2 (MULT_EXPR, type, arg0,
6644 build_real (type, dconst2)));
6646 /* Convert x*c+x into x*(c+1). */
6647 if (flag_unsafe_math_optimizations
6648 && TREE_CODE (arg0) == MULT_EXPR
6649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6650 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6651 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6653 REAL_VALUE_TYPE c;
6655 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6656 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6657 return fold (build2 (MULT_EXPR, type, arg1,
6658 build_real (type, c)));
6661 /* Convert x+x*c into x*(c+1). */
6662 if (flag_unsafe_math_optimizations
6663 && TREE_CODE (arg1) == MULT_EXPR
6664 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6665 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6666 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6668 REAL_VALUE_TYPE c;
6670 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6671 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6672 return fold (build2 (MULT_EXPR, type, arg0,
6673 build_real (type, c)));
6676 /* Convert x*c1+x*c2 into x*(c1+c2). */
6677 if (flag_unsafe_math_optimizations
6678 && TREE_CODE (arg0) == MULT_EXPR
6679 && TREE_CODE (arg1) == MULT_EXPR
6680 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6681 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6682 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6683 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6684 && operand_equal_p (TREE_OPERAND (arg0, 0),
6685 TREE_OPERAND (arg1, 0), 0))
6687 REAL_VALUE_TYPE c1, c2;
6689 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6690 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6691 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6692 return fold (build2 (MULT_EXPR, type,
6693 TREE_OPERAND (arg0, 0),
6694 build_real (type, c1)));
6696 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6697 if (flag_unsafe_math_optimizations
6698 && TREE_CODE (arg1) == PLUS_EXPR
6699 && TREE_CODE (arg0) != MULT_EXPR)
6701 tree tree10 = TREE_OPERAND (arg1, 0);
6702 tree tree11 = TREE_OPERAND (arg1, 1);
6703 if (TREE_CODE (tree11) == MULT_EXPR
6704 && TREE_CODE (tree10) == MULT_EXPR)
6706 tree tree0;
6707 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6708 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6711 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6712 if (flag_unsafe_math_optimizations
6713 && TREE_CODE (arg0) == PLUS_EXPR
6714 && TREE_CODE (arg1) != MULT_EXPR)
6716 tree tree00 = TREE_OPERAND (arg0, 0);
6717 tree tree01 = TREE_OPERAND (arg0, 1);
6718 if (TREE_CODE (tree01) == MULT_EXPR
6719 && TREE_CODE (tree00) == MULT_EXPR)
6721 tree tree0;
6722 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6723 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6728 bit_rotate:
6729 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6730 is a rotate of A by C1 bits. */
6731 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6732 is a rotate of A by B bits. */
6734 enum tree_code code0, code1;
6735 code0 = TREE_CODE (arg0);
6736 code1 = TREE_CODE (arg1);
6737 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6738 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6739 && operand_equal_p (TREE_OPERAND (arg0, 0),
6740 TREE_OPERAND (arg1, 0), 0)
6741 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6743 tree tree01, tree11;
6744 enum tree_code code01, code11;
6746 tree01 = TREE_OPERAND (arg0, 1);
6747 tree11 = TREE_OPERAND (arg1, 1);
6748 STRIP_NOPS (tree01);
6749 STRIP_NOPS (tree11);
6750 code01 = TREE_CODE (tree01);
6751 code11 = TREE_CODE (tree11);
6752 if (code01 == INTEGER_CST
6753 && code11 == INTEGER_CST
6754 && TREE_INT_CST_HIGH (tree01) == 0
6755 && TREE_INT_CST_HIGH (tree11) == 0
6756 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6757 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6758 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6759 code0 == LSHIFT_EXPR ? tree01 : tree11);
6760 else if (code11 == MINUS_EXPR)
6762 tree tree110, tree111;
6763 tree110 = TREE_OPERAND (tree11, 0);
6764 tree111 = TREE_OPERAND (tree11, 1);
6765 STRIP_NOPS (tree110);
6766 STRIP_NOPS (tree111);
6767 if (TREE_CODE (tree110) == INTEGER_CST
6768 && 0 == compare_tree_int (tree110,
6769 TYPE_PRECISION
6770 (TREE_TYPE (TREE_OPERAND
6771 (arg0, 0))))
6772 && operand_equal_p (tree01, tree111, 0))
6773 return build2 ((code0 == LSHIFT_EXPR
6774 ? LROTATE_EXPR
6775 : RROTATE_EXPR),
6776 type, TREE_OPERAND (arg0, 0), tree01);
6778 else if (code01 == MINUS_EXPR)
6780 tree tree010, tree011;
6781 tree010 = TREE_OPERAND (tree01, 0);
6782 tree011 = TREE_OPERAND (tree01, 1);
6783 STRIP_NOPS (tree010);
6784 STRIP_NOPS (tree011);
6785 if (TREE_CODE (tree010) == INTEGER_CST
6786 && 0 == compare_tree_int (tree010,
6787 TYPE_PRECISION
6788 (TREE_TYPE (TREE_OPERAND
6789 (arg0, 0))))
6790 && operand_equal_p (tree11, tree011, 0))
6791 return build2 ((code0 != LSHIFT_EXPR
6792 ? LROTATE_EXPR
6793 : RROTATE_EXPR),
6794 type, TREE_OPERAND (arg0, 0), tree11);
6799 associate:
6800 /* In most languages, can't associate operations on floats through
6801 parentheses. Rather than remember where the parentheses were, we
6802 don't associate floats at all, unless the user has specified
6803 -funsafe-math-optimizations. */
6805 if (! wins
6806 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6808 tree var0, con0, lit0, minus_lit0;
6809 tree var1, con1, lit1, minus_lit1;
6811 /* Split both trees into variables, constants, and literals. Then
6812 associate each group together, the constants with literals,
6813 then the result with variables. This increases the chances of
6814 literals being recombined later and of generating relocatable
6815 expressions for the sum of a constant and literal. */
6816 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6817 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6818 code == MINUS_EXPR);
6820 /* Only do something if we found more than two objects. Otherwise,
6821 nothing has changed and we risk infinite recursion. */
6822 if (2 < ((var0 != 0) + (var1 != 0)
6823 + (con0 != 0) + (con1 != 0)
6824 + (lit0 != 0) + (lit1 != 0)
6825 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6827 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6828 if (code == MINUS_EXPR)
6829 code = PLUS_EXPR;
6831 var0 = associate_trees (var0, var1, code, type);
6832 con0 = associate_trees (con0, con1, code, type);
6833 lit0 = associate_trees (lit0, lit1, code, type);
6834 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6836 /* Preserve the MINUS_EXPR if the negative part of the literal is
6837 greater than the positive part. Otherwise, the multiplicative
6838 folding code (i.e extract_muldiv) may be fooled in case
6839 unsigned constants are subtracted, like in the following
6840 example: ((X*2 + 4) - 8U)/2. */
6841 if (minus_lit0 && lit0)
6843 if (TREE_CODE (lit0) == INTEGER_CST
6844 && TREE_CODE (minus_lit0) == INTEGER_CST
6845 && tree_int_cst_lt (lit0, minus_lit0))
6847 minus_lit0 = associate_trees (minus_lit0, lit0,
6848 MINUS_EXPR, type);
6849 lit0 = 0;
6851 else
6853 lit0 = associate_trees (lit0, minus_lit0,
6854 MINUS_EXPR, type);
6855 minus_lit0 = 0;
6858 if (minus_lit0)
6860 if (con0 == 0)
6861 return fold_convert (type,
6862 associate_trees (var0, minus_lit0,
6863 MINUS_EXPR, type));
6864 else
6866 con0 = associate_trees (con0, minus_lit0,
6867 MINUS_EXPR, type);
6868 return fold_convert (type,
6869 associate_trees (var0, con0,
6870 PLUS_EXPR, type));
6874 con0 = associate_trees (con0, lit0, code, type);
6875 return fold_convert (type, associate_trees (var0, con0,
6876 code, type));
6880 binary:
6881 if (wins)
6882 t1 = const_binop (code, arg0, arg1, 0);
6883 if (t1 != NULL_TREE)
6885 /* The return value should always have
6886 the same type as the original expression. */
6887 if (TREE_TYPE (t1) != type)
6888 t1 = fold_convert (type, t1);
6890 return t1;
6892 return t;
6894 case MINUS_EXPR:
6895 /* A - (-B) -> A + B */
6896 if (TREE_CODE (arg1) == NEGATE_EXPR)
6897 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6898 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6899 if (TREE_CODE (arg0) == NEGATE_EXPR
6900 && (FLOAT_TYPE_P (type)
6901 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6902 && negate_expr_p (arg1)
6903 && reorder_operands_p (arg0, arg1))
6904 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6905 TREE_OPERAND (arg0, 0)));
6907 if (! FLOAT_TYPE_P (type))
6909 if (! wins && integer_zerop (arg0))
6910 return negate_expr (fold_convert (type, arg1));
6911 if (integer_zerop (arg1))
6912 return non_lvalue (fold_convert (type, arg0));
6914 /* Fold A - (A & B) into ~B & A. */
6915 if (!TREE_SIDE_EFFECTS (arg0)
6916 && TREE_CODE (arg1) == BIT_AND_EXPR)
6918 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6919 return fold (build2 (BIT_AND_EXPR, type,
6920 fold (build1 (BIT_NOT_EXPR, type,
6921 TREE_OPERAND (arg1, 0))),
6922 arg0));
6923 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6924 return fold (build2 (BIT_AND_EXPR, type,
6925 fold (build1 (BIT_NOT_EXPR, type,
6926 TREE_OPERAND (arg1, 1))),
6927 arg0));
6930 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6931 any power of 2 minus 1. */
6932 if (TREE_CODE (arg0) == BIT_AND_EXPR
6933 && TREE_CODE (arg1) == BIT_AND_EXPR
6934 && operand_equal_p (TREE_OPERAND (arg0, 0),
6935 TREE_OPERAND (arg1, 0), 0))
6937 tree mask0 = TREE_OPERAND (arg0, 1);
6938 tree mask1 = TREE_OPERAND (arg1, 1);
6939 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6941 if (operand_equal_p (tem, mask1, 0))
6943 tem = fold (build2 (BIT_XOR_EXPR, type,
6944 TREE_OPERAND (arg0, 0), mask1));
6945 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6950 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6951 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6952 return non_lvalue (fold_convert (type, arg0));
6954 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6955 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6956 (-ARG1 + ARG0) reduces to -ARG1. */
6957 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6958 return negate_expr (fold_convert (type, arg1));
6960 /* Fold &x - &x. This can happen from &x.foo - &x.
6961 This is unsafe for certain floats even in non-IEEE formats.
6962 In IEEE, it is unsafe because it does wrong for NaNs.
6963 Also note that operand_equal_p is always false if an operand
6964 is volatile. */
6966 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6967 && operand_equal_p (arg0, arg1, 0))
6968 return fold_convert (type, integer_zero_node);
6970 /* A - B -> A + (-B) if B is easily negatable. */
6971 if (!wins && negate_expr_p (arg1)
6972 && ((FLOAT_TYPE_P (type)
6973 /* Avoid this transformation if B is a positive REAL_CST. */
6974 && (TREE_CODE (arg1) != REAL_CST
6975 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6976 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6977 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6979 if (TREE_CODE (arg0) == MULT_EXPR
6980 && TREE_CODE (arg1) == MULT_EXPR
6981 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6983 /* (A * C) - (B * C) -> (A-B) * C. */
6984 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6985 TREE_OPERAND (arg1, 1), 0))
6986 return fold (build2 (MULT_EXPR, type,
6987 fold (build2 (MINUS_EXPR, type,
6988 TREE_OPERAND (arg0, 0),
6989 TREE_OPERAND (arg1, 0))),
6990 TREE_OPERAND (arg0, 1)));
6991 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6992 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6993 TREE_OPERAND (arg1, 0), 0))
6994 return fold (build2 (MULT_EXPR, type,
6995 TREE_OPERAND (arg0, 0),
6996 fold (build2 (MINUS_EXPR, type,
6997 TREE_OPERAND (arg0, 1),
6998 TREE_OPERAND (arg1, 1)))));
7001 goto associate;
7003 case MULT_EXPR:
7004 /* (-A) * (-B) -> A * B */
7005 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7006 return fold (build2 (MULT_EXPR, type,
7007 TREE_OPERAND (arg0, 0),
7008 negate_expr (arg1)));
7009 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7010 return fold (build2 (MULT_EXPR, type,
7011 negate_expr (arg0),
7012 TREE_OPERAND (arg1, 0)));
7014 if (! FLOAT_TYPE_P (type))
7016 if (integer_zerop (arg1))
7017 return omit_one_operand (type, arg1, arg0);
7018 if (integer_onep (arg1))
7019 return non_lvalue (fold_convert (type, arg0));
7021 /* (a * (1 << b)) is (a << b) */
7022 if (TREE_CODE (arg1) == LSHIFT_EXPR
7023 && integer_onep (TREE_OPERAND (arg1, 0)))
7024 return fold (build2 (LSHIFT_EXPR, type, arg0,
7025 TREE_OPERAND (arg1, 1)));
7026 if (TREE_CODE (arg0) == LSHIFT_EXPR
7027 && integer_onep (TREE_OPERAND (arg0, 0)))
7028 return fold (build2 (LSHIFT_EXPR, type, arg1,
7029 TREE_OPERAND (arg0, 1)));
7031 if (TREE_CODE (arg1) == INTEGER_CST
7032 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7033 fold_convert (type, arg1),
7034 code, NULL_TREE)))
7035 return fold_convert (type, tem);
7038 else
7040 /* Maybe fold x * 0 to 0. The expressions aren't the same
7041 when x is NaN, since x * 0 is also NaN. Nor are they the
7042 same in modes with signed zeros, since multiplying a
7043 negative value by 0 gives -0, not +0. */
7044 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7045 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7046 && real_zerop (arg1))
7047 return omit_one_operand (type, arg1, arg0);
7048 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7049 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7050 && real_onep (arg1))
7051 return non_lvalue (fold_convert (type, arg0));
7053 /* Transform x * -1.0 into -x. */
7054 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7055 && real_minus_onep (arg1))
7056 return fold_convert (type, negate_expr (arg0));
7058 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7059 if (flag_unsafe_math_optimizations
7060 && TREE_CODE (arg0) == RDIV_EXPR
7061 && TREE_CODE (arg1) == REAL_CST
7062 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7064 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7065 arg1, 0);
7066 if (tem)
7067 return fold (build2 (RDIV_EXPR, type, tem,
7068 TREE_OPERAND (arg0, 1)));
7071 if (flag_unsafe_math_optimizations)
7073 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7074 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7076 /* Optimizations of root(...)*root(...). */
7077 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7079 tree rootfn, arg, arglist;
7080 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7081 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7083 /* Optimize sqrt(x)*sqrt(x) as x. */
7084 if (BUILTIN_SQRT_P (fcode0)
7085 && operand_equal_p (arg00, arg10, 0)
7086 && ! HONOR_SNANS (TYPE_MODE (type)))
7087 return arg00;
7089 /* Optimize root(x)*root(y) as root(x*y). */
7090 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7091 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7092 arglist = build_tree_list (NULL_TREE, arg);
7093 return build_function_call_expr (rootfn, arglist);
7096 /* Optimize expN(x)*expN(y) as expN(x+y). */
7097 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7099 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7100 tree arg = build2 (PLUS_EXPR, type,
7101 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7102 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7103 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7104 return build_function_call_expr (expfn, arglist);
7107 /* Optimizations of pow(...)*pow(...). */
7108 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7109 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7110 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7112 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7113 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7114 1)));
7115 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7116 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7117 1)));
7119 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7120 if (operand_equal_p (arg01, arg11, 0))
7122 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7123 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7124 tree arglist = tree_cons (NULL_TREE, fold (arg),
7125 build_tree_list (NULL_TREE,
7126 arg01));
7127 return build_function_call_expr (powfn, arglist);
7130 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7131 if (operand_equal_p (arg00, arg10, 0))
7133 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7134 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7135 tree arglist = tree_cons (NULL_TREE, arg00,
7136 build_tree_list (NULL_TREE,
7137 arg));
7138 return build_function_call_expr (powfn, arglist);
7142 /* Optimize tan(x)*cos(x) as sin(x). */
7143 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7144 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7145 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7146 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7147 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7148 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7149 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7150 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7152 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7154 if (sinfn != NULL_TREE)
7155 return build_function_call_expr (sinfn,
7156 TREE_OPERAND (arg0, 1));
7159 /* Optimize x*pow(x,c) as pow(x,c+1). */
7160 if (fcode1 == BUILT_IN_POW
7161 || fcode1 == BUILT_IN_POWF
7162 || fcode1 == BUILT_IN_POWL)
7164 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7165 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7166 1)));
7167 if (TREE_CODE (arg11) == REAL_CST
7168 && ! TREE_CONSTANT_OVERFLOW (arg11)
7169 && operand_equal_p (arg0, arg10, 0))
7171 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7172 REAL_VALUE_TYPE c;
7173 tree arg, arglist;
7175 c = TREE_REAL_CST (arg11);
7176 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7177 arg = build_real (type, c);
7178 arglist = build_tree_list (NULL_TREE, arg);
7179 arglist = tree_cons (NULL_TREE, arg0, arglist);
7180 return build_function_call_expr (powfn, arglist);
7184 /* Optimize pow(x,c)*x as pow(x,c+1). */
7185 if (fcode0 == BUILT_IN_POW
7186 || fcode0 == BUILT_IN_POWF
7187 || fcode0 == BUILT_IN_POWL)
7189 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7190 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7191 1)));
7192 if (TREE_CODE (arg01) == REAL_CST
7193 && ! TREE_CONSTANT_OVERFLOW (arg01)
7194 && operand_equal_p (arg1, arg00, 0))
7196 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7197 REAL_VALUE_TYPE c;
7198 tree arg, arglist;
7200 c = TREE_REAL_CST (arg01);
7201 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7202 arg = build_real (type, c);
7203 arglist = build_tree_list (NULL_TREE, arg);
7204 arglist = tree_cons (NULL_TREE, arg1, arglist);
7205 return build_function_call_expr (powfn, arglist);
7209 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7210 if (! optimize_size
7211 && operand_equal_p (arg0, arg1, 0))
7213 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7215 if (powfn)
7217 tree arg = build_real (type, dconst2);
7218 tree arglist = build_tree_list (NULL_TREE, arg);
7219 arglist = tree_cons (NULL_TREE, arg0, arglist);
7220 return build_function_call_expr (powfn, arglist);
7225 goto associate;
7227 case BIT_IOR_EXPR:
7228 bit_ior:
7229 if (integer_all_onesp (arg1))
7230 return omit_one_operand (type, arg1, arg0);
7231 if (integer_zerop (arg1))
7232 return non_lvalue (fold_convert (type, arg0));
7233 if (operand_equal_p (arg0, arg1, 0))
7234 return non_lvalue (fold_convert (type, arg0));
7236 /* ~X | X is -1. */
7237 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7238 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7240 t1 = build_int_cst (type, -1, -1);
7241 t1 = force_fit_type (t1, 0, false, false);
7242 return omit_one_operand (type, t1, arg1);
7245 /* X | ~X is -1. */
7246 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7247 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7249 t1 = build_int_cst (type, -1, -1);
7250 t1 = force_fit_type (t1, 0, false, false);
7251 return omit_one_operand (type, t1, arg0);
7254 t1 = distribute_bit_expr (code, type, arg0, arg1);
7255 if (t1 != NULL_TREE)
7256 return t1;
7258 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7260 This results in more efficient code for machines without a NAND
7261 instruction. Combine will canonicalize to the first form
7262 which will allow use of NAND instructions provided by the
7263 backend if they exist. */
7264 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7265 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7267 return fold (build1 (BIT_NOT_EXPR, type,
7268 build2 (BIT_AND_EXPR, type,
7269 TREE_OPERAND (arg0, 0),
7270 TREE_OPERAND (arg1, 0))));
7273 /* See if this can be simplified into a rotate first. If that
7274 is unsuccessful continue in the association code. */
7275 goto bit_rotate;
7277 case BIT_XOR_EXPR:
7278 if (integer_zerop (arg1))
7279 return non_lvalue (fold_convert (type, arg0));
7280 if (integer_all_onesp (arg1))
7281 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7282 if (operand_equal_p (arg0, arg1, 0))
7283 return omit_one_operand (type, integer_zero_node, arg0);
7285 /* ~X ^ X is -1. */
7286 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7287 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7289 t1 = build_int_cst (type, -1, -1);
7290 t1 = force_fit_type (t1, 0, false, false);
7291 return omit_one_operand (type, t1, arg1);
7294 /* X ^ ~X is -1. */
7295 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7296 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7298 t1 = build_int_cst (type, -1, -1);
7299 t1 = force_fit_type (t1, 0, false, false);
7300 return omit_one_operand (type, t1, arg0);
7303 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7304 with a constant, and the two constants have no bits in common,
7305 we should treat this as a BIT_IOR_EXPR since this may produce more
7306 simplifications. */
7307 if (TREE_CODE (arg0) == BIT_AND_EXPR
7308 && TREE_CODE (arg1) == BIT_AND_EXPR
7309 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7310 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7311 && integer_zerop (const_binop (BIT_AND_EXPR,
7312 TREE_OPERAND (arg0, 1),
7313 TREE_OPERAND (arg1, 1), 0)))
7315 code = BIT_IOR_EXPR;
7316 goto bit_ior;
7319 /* See if this can be simplified into a rotate first. If that
7320 is unsuccessful continue in the association code. */
7321 goto bit_rotate;
7323 case BIT_AND_EXPR:
7324 if (integer_all_onesp (arg1))
7325 return non_lvalue (fold_convert (type, arg0));
7326 if (integer_zerop (arg1))
7327 return omit_one_operand (type, arg1, arg0);
7328 if (operand_equal_p (arg0, arg1, 0))
7329 return non_lvalue (fold_convert (type, arg0));
7331 /* ~X & X is always zero. */
7332 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7333 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7334 return omit_one_operand (type, integer_zero_node, arg1);
7336 /* X & ~X is always zero. */
7337 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7338 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7339 return omit_one_operand (type, integer_zero_node, arg0);
7341 t1 = distribute_bit_expr (code, type, arg0, arg1);
7342 if (t1 != NULL_TREE)
7343 return t1;
7344 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7345 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7346 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7348 unsigned int prec
7349 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7351 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7352 && (~TREE_INT_CST_LOW (arg1)
7353 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7354 return fold_convert (type, TREE_OPERAND (arg0, 0));
7357 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7359 This results in more efficient code for machines without a NOR
7360 instruction. Combine will canonicalize to the first form
7361 which will allow use of NOR instructions provided by the
7362 backend if they exist. */
7363 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7364 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7366 return fold (build1 (BIT_NOT_EXPR, type,
7367 build2 (BIT_IOR_EXPR, type,
7368 TREE_OPERAND (arg0, 0),
7369 TREE_OPERAND (arg1, 0))));
7372 goto associate;
7374 case RDIV_EXPR:
7375 /* Don't touch a floating-point divide by zero unless the mode
7376 of the constant can represent infinity. */
7377 if (TREE_CODE (arg1) == REAL_CST
7378 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7379 && real_zerop (arg1))
7380 return t;
7382 /* (-A) / (-B) -> A / B */
7383 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7384 return fold (build2 (RDIV_EXPR, type,
7385 TREE_OPERAND (arg0, 0),
7386 negate_expr (arg1)));
7387 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7388 return fold (build2 (RDIV_EXPR, type,
7389 negate_expr (arg0),
7390 TREE_OPERAND (arg1, 0)));
7392 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7393 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7394 && real_onep (arg1))
7395 return non_lvalue (fold_convert (type, arg0));
7397 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7398 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7399 && real_minus_onep (arg1))
7400 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7402 /* If ARG1 is a constant, we can convert this to a multiply by the
7403 reciprocal. This does not have the same rounding properties,
7404 so only do this if -funsafe-math-optimizations. We can actually
7405 always safely do it if ARG1 is a power of two, but it's hard to
7406 tell if it is or not in a portable manner. */
7407 if (TREE_CODE (arg1) == REAL_CST)
7409 if (flag_unsafe_math_optimizations
7410 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7411 arg1, 0)))
7412 return fold (build2 (MULT_EXPR, type, arg0, tem));
7413 /* Find the reciprocal if optimizing and the result is exact. */
7414 if (optimize)
7416 REAL_VALUE_TYPE r;
7417 r = TREE_REAL_CST (arg1);
7418 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7420 tem = build_real (type, r);
7421 return fold (build2 (MULT_EXPR, type, arg0, tem));
7425 /* Convert A/B/C to A/(B*C). */
7426 if (flag_unsafe_math_optimizations
7427 && TREE_CODE (arg0) == RDIV_EXPR)
7428 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7429 fold (build2 (MULT_EXPR, type,
7430 TREE_OPERAND (arg0, 1), arg1))));
7432 /* Convert A/(B/C) to (A/B)*C. */
7433 if (flag_unsafe_math_optimizations
7434 && TREE_CODE (arg1) == RDIV_EXPR)
7435 return fold (build2 (MULT_EXPR, type,
7436 fold (build2 (RDIV_EXPR, type, arg0,
7437 TREE_OPERAND (arg1, 0))),
7438 TREE_OPERAND (arg1, 1)));
7440 /* Convert C1/(X*C2) into (C1/C2)/X. */
7441 if (flag_unsafe_math_optimizations
7442 && TREE_CODE (arg1) == MULT_EXPR
7443 && TREE_CODE (arg0) == REAL_CST
7444 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7446 tree tem = const_binop (RDIV_EXPR, arg0,
7447 TREE_OPERAND (arg1, 1), 0);
7448 if (tem)
7449 return fold (build2 (RDIV_EXPR, type, tem,
7450 TREE_OPERAND (arg1, 0)));
7453 if (flag_unsafe_math_optimizations)
7455 enum built_in_function fcode = builtin_mathfn_code (arg1);
7456 /* Optimize x/expN(y) into x*expN(-y). */
7457 if (BUILTIN_EXPONENT_P (fcode))
7459 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7460 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7461 tree arglist = build_tree_list (NULL_TREE,
7462 fold_convert (type, arg));
7463 arg1 = build_function_call_expr (expfn, arglist);
7464 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7467 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7468 if (fcode == BUILT_IN_POW
7469 || fcode == BUILT_IN_POWF
7470 || fcode == BUILT_IN_POWL)
7472 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7473 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7474 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7475 tree neg11 = fold_convert (type, negate_expr (arg11));
7476 tree arglist = tree_cons(NULL_TREE, arg10,
7477 build_tree_list (NULL_TREE, neg11));
7478 arg1 = build_function_call_expr (powfn, arglist);
7479 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7483 if (flag_unsafe_math_optimizations)
7485 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7486 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7488 /* Optimize sin(x)/cos(x) as tan(x). */
7489 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7490 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7491 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7492 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7493 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7495 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7497 if (tanfn != NULL_TREE)
7498 return build_function_call_expr (tanfn,
7499 TREE_OPERAND (arg0, 1));
7502 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7503 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7504 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7505 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7506 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7507 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7509 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7511 if (tanfn != NULL_TREE)
7513 tree tmp = TREE_OPERAND (arg0, 1);
7514 tmp = build_function_call_expr (tanfn, tmp);
7515 return fold (build2 (RDIV_EXPR, type,
7516 build_real (type, dconst1), tmp));
7520 /* Optimize pow(x,c)/x as pow(x,c-1). */
7521 if (fcode0 == BUILT_IN_POW
7522 || fcode0 == BUILT_IN_POWF
7523 || fcode0 == BUILT_IN_POWL)
7525 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7526 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7527 if (TREE_CODE (arg01) == REAL_CST
7528 && ! TREE_CONSTANT_OVERFLOW (arg01)
7529 && operand_equal_p (arg1, arg00, 0))
7531 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7532 REAL_VALUE_TYPE c;
7533 tree arg, arglist;
7535 c = TREE_REAL_CST (arg01);
7536 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7537 arg = build_real (type, c);
7538 arglist = build_tree_list (NULL_TREE, arg);
7539 arglist = tree_cons (NULL_TREE, arg1, arglist);
7540 return build_function_call_expr (powfn, arglist);
7544 goto binary;
7546 case TRUNC_DIV_EXPR:
7547 case ROUND_DIV_EXPR:
7548 case FLOOR_DIV_EXPR:
7549 case CEIL_DIV_EXPR:
7550 case EXACT_DIV_EXPR:
7551 if (integer_onep (arg1))
7552 return non_lvalue (fold_convert (type, arg0));
7553 if (integer_zerop (arg1))
7554 return t;
7555 /* X / -1 is -X. */
7556 if (!TYPE_UNSIGNED (type)
7557 && TREE_CODE (arg1) == INTEGER_CST
7558 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7559 && TREE_INT_CST_HIGH (arg1) == -1)
7560 return fold_convert (type, negate_expr (arg0));
7562 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7563 operation, EXACT_DIV_EXPR.
7565 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7566 At one time others generated faster code, it's not clear if they do
7567 after the last round to changes to the DIV code in expmed.c. */
7568 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7569 && multiple_of_p (type, arg0, arg1))
7570 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7572 if (TREE_CODE (arg1) == INTEGER_CST
7573 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7574 code, NULL_TREE)))
7575 return fold_convert (type, tem);
7577 goto binary;
7579 case CEIL_MOD_EXPR:
7580 case FLOOR_MOD_EXPR:
7581 case ROUND_MOD_EXPR:
7582 case TRUNC_MOD_EXPR:
7583 if (integer_onep (arg1))
7584 return omit_one_operand (type, integer_zero_node, arg0);
7585 if (integer_zerop (arg1))
7586 return t;
7588 /* X % -1 is zero. */
7589 if (!TYPE_UNSIGNED (type)
7590 && TREE_CODE (arg1) == INTEGER_CST
7591 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7592 && TREE_INT_CST_HIGH (arg1) == -1)
7593 return omit_one_operand (type, integer_zero_node, arg0);
7595 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7596 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7597 if (code == TRUNC_MOD_EXPR
7598 && TYPE_UNSIGNED (type)
7599 && integer_pow2p (arg1))
7601 unsigned HOST_WIDE_INT high, low;
7602 tree mask;
7603 int l;
7605 l = tree_log2 (arg1);
7606 if (l >= HOST_BITS_PER_WIDE_INT)
7608 high = ((unsigned HOST_WIDE_INT) 1
7609 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7610 low = -1;
7612 else
7614 high = 0;
7615 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7618 mask = build_int_cst (type, low, high);
7619 return fold (build2 (BIT_AND_EXPR, type,
7620 fold_convert (type, arg0), mask));
7623 /* X % -C is the same as X % C. */
7624 if (code == TRUNC_MOD_EXPR
7625 && !TYPE_UNSIGNED (type)
7626 && TREE_CODE (arg1) == INTEGER_CST
7627 && TREE_INT_CST_HIGH (arg1) < 0
7628 && !flag_trapv
7629 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7630 && !sign_bit_p (arg1, arg1))
7631 return fold (build2 (code, type, fold_convert (type, arg0),
7632 fold_convert (type, negate_expr (arg1))));
7634 /* X % -Y is the same as X % Y. */
7635 if (code == TRUNC_MOD_EXPR
7636 && !TYPE_UNSIGNED (type)
7637 && TREE_CODE (arg1) == NEGATE_EXPR
7638 && !flag_trapv)
7639 return fold (build2 (code, type, fold_convert (type, arg0),
7640 fold_convert (type, TREE_OPERAND (arg1, 0))));
7642 if (TREE_CODE (arg1) == INTEGER_CST
7643 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7644 code, NULL_TREE)))
7645 return fold_convert (type, tem);
7647 goto binary;
7649 case LROTATE_EXPR:
7650 case RROTATE_EXPR:
7651 if (integer_all_onesp (arg0))
7652 return omit_one_operand (type, arg0, arg1);
7653 goto shift;
7655 case RSHIFT_EXPR:
7656 /* Optimize -1 >> x for arithmetic right shifts. */
7657 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7658 return omit_one_operand (type, arg0, arg1);
7659 /* ... fall through ... */
7661 case LSHIFT_EXPR:
7662 shift:
7663 if (integer_zerop (arg1))
7664 return non_lvalue (fold_convert (type, arg0));
7665 if (integer_zerop (arg0))
7666 return omit_one_operand (type, arg0, arg1);
7668 /* Since negative shift count is not well-defined,
7669 don't try to compute it in the compiler. */
7670 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7671 return t;
7672 /* Rewrite an LROTATE_EXPR by a constant into an
7673 RROTATE_EXPR by a new constant. */
7674 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7676 tree tem = build_int_cst (NULL_TREE,
7677 GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7678 tem = fold_convert (TREE_TYPE (arg1), tem);
7679 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7680 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7683 /* If we have a rotate of a bit operation with the rotate count and
7684 the second operand of the bit operation both constant,
7685 permute the two operations. */
7686 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7687 && (TREE_CODE (arg0) == BIT_AND_EXPR
7688 || TREE_CODE (arg0) == BIT_IOR_EXPR
7689 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7690 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7691 return fold (build2 (TREE_CODE (arg0), type,
7692 fold (build2 (code, type,
7693 TREE_OPERAND (arg0, 0), arg1)),
7694 fold (build2 (code, type,
7695 TREE_OPERAND (arg0, 1), arg1))));
7697 /* Two consecutive rotates adding up to the width of the mode can
7698 be ignored. */
7699 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7700 && TREE_CODE (arg0) == RROTATE_EXPR
7701 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7702 && TREE_INT_CST_HIGH (arg1) == 0
7703 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7704 && ((TREE_INT_CST_LOW (arg1)
7705 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7706 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7707 return TREE_OPERAND (arg0, 0);
7709 goto binary;
7711 case MIN_EXPR:
7712 if (operand_equal_p (arg0, arg1, 0))
7713 return omit_one_operand (type, arg0, arg1);
7714 if (INTEGRAL_TYPE_P (type)
7715 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7716 return omit_one_operand (type, arg1, arg0);
7717 goto associate;
7719 case MAX_EXPR:
7720 if (operand_equal_p (arg0, arg1, 0))
7721 return omit_one_operand (type, arg0, arg1);
7722 if (INTEGRAL_TYPE_P (type)
7723 && TYPE_MAX_VALUE (type)
7724 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7725 return omit_one_operand (type, arg1, arg0);
7726 goto associate;
7728 case TRUTH_NOT_EXPR:
7729 /* The argument to invert_truthvalue must have Boolean type. */
7730 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7731 arg0 = fold_convert (boolean_type_node, arg0);
7733 /* Note that the operand of this must be an int
7734 and its values must be 0 or 1.
7735 ("true" is a fixed value perhaps depending on the language,
7736 but we don't handle values other than 1 correctly yet.) */
7737 tem = invert_truthvalue (arg0);
7738 /* Avoid infinite recursion. */
7739 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7741 tem = fold_single_bit_test (code, arg0, arg1, type);
7742 if (tem)
7743 return tem;
7744 return t;
7746 return fold_convert (type, tem);
7748 case TRUTH_ANDIF_EXPR:
7749 /* Note that the operands of this must be ints
7750 and their values must be 0 or 1.
7751 ("true" is a fixed value perhaps depending on the language.) */
7752 /* If first arg is constant zero, return it. */
7753 if (integer_zerop (arg0))
7754 return fold_convert (type, arg0);
7755 case TRUTH_AND_EXPR:
7756 /* If either arg is constant true, drop it. */
7757 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7758 return non_lvalue (fold_convert (type, arg1));
7759 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7760 /* Preserve sequence points. */
7761 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7762 return non_lvalue (fold_convert (type, arg0));
7763 /* If second arg is constant zero, result is zero, but first arg
7764 must be evaluated. */
7765 if (integer_zerop (arg1))
7766 return omit_one_operand (type, arg1, arg0);
7767 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7768 case will be handled here. */
7769 if (integer_zerop (arg0))
7770 return omit_one_operand (type, arg0, arg1);
7772 /* !X && X is always false. */
7773 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7774 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7775 return omit_one_operand (type, integer_zero_node, arg1);
7776 /* X && !X is always false. */
7777 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7778 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7779 return omit_one_operand (type, integer_zero_node, arg0);
7781 truth_andor:
7782 /* We only do these simplifications if we are optimizing. */
7783 if (!optimize)
7784 return t;
7786 /* Check for things like (A || B) && (A || C). We can convert this
7787 to A || (B && C). Note that either operator can be any of the four
7788 truth and/or operations and the transformation will still be
7789 valid. Also note that we only care about order for the
7790 ANDIF and ORIF operators. If B contains side effects, this
7791 might change the truth-value of A. */
7792 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7793 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7794 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7795 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7796 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7797 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7799 tree a00 = TREE_OPERAND (arg0, 0);
7800 tree a01 = TREE_OPERAND (arg0, 1);
7801 tree a10 = TREE_OPERAND (arg1, 0);
7802 tree a11 = TREE_OPERAND (arg1, 1);
7803 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7804 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7805 && (code == TRUTH_AND_EXPR
7806 || code == TRUTH_OR_EXPR));
7808 if (operand_equal_p (a00, a10, 0))
7809 return fold (build2 (TREE_CODE (arg0), type, a00,
7810 fold (build2 (code, type, a01, a11))));
7811 else if (commutative && operand_equal_p (a00, a11, 0))
7812 return fold (build2 (TREE_CODE (arg0), type, a00,
7813 fold (build2 (code, type, a01, a10))));
7814 else if (commutative && operand_equal_p (a01, a10, 0))
7815 return fold (build2 (TREE_CODE (arg0), type, a01,
7816 fold (build2 (code, type, a00, a11))));
7818 /* This case if tricky because we must either have commutative
7819 operators or else A10 must not have side-effects. */
7821 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7822 && operand_equal_p (a01, a11, 0))
7823 return fold (build2 (TREE_CODE (arg0), type,
7824 fold (build2 (code, type, a00, a10)),
7825 a01));
7828 /* See if we can build a range comparison. */
7829 if (0 != (tem = fold_range_test (t)))
7830 return tem;
7832 /* Check for the possibility of merging component references. If our
7833 lhs is another similar operation, try to merge its rhs with our
7834 rhs. Then try to merge our lhs and rhs. */
7835 if (TREE_CODE (arg0) == code
7836 && 0 != (tem = fold_truthop (code, type,
7837 TREE_OPERAND (arg0, 1), arg1)))
7838 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7840 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7841 return tem;
7843 return t;
7845 case TRUTH_ORIF_EXPR:
7846 /* Note that the operands of this must be ints
7847 and their values must be 0 or true.
7848 ("true" is a fixed value perhaps depending on the language.) */
7849 /* If first arg is constant true, return it. */
7850 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7851 return fold_convert (type, arg0);
7852 case TRUTH_OR_EXPR:
7853 /* If either arg is constant zero, drop it. */
7854 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7855 return non_lvalue (fold_convert (type, arg1));
7856 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7857 /* Preserve sequence points. */
7858 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7859 return non_lvalue (fold_convert (type, arg0));
7860 /* If second arg is constant true, result is true, but we must
7861 evaluate first arg. */
7862 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7863 return omit_one_operand (type, arg1, arg0);
7864 /* Likewise for first arg, but note this only occurs here for
7865 TRUTH_OR_EXPR. */
7866 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7867 return omit_one_operand (type, arg0, arg1);
7869 /* !X || X is always true. */
7870 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7871 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7872 return omit_one_operand (type, integer_one_node, arg1);
7873 /* X || !X is always true. */
7874 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7875 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7876 return omit_one_operand (type, integer_one_node, arg0);
7878 goto truth_andor;
7880 case TRUTH_XOR_EXPR:
7881 /* If the second arg is constant zero, drop it. */
7882 if (integer_zerop (arg1))
7883 return non_lvalue (fold_convert (type, arg0));
7884 /* If the second arg is constant true, this is a logical inversion. */
7885 if (integer_onep (arg1))
7886 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7887 /* Identical arguments cancel to zero. */
7888 if (operand_equal_p (arg0, arg1, 0))
7889 return omit_one_operand (type, integer_zero_node, arg0);
7891 /* !X ^ X is always true. */
7892 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7893 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7894 return omit_one_operand (type, integer_one_node, arg1);
7896 /* X ^ !X is always true. */
7897 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7898 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7899 return omit_one_operand (type, integer_one_node, arg0);
7901 return t;
7903 case EQ_EXPR:
7904 case NE_EXPR:
7905 case LT_EXPR:
7906 case GT_EXPR:
7907 case LE_EXPR:
7908 case GE_EXPR:
7909 /* If one arg is a real or integer constant, put it last. */
7910 if (tree_swap_operands_p (arg0, arg1, true))
7911 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7913 /* If this is an equality comparison of the address of a non-weak
7914 object against zero, then we know the result. */
7915 if ((code == EQ_EXPR || code == NE_EXPR)
7916 && TREE_CODE (arg0) == ADDR_EXPR
7917 && DECL_P (TREE_OPERAND (arg0, 0))
7918 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7919 && integer_zerop (arg1))
7920 return constant_boolean_node (code != EQ_EXPR, type);
7922 /* If this is an equality comparison of the address of two non-weak,
7923 unaliased symbols neither of which are extern (since we do not
7924 have access to attributes for externs), then we know the result. */
7925 if ((code == EQ_EXPR || code == NE_EXPR)
7926 && TREE_CODE (arg0) == ADDR_EXPR
7927 && DECL_P (TREE_OPERAND (arg0, 0))
7928 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7929 && ! lookup_attribute ("alias",
7930 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7931 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7932 && TREE_CODE (arg1) == ADDR_EXPR
7933 && DECL_P (TREE_OPERAND (arg1, 0))
7934 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7935 && ! lookup_attribute ("alias",
7936 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7937 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7938 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7939 ? code == EQ_EXPR : code != EQ_EXPR,
7940 type);
7942 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7944 tree targ0 = strip_float_extensions (arg0);
7945 tree targ1 = strip_float_extensions (arg1);
7946 tree newtype = TREE_TYPE (targ0);
7948 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7949 newtype = TREE_TYPE (targ1);
7951 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7952 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7953 return fold (build2 (code, type, fold_convert (newtype, targ0),
7954 fold_convert (newtype, targ1)));
7956 /* (-a) CMP (-b) -> b CMP a */
7957 if (TREE_CODE (arg0) == NEGATE_EXPR
7958 && TREE_CODE (arg1) == NEGATE_EXPR)
7959 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7960 TREE_OPERAND (arg0, 0)));
7962 if (TREE_CODE (arg1) == REAL_CST)
7964 REAL_VALUE_TYPE cst;
7965 cst = TREE_REAL_CST (arg1);
7967 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7968 if (TREE_CODE (arg0) == NEGATE_EXPR)
7969 return
7970 fold (build2 (swap_tree_comparison (code), type,
7971 TREE_OPERAND (arg0, 0),
7972 build_real (TREE_TYPE (arg1),
7973 REAL_VALUE_NEGATE (cst))));
7975 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7976 /* a CMP (-0) -> a CMP 0 */
7977 if (REAL_VALUE_MINUS_ZERO (cst))
7978 return fold (build2 (code, type, arg0,
7979 build_real (TREE_TYPE (arg1), dconst0)));
7981 /* x != NaN is always true, other ops are always false. */
7982 if (REAL_VALUE_ISNAN (cst)
7983 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7985 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7986 return omit_one_operand (type, tem, arg0);
7989 /* Fold comparisons against infinity. */
7990 if (REAL_VALUE_ISINF (cst))
7992 tem = fold_inf_compare (code, type, arg0, arg1);
7993 if (tem != NULL_TREE)
7994 return tem;
7998 /* If this is a comparison of a real constant with a PLUS_EXPR
7999 or a MINUS_EXPR of a real constant, we can convert it into a
8000 comparison with a revised real constant as long as no overflow
8001 occurs when unsafe_math_optimizations are enabled. */
8002 if (flag_unsafe_math_optimizations
8003 && TREE_CODE (arg1) == REAL_CST
8004 && (TREE_CODE (arg0) == PLUS_EXPR
8005 || TREE_CODE (arg0) == MINUS_EXPR)
8006 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8007 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8008 ? MINUS_EXPR : PLUS_EXPR,
8009 arg1, TREE_OPERAND (arg0, 1), 0))
8010 && ! TREE_CONSTANT_OVERFLOW (tem))
8011 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8013 /* Likewise, we can simplify a comparison of a real constant with
8014 a MINUS_EXPR whose first operand is also a real constant, i.e.
8015 (c1 - x) < c2 becomes x > c1-c2. */
8016 if (flag_unsafe_math_optimizations
8017 && TREE_CODE (arg1) == REAL_CST
8018 && TREE_CODE (arg0) == MINUS_EXPR
8019 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8020 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8021 arg1, 0))
8022 && ! TREE_CONSTANT_OVERFLOW (tem))
8023 return fold (build2 (swap_tree_comparison (code), type,
8024 TREE_OPERAND (arg0, 1), tem));
8026 /* Fold comparisons against built-in math functions. */
8027 if (TREE_CODE (arg1) == REAL_CST
8028 && flag_unsafe_math_optimizations
8029 && ! flag_errno_math)
8031 enum built_in_function fcode = builtin_mathfn_code (arg0);
8033 if (fcode != END_BUILTINS)
8035 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8036 if (tem != NULL_TREE)
8037 return tem;
8042 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8043 if (TREE_CONSTANT (arg1)
8044 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8045 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8046 /* This optimization is invalid for ordered comparisons
8047 if CONST+INCR overflows or if foo+incr might overflow.
8048 This optimization is invalid for floating point due to rounding.
8049 For pointer types we assume overflow doesn't happen. */
8050 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8051 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8052 && (code == EQ_EXPR || code == NE_EXPR))))
8054 tree varop, newconst;
8056 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8058 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8059 arg1, TREE_OPERAND (arg0, 1)));
8060 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8061 TREE_OPERAND (arg0, 0),
8062 TREE_OPERAND (arg0, 1));
8064 else
8066 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8067 arg1, TREE_OPERAND (arg0, 1)));
8068 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8069 TREE_OPERAND (arg0, 0),
8070 TREE_OPERAND (arg0, 1));
8074 /* If VAROP is a reference to a bitfield, we must mask
8075 the constant by the width of the field. */
8076 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8077 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8078 && host_integerp (DECL_SIZE (TREE_OPERAND
8079 (TREE_OPERAND (varop, 0), 1)), 1))
8081 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8082 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8083 tree folded_compare, shift;
8085 /* First check whether the comparison would come out
8086 always the same. If we don't do that we would
8087 change the meaning with the masking. */
8088 folded_compare = fold (build2 (code, type,
8089 TREE_OPERAND (varop, 0), arg1));
8090 if (integer_zerop (folded_compare)
8091 || integer_onep (folded_compare))
8092 return omit_one_operand (type, folded_compare, varop);
8094 shift = build_int_cst (NULL_TREE,
8095 TYPE_PRECISION (TREE_TYPE (varop)) - size,
8097 shift = fold_convert (TREE_TYPE (varop), shift);
8098 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8099 newconst, shift));
8100 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8101 newconst, shift));
8104 return fold (build2 (code, type, varop, newconst));
8107 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8108 This transformation affects the cases which are handled in later
8109 optimizations involving comparisons with non-negative constants. */
8110 if (TREE_CODE (arg1) == INTEGER_CST
8111 && TREE_CODE (arg0) != INTEGER_CST
8112 && tree_int_cst_sgn (arg1) > 0)
8114 switch (code)
8116 case GE_EXPR:
8117 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8118 return fold (build2 (GT_EXPR, type, arg0, arg1));
8120 case LT_EXPR:
8121 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8122 return fold (build2 (LE_EXPR, type, arg0, arg1));
8124 default:
8125 break;
8129 /* Comparisons with the highest or lowest possible integer of
8130 the specified size will have known values.
8132 This is quite similar to fold_relational_hi_lo; however, my
8133 attempts to share the code have been nothing but trouble.
8134 I give up for now. */
8136 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8138 if (TREE_CODE (arg1) == INTEGER_CST
8139 && ! TREE_CONSTANT_OVERFLOW (arg1)
8140 && width <= HOST_BITS_PER_WIDE_INT
8141 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8142 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8144 unsigned HOST_WIDE_INT signed_max;
8145 unsigned HOST_WIDE_INT max, min;
8147 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8149 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8151 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8152 min = 0;
8154 else
8156 max = signed_max;
8157 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8160 if (TREE_INT_CST_HIGH (arg1) == 0
8161 && TREE_INT_CST_LOW (arg1) == max)
8162 switch (code)
8164 case GT_EXPR:
8165 return omit_one_operand (type, integer_zero_node, arg0);
8167 case GE_EXPR:
8168 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8170 case LE_EXPR:
8171 return omit_one_operand (type, integer_one_node, arg0);
8173 case LT_EXPR:
8174 return fold (build2 (NE_EXPR, type, arg0, arg1));
8176 /* The GE_EXPR and LT_EXPR cases above are not normally
8177 reached because of previous transformations. */
8179 default:
8180 break;
8182 else if (TREE_INT_CST_HIGH (arg1) == 0
8183 && TREE_INT_CST_LOW (arg1) == max - 1)
8184 switch (code)
8186 case GT_EXPR:
8187 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8188 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8189 case LE_EXPR:
8190 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8191 return fold (build2 (NE_EXPR, type, arg0, arg1));
8192 default:
8193 break;
8195 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8196 && TREE_INT_CST_LOW (arg1) == min)
8197 switch (code)
8199 case LT_EXPR:
8200 return omit_one_operand (type, integer_zero_node, arg0);
8202 case LE_EXPR:
8203 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8205 case GE_EXPR:
8206 return omit_one_operand (type, integer_one_node, arg0);
8208 case GT_EXPR:
8209 return fold (build2 (NE_EXPR, type, arg0, arg1));
8211 default:
8212 break;
8214 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8215 && TREE_INT_CST_LOW (arg1) == min + 1)
8216 switch (code)
8218 case GE_EXPR:
8219 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8220 return fold (build2 (NE_EXPR, type, arg0, arg1));
8221 case LT_EXPR:
8222 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8223 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8224 default:
8225 break;
8228 else if (!in_gimple_form
8229 && TREE_INT_CST_HIGH (arg1) == 0
8230 && TREE_INT_CST_LOW (arg1) == signed_max
8231 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8232 /* signed_type does not work on pointer types. */
8233 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8235 /* The following case also applies to X < signed_max+1
8236 and X >= signed_max+1 because previous transformations. */
8237 if (code == LE_EXPR || code == GT_EXPR)
8239 tree st0, st1;
8240 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8241 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8242 return fold
8243 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8244 type, fold_convert (st0, arg0),
8245 fold_convert (st1, integer_zero_node)));
8251 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8252 a MINUS_EXPR of a constant, we can convert it into a comparison with
8253 a revised constant as long as no overflow occurs. */
8254 if ((code == EQ_EXPR || code == NE_EXPR)
8255 && TREE_CODE (arg1) == INTEGER_CST
8256 && (TREE_CODE (arg0) == PLUS_EXPR
8257 || TREE_CODE (arg0) == MINUS_EXPR)
8258 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8259 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8260 ? MINUS_EXPR : PLUS_EXPR,
8261 arg1, TREE_OPERAND (arg0, 1), 0))
8262 && ! TREE_CONSTANT_OVERFLOW (tem))
8263 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8265 /* Similarly for a NEGATE_EXPR. */
8266 else if ((code == EQ_EXPR || code == NE_EXPR)
8267 && TREE_CODE (arg0) == NEGATE_EXPR
8268 && TREE_CODE (arg1) == INTEGER_CST
8269 && 0 != (tem = negate_expr (arg1))
8270 && TREE_CODE (tem) == INTEGER_CST
8271 && ! TREE_CONSTANT_OVERFLOW (tem))
8272 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8274 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8275 for !=. Don't do this for ordered comparisons due to overflow. */
8276 else if ((code == NE_EXPR || code == EQ_EXPR)
8277 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8278 return fold (build2 (code, type,
8279 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8281 /* If we are widening one operand of an integer comparison,
8282 see if the other operand is similarly being widened. Perhaps we
8283 can do the comparison in the narrower type. */
8284 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8285 && TREE_CODE (arg0) == NOP_EXPR
8286 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8287 && (code == EQ_EXPR || code == NE_EXPR
8288 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8289 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8290 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8291 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8292 || (TREE_CODE (t1) == INTEGER_CST
8293 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8294 return fold (build2 (code, type, tem,
8295 fold_convert (TREE_TYPE (tem), t1)));
8297 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8298 constant, we can simplify it. */
8299 else if (TREE_CODE (arg1) == INTEGER_CST
8300 && (TREE_CODE (arg0) == MIN_EXPR
8301 || TREE_CODE (arg0) == MAX_EXPR)
8302 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8303 return optimize_minmax_comparison (t);
8305 /* If we are comparing an ABS_EXPR with a constant, we can
8306 convert all the cases into explicit comparisons, but they may
8307 well not be faster than doing the ABS and one comparison.
8308 But ABS (X) <= C is a range comparison, which becomes a subtraction
8309 and a comparison, and is probably faster. */
8310 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8311 && TREE_CODE (arg0) == ABS_EXPR
8312 && ! TREE_SIDE_EFFECTS (arg0)
8313 && (0 != (tem = negate_expr (arg1)))
8314 && TREE_CODE (tem) == INTEGER_CST
8315 && ! TREE_CONSTANT_OVERFLOW (tem))
8316 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8317 build2 (GE_EXPR, type,
8318 TREE_OPERAND (arg0, 0), tem),
8319 build2 (LE_EXPR, type,
8320 TREE_OPERAND (arg0, 0), arg1)));
8322 /* If this is an EQ or NE comparison with zero and ARG0 is
8323 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8324 two operations, but the latter can be done in one less insn
8325 on machines that have only two-operand insns or on which a
8326 constant cannot be the first operand. */
8327 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8328 && TREE_CODE (arg0) == BIT_AND_EXPR)
8330 tree arg00 = TREE_OPERAND (arg0, 0);
8331 tree arg01 = TREE_OPERAND (arg0, 1);
8332 if (TREE_CODE (arg00) == LSHIFT_EXPR
8333 && integer_onep (TREE_OPERAND (arg00, 0)))
8334 return
8335 fold (build2 (code, type,
8336 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8337 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8338 arg01, TREE_OPERAND (arg00, 1)),
8339 fold_convert (TREE_TYPE (arg0),
8340 integer_one_node)),
8341 arg1));
8342 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8343 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8344 return
8345 fold (build2 (code, type,
8346 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8347 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8348 arg00, TREE_OPERAND (arg01, 1)),
8349 fold_convert (TREE_TYPE (arg0),
8350 integer_one_node)),
8351 arg1));
8354 /* If this is an NE or EQ comparison of zero against the result of a
8355 signed MOD operation whose second operand is a power of 2, make
8356 the MOD operation unsigned since it is simpler and equivalent. */
8357 if ((code == NE_EXPR || code == EQ_EXPR)
8358 && integer_zerop (arg1)
8359 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8360 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8361 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8362 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8363 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8364 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8366 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8367 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8368 fold_convert (newtype,
8369 TREE_OPERAND (arg0, 0)),
8370 fold_convert (newtype,
8371 TREE_OPERAND (arg0, 1))));
8373 return fold (build2 (code, type, newmod,
8374 fold_convert (newtype, arg1)));
8377 /* If this is an NE comparison of zero with an AND of one, remove the
8378 comparison since the AND will give the correct value. */
8379 if (code == NE_EXPR && integer_zerop (arg1)
8380 && TREE_CODE (arg0) == BIT_AND_EXPR
8381 && integer_onep (TREE_OPERAND (arg0, 1)))
8382 return fold_convert (type, arg0);
8384 /* If we have (A & C) == C where C is a power of 2, convert this into
8385 (A & C) != 0. Similarly for NE_EXPR. */
8386 if ((code == EQ_EXPR || code == NE_EXPR)
8387 && TREE_CODE (arg0) == BIT_AND_EXPR
8388 && integer_pow2p (TREE_OPERAND (arg0, 1))
8389 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8390 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8391 arg0, fold_convert (TREE_TYPE (arg0),
8392 integer_zero_node)));
8394 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8395 2, then fold the expression into shifts and logical operations. */
8396 tem = fold_single_bit_test (code, arg0, arg1, type);
8397 if (tem)
8398 return tem;
8400 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8401 Similarly for NE_EXPR. */
8402 if ((code == EQ_EXPR || code == NE_EXPR)
8403 && TREE_CODE (arg0) == BIT_AND_EXPR
8404 && TREE_CODE (arg1) == INTEGER_CST
8405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8407 tree dandnotc
8408 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8409 arg1, build1 (BIT_NOT_EXPR,
8410 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8411 TREE_OPERAND (arg0, 1))));
8412 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8413 if (integer_nonzerop (dandnotc))
8414 return omit_one_operand (type, rslt, arg0);
8417 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8418 Similarly for NE_EXPR. */
8419 if ((code == EQ_EXPR || code == NE_EXPR)
8420 && TREE_CODE (arg0) == BIT_IOR_EXPR
8421 && TREE_CODE (arg1) == INTEGER_CST
8422 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8424 tree candnotd
8425 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8426 TREE_OPERAND (arg0, 1),
8427 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8428 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8429 if (integer_nonzerop (candnotd))
8430 return omit_one_operand (type, rslt, arg0);
8433 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8434 and similarly for >= into !=. */
8435 if ((code == LT_EXPR || code == GE_EXPR)
8436 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8437 && TREE_CODE (arg1) == LSHIFT_EXPR
8438 && integer_onep (TREE_OPERAND (arg1, 0)))
8439 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8440 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8441 TREE_OPERAND (arg1, 1)),
8442 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8444 else if ((code == LT_EXPR || code == GE_EXPR)
8445 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8446 && (TREE_CODE (arg1) == NOP_EXPR
8447 || TREE_CODE (arg1) == CONVERT_EXPR)
8448 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8449 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8450 return
8451 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8452 fold_convert (TREE_TYPE (arg0),
8453 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8454 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8455 1))),
8456 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8458 /* Simplify comparison of something with itself. (For IEEE
8459 floating-point, we can only do some of these simplifications.) */
8460 if (operand_equal_p (arg0, arg1, 0))
8462 switch (code)
8464 case EQ_EXPR:
8465 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8466 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8467 return constant_boolean_node (1, type);
8468 break;
8470 case GE_EXPR:
8471 case LE_EXPR:
8472 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8473 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8474 return constant_boolean_node (1, type);
8475 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8477 case NE_EXPR:
8478 /* For NE, we can only do this simplification if integer
8479 or we don't honor IEEE floating point NaNs. */
8480 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8481 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8482 break;
8483 /* ... fall through ... */
8484 case GT_EXPR:
8485 case LT_EXPR:
8486 return constant_boolean_node (0, type);
8487 default:
8488 abort ();
8492 /* If we are comparing an expression that just has comparisons
8493 of two integer values, arithmetic expressions of those comparisons,
8494 and constants, we can simplify it. There are only three cases
8495 to check: the two values can either be equal, the first can be
8496 greater, or the second can be greater. Fold the expression for
8497 those three values. Since each value must be 0 or 1, we have
8498 eight possibilities, each of which corresponds to the constant 0
8499 or 1 or one of the six possible comparisons.
8501 This handles common cases like (a > b) == 0 but also handles
8502 expressions like ((x > y) - (y > x)) > 0, which supposedly
8503 occur in macroized code. */
8505 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8507 tree cval1 = 0, cval2 = 0;
8508 int save_p = 0;
8510 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8511 /* Don't handle degenerate cases here; they should already
8512 have been handled anyway. */
8513 && cval1 != 0 && cval2 != 0
8514 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8515 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8516 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8517 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8518 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8519 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8520 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8522 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8523 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8525 /* We can't just pass T to eval_subst in case cval1 or cval2
8526 was the same as ARG1. */
8528 tree high_result
8529 = fold (build2 (code, type,
8530 eval_subst (arg0, cval1, maxval,
8531 cval2, minval),
8532 arg1));
8533 tree equal_result
8534 = fold (build2 (code, type,
8535 eval_subst (arg0, cval1, maxval,
8536 cval2, maxval),
8537 arg1));
8538 tree low_result
8539 = fold (build2 (code, type,
8540 eval_subst (arg0, cval1, minval,
8541 cval2, maxval),
8542 arg1));
8544 /* All three of these results should be 0 or 1. Confirm they
8545 are. Then use those values to select the proper code
8546 to use. */
8548 if ((integer_zerop (high_result)
8549 || integer_onep (high_result))
8550 && (integer_zerop (equal_result)
8551 || integer_onep (equal_result))
8552 && (integer_zerop (low_result)
8553 || integer_onep (low_result)))
8555 /* Make a 3-bit mask with the high-order bit being the
8556 value for `>', the next for '=', and the low for '<'. */
8557 switch ((integer_onep (high_result) * 4)
8558 + (integer_onep (equal_result) * 2)
8559 + integer_onep (low_result))
8561 case 0:
8562 /* Always false. */
8563 return omit_one_operand (type, integer_zero_node, arg0);
8564 case 1:
8565 code = LT_EXPR;
8566 break;
8567 case 2:
8568 code = EQ_EXPR;
8569 break;
8570 case 3:
8571 code = LE_EXPR;
8572 break;
8573 case 4:
8574 code = GT_EXPR;
8575 break;
8576 case 5:
8577 code = NE_EXPR;
8578 break;
8579 case 6:
8580 code = GE_EXPR;
8581 break;
8582 case 7:
8583 /* Always true. */
8584 return omit_one_operand (type, integer_one_node, arg0);
8587 tem = build2 (code, type, cval1, cval2);
8588 if (save_p)
8589 return save_expr (tem);
8590 else
8591 return fold (tem);
8596 /* If this is a comparison of a field, we may be able to simplify it. */
8597 if (((TREE_CODE (arg0) == COMPONENT_REF
8598 && lang_hooks.can_use_bit_fields_p ())
8599 || TREE_CODE (arg0) == BIT_FIELD_REF)
8600 && (code == EQ_EXPR || code == NE_EXPR)
8601 /* Handle the constant case even without -O
8602 to make sure the warnings are given. */
8603 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8605 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8606 if (t1)
8607 return t1;
8610 /* If this is a comparison of complex values and either or both sides
8611 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8612 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8613 This may prevent needless evaluations. */
8614 if ((code == EQ_EXPR || code == NE_EXPR)
8615 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8616 && (TREE_CODE (arg0) == COMPLEX_EXPR
8617 || TREE_CODE (arg1) == COMPLEX_EXPR
8618 || TREE_CODE (arg0) == COMPLEX_CST
8619 || TREE_CODE (arg1) == COMPLEX_CST))
8621 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8622 tree real0, imag0, real1, imag1;
8624 arg0 = save_expr (arg0);
8625 arg1 = save_expr (arg1);
8626 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8627 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8628 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8629 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8631 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8632 : TRUTH_ORIF_EXPR),
8633 type,
8634 fold (build2 (code, type, real0, real1)),
8635 fold (build2 (code, type, imag0, imag1))));
8638 /* Optimize comparisons of strlen vs zero to a compare of the
8639 first character of the string vs zero. To wit,
8640 strlen(ptr) == 0 => *ptr == 0
8641 strlen(ptr) != 0 => *ptr != 0
8642 Other cases should reduce to one of these two (or a constant)
8643 due to the return value of strlen being unsigned. */
8644 if ((code == EQ_EXPR || code == NE_EXPR)
8645 && integer_zerop (arg1)
8646 && TREE_CODE (arg0) == CALL_EXPR)
8648 tree fndecl = get_callee_fndecl (arg0);
8649 tree arglist;
8651 if (fndecl
8652 && DECL_BUILT_IN (fndecl)
8653 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8654 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8655 && (arglist = TREE_OPERAND (arg0, 1))
8656 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8657 && ! TREE_CHAIN (arglist))
8658 return fold (build2 (code, type,
8659 build1 (INDIRECT_REF, char_type_node,
8660 TREE_VALUE (arglist)),
8661 fold_convert (char_type_node,
8662 integer_zero_node)));
8665 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8666 into a single range test. */
8667 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8668 && TREE_CODE (arg1) == INTEGER_CST
8669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8670 && !integer_zerop (TREE_OPERAND (arg0, 1))
8671 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8672 && !TREE_OVERFLOW (arg1))
8674 t1 = fold_div_compare (code, type, arg0, arg1);
8675 if (t1 != NULL_TREE)
8676 return t1;
8679 if ((code == EQ_EXPR || code == NE_EXPR)
8680 && !TREE_SIDE_EFFECTS (arg0)
8681 && integer_zerop (arg1)
8682 && tree_expr_nonzero_p (arg0))
8683 return constant_boolean_node (code==NE_EXPR, type);
8685 t1 = fold_relational_const (code, type, arg0, arg1);
8686 return t1 == NULL_TREE ? t : t1;
8688 case UNORDERED_EXPR:
8689 case ORDERED_EXPR:
8690 case UNLT_EXPR:
8691 case UNLE_EXPR:
8692 case UNGT_EXPR:
8693 case UNGE_EXPR:
8694 case UNEQ_EXPR:
8695 case LTGT_EXPR:
8696 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8698 t1 = fold_relational_const (code, type, arg0, arg1);
8699 if (t1 != NULL_TREE)
8700 return t1;
8703 /* If the first operand is NaN, the result is constant. */
8704 if (TREE_CODE (arg0) == REAL_CST
8705 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8706 && (code != LTGT_EXPR || ! flag_trapping_math))
8708 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8709 ? integer_zero_node
8710 : integer_one_node;
8711 return omit_one_operand (type, t1, arg1);
8714 /* If the second operand is NaN, the result is constant. */
8715 if (TREE_CODE (arg1) == REAL_CST
8716 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8717 && (code != LTGT_EXPR || ! flag_trapping_math))
8719 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8720 ? integer_zero_node
8721 : integer_one_node;
8722 return omit_one_operand (type, t1, arg0);
8725 /* Simplify unordered comparison of something with itself. */
8726 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8727 && operand_equal_p (arg0, arg1, 0))
8728 return constant_boolean_node (1, type);
8730 if (code == LTGT_EXPR
8731 && !flag_trapping_math
8732 && operand_equal_p (arg0, arg1, 0))
8733 return constant_boolean_node (0, type);
8735 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8737 tree targ0 = strip_float_extensions (arg0);
8738 tree targ1 = strip_float_extensions (arg1);
8739 tree newtype = TREE_TYPE (targ0);
8741 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8742 newtype = TREE_TYPE (targ1);
8744 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8745 return fold (build2 (code, type, fold_convert (newtype, targ0),
8746 fold_convert (newtype, targ1)));
8749 return t;
8751 case COND_EXPR:
8752 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8753 so all simple results must be passed through pedantic_non_lvalue. */
8754 if (TREE_CODE (arg0) == INTEGER_CST)
8756 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8757 /* Only optimize constant conditions when the selected branch
8758 has the same type as the COND_EXPR. This avoids optimizing
8759 away "c ? x : throw", where the throw has a void type. */
8760 if (! VOID_TYPE_P (TREE_TYPE (tem))
8761 || VOID_TYPE_P (type))
8762 return pedantic_non_lvalue (tem);
8763 return t;
8765 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8766 return pedantic_omit_one_operand (type, arg1, arg0);
8768 /* If we have A op B ? A : C, we may be able to convert this to a
8769 simpler expression, depending on the operation and the values
8770 of B and C. Signed zeros prevent all of these transformations,
8771 for reasons given above each one.
8773 Also try swapping the arguments and inverting the conditional. */
8774 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8775 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8776 arg1, TREE_OPERAND (arg0, 1))
8777 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8779 tem = fold_cond_expr_with_comparison (type, arg0,
8780 TREE_OPERAND (t, 1),
8781 TREE_OPERAND (t, 2));
8782 if (tem)
8783 return tem;
8786 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8787 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8788 TREE_OPERAND (t, 2),
8789 TREE_OPERAND (arg0, 1))
8790 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8792 tem = invert_truthvalue (arg0);
8793 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8795 tem = fold_cond_expr_with_comparison (type, tem,
8796 TREE_OPERAND (t, 2),
8797 TREE_OPERAND (t, 1));
8798 if (tem)
8799 return tem;
8803 /* If the second operand is simpler than the third, swap them
8804 since that produces better jump optimization results. */
8805 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8806 TREE_OPERAND (t, 2), false))
8808 /* See if this can be inverted. If it can't, possibly because
8809 it was a floating-point inequality comparison, don't do
8810 anything. */
8811 tem = invert_truthvalue (arg0);
8813 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8814 return fold (build3 (code, type, tem,
8815 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8818 /* Convert A ? 1 : 0 to simply A. */
8819 if (integer_onep (TREE_OPERAND (t, 1))
8820 && integer_zerop (TREE_OPERAND (t, 2))
8821 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8822 call to fold will try to move the conversion inside
8823 a COND, which will recurse. In that case, the COND_EXPR
8824 is probably the best choice, so leave it alone. */
8825 && type == TREE_TYPE (arg0))
8826 return pedantic_non_lvalue (arg0);
8828 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8829 over COND_EXPR in cases such as floating point comparisons. */
8830 if (integer_zerop (TREE_OPERAND (t, 1))
8831 && integer_onep (TREE_OPERAND (t, 2))
8832 && truth_value_p (TREE_CODE (arg0)))
8833 return pedantic_non_lvalue (fold_convert (type,
8834 invert_truthvalue (arg0)));
8836 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8837 if (TREE_CODE (arg0) == LT_EXPR
8838 && integer_zerop (TREE_OPERAND (arg0, 1))
8839 && integer_zerop (TREE_OPERAND (t, 2))
8840 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8841 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8842 TREE_TYPE (tem), tem, arg1)));
8844 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8845 already handled above. */
8846 if (TREE_CODE (arg0) == BIT_AND_EXPR
8847 && integer_onep (TREE_OPERAND (arg0, 1))
8848 && integer_zerop (TREE_OPERAND (t, 2))
8849 && integer_pow2p (arg1))
8851 tree tem = TREE_OPERAND (arg0, 0);
8852 STRIP_NOPS (tem);
8853 if (TREE_CODE (tem) == RSHIFT_EXPR
8854 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
8855 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8856 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8857 return fold (build2 (BIT_AND_EXPR, type,
8858 TREE_OPERAND (tem, 0), arg1));
8861 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8862 is probably obsolete because the first operand should be a
8863 truth value (that's why we have the two cases above), but let's
8864 leave it in until we can confirm this for all front-ends. */
8865 if (integer_zerop (TREE_OPERAND (t, 2))
8866 && TREE_CODE (arg0) == NE_EXPR
8867 && integer_zerop (TREE_OPERAND (arg0, 1))
8868 && integer_pow2p (arg1)
8869 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8870 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8871 arg1, OEP_ONLY_CONST))
8872 return pedantic_non_lvalue (fold_convert (type,
8873 TREE_OPERAND (arg0, 0)));
8875 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8876 if (integer_zerop (TREE_OPERAND (t, 2))
8877 && truth_value_p (TREE_CODE (arg0))
8878 && truth_value_p (TREE_CODE (arg1)))
8879 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8881 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8882 if (integer_onep (TREE_OPERAND (t, 2))
8883 && truth_value_p (TREE_CODE (arg0))
8884 && truth_value_p (TREE_CODE (arg1)))
8886 /* Only perform transformation if ARG0 is easily inverted. */
8887 tem = invert_truthvalue (arg0);
8888 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8889 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8892 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8893 if (integer_zerop (arg1)
8894 && truth_value_p (TREE_CODE (arg0))
8895 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8897 /* Only perform transformation if ARG0 is easily inverted. */
8898 tem = invert_truthvalue (arg0);
8899 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8900 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8901 TREE_OPERAND (t, 2)));
8904 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8905 if (integer_onep (arg1)
8906 && truth_value_p (TREE_CODE (arg0))
8907 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8908 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8909 TREE_OPERAND (t, 2)));
8911 return t;
8913 case COMPOUND_EXPR:
8914 /* When pedantic, a compound expression can be neither an lvalue
8915 nor an integer constant expression. */
8916 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8917 return t;
8918 /* Don't let (0, 0) be null pointer constant. */
8919 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8920 : fold_convert (type, arg1);
8921 return pedantic_non_lvalue (tem);
8923 case COMPLEX_EXPR:
8924 if (wins)
8925 return build_complex (type, arg0, arg1);
8926 return t;
8928 case REALPART_EXPR:
8929 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8930 return t;
8931 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8932 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8933 TREE_OPERAND (arg0, 1));
8934 else if (TREE_CODE (arg0) == COMPLEX_CST)
8935 return TREE_REALPART (arg0);
8936 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8937 return fold (build2 (TREE_CODE (arg0), type,
8938 fold (build1 (REALPART_EXPR, type,
8939 TREE_OPERAND (arg0, 0))),
8940 fold (build1 (REALPART_EXPR, type,
8941 TREE_OPERAND (arg0, 1)))));
8942 return t;
8944 case IMAGPART_EXPR:
8945 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8946 return fold_convert (type, integer_zero_node);
8947 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8948 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8949 TREE_OPERAND (arg0, 0));
8950 else if (TREE_CODE (arg0) == COMPLEX_CST)
8951 return TREE_IMAGPART (arg0);
8952 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8953 return fold (build2 (TREE_CODE (arg0), type,
8954 fold (build1 (IMAGPART_EXPR, type,
8955 TREE_OPERAND (arg0, 0))),
8956 fold (build1 (IMAGPART_EXPR, type,
8957 TREE_OPERAND (arg0, 1)))));
8958 return t;
8960 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8961 appropriate. */
8962 case CLEANUP_POINT_EXPR:
8963 if (! has_cleanups (arg0))
8964 return TREE_OPERAND (t, 0);
8967 enum tree_code code0 = TREE_CODE (arg0);
8968 int kind0 = TREE_CODE_CLASS (code0);
8969 tree arg00 = TREE_OPERAND (arg0, 0);
8970 tree arg01;
8972 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8973 return fold (build1 (code0, type,
8974 fold (build1 (CLEANUP_POINT_EXPR,
8975 TREE_TYPE (arg00), arg00))));
8977 if (kind0 == '<' || kind0 == '2'
8978 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8979 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8980 || code0 == TRUTH_XOR_EXPR)
8982 arg01 = TREE_OPERAND (arg0, 1);
8984 if (TREE_CONSTANT (arg00)
8985 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8986 && ! has_cleanups (arg00)))
8987 return fold (build2 (code0, type, arg00,
8988 fold (build1 (CLEANUP_POINT_EXPR,
8989 TREE_TYPE (arg01), arg01))));
8991 if (TREE_CONSTANT (arg01))
8992 return fold (build2 (code0, type,
8993 fold (build1 (CLEANUP_POINT_EXPR,
8994 TREE_TYPE (arg00), arg00)),
8995 arg01));
8998 return t;
9001 case CALL_EXPR:
9002 /* Check for a built-in function. */
9003 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9004 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9005 == FUNCTION_DECL)
9006 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9008 tree tmp = fold_builtin (t, false);
9009 if (tmp)
9010 return tmp;
9012 return t;
9014 default:
9015 return t;
9016 } /* switch (code) */
9019 #ifdef ENABLE_FOLD_CHECKING
9020 #undef fold
9022 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9023 static void fold_check_failed (tree, tree);
9024 void print_fold_checksum (tree);
9026 /* When --enable-checking=fold, compute a digest of expr before
9027 and after actual fold call to see if fold did not accidentally
9028 change original expr. */
9030 tree
9031 fold (tree expr)
9033 tree ret;
9034 struct md5_ctx ctx;
9035 unsigned char checksum_before[16], checksum_after[16];
9036 htab_t ht;
9038 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9039 md5_init_ctx (&ctx);
9040 fold_checksum_tree (expr, &ctx, ht);
9041 md5_finish_ctx (&ctx, checksum_before);
9042 htab_empty (ht);
9044 ret = fold_1 (expr);
9046 md5_init_ctx (&ctx);
9047 fold_checksum_tree (expr, &ctx, ht);
9048 md5_finish_ctx (&ctx, checksum_after);
9049 htab_delete (ht);
9051 if (memcmp (checksum_before, checksum_after, 16))
9052 fold_check_failed (expr, ret);
9054 return ret;
9057 void
9058 print_fold_checksum (tree expr)
9060 struct md5_ctx ctx;
9061 unsigned char checksum[16], cnt;
9062 htab_t ht;
9064 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9065 md5_init_ctx (&ctx);
9066 fold_checksum_tree (expr, &ctx, ht);
9067 md5_finish_ctx (&ctx, checksum);
9068 htab_delete (ht);
9069 for (cnt = 0; cnt < 16; ++cnt)
9070 fprintf (stderr, "%02x", checksum[cnt]);
9071 putc ('\n', stderr);
9074 static void
9075 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9077 internal_error ("fold check: original tree changed by fold");
9080 static void
9081 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9083 void **slot;
9084 enum tree_code code;
9085 char buf[sizeof (struct tree_decl)];
9086 int i, len;
9088 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
9089 > sizeof (struct tree_decl)
9090 || sizeof (struct tree_type) > sizeof (struct tree_decl))
9091 abort ();
9092 if (expr == NULL)
9093 return;
9094 slot = htab_find_slot (ht, expr, INSERT);
9095 if (*slot != NULL)
9096 return;
9097 *slot = expr;
9098 code = TREE_CODE (expr);
9099 if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9101 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9102 memcpy (buf, expr, tree_size (expr));
9103 expr = (tree) buf;
9104 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9106 else if (TREE_CODE_CLASS (code) == 't'
9107 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9109 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9110 memcpy (buf, expr, tree_size (expr));
9111 expr = (tree) buf;
9112 TYPE_POINTER_TO (expr) = NULL;
9113 TYPE_REFERENCE_TO (expr) = NULL;
9115 md5_process_bytes (expr, tree_size (expr), ctx);
9116 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9117 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9118 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9119 switch (TREE_CODE_CLASS (code))
9121 case 'c':
9122 switch (code)
9124 case STRING_CST:
9125 md5_process_bytes (TREE_STRING_POINTER (expr),
9126 TREE_STRING_LENGTH (expr), ctx);
9127 break;
9128 case COMPLEX_CST:
9129 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9130 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9131 break;
9132 case VECTOR_CST:
9133 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9134 break;
9135 default:
9136 break;
9138 break;
9139 case 'x':
9140 switch (code)
9142 case TREE_LIST:
9143 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9144 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9145 break;
9146 case TREE_VEC:
9147 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9148 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9149 break;
9150 default:
9151 break;
9153 break;
9154 case 'e':
9155 case 'r':
9156 case '<':
9157 case '1':
9158 case '2':
9159 case 's':
9160 len = first_rtl_op (code);
9161 for (i = 0; i < len; ++i)
9162 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9163 break;
9164 case 'd':
9165 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9166 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9167 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9168 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9169 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9170 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9171 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9172 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9173 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9174 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9175 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9176 break;
9177 case 't':
9178 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9179 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9180 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9181 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9182 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9183 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9184 if (INTEGRAL_TYPE_P (expr)
9185 || SCALAR_FLOAT_TYPE_P (expr))
9187 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9188 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9190 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9191 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9192 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9193 break;
9194 default:
9195 break;
9199 #endif
9201 /* Perform constant folding and related simplification of initializer
9202 expression EXPR. This behaves identically to "fold" but ignores
9203 potential run-time traps and exceptions that fold must preserve. */
9205 tree
9206 fold_initializer (tree expr)
9208 int saved_signaling_nans = flag_signaling_nans;
9209 int saved_trapping_math = flag_trapping_math;
9210 int saved_trapv = flag_trapv;
9211 tree result;
9213 flag_signaling_nans = 0;
9214 flag_trapping_math = 0;
9215 flag_trapv = 0;
9217 result = fold (expr);
9219 flag_signaling_nans = saved_signaling_nans;
9220 flag_trapping_math = saved_trapping_math;
9221 flag_trapv = saved_trapv;
9223 return result;
9226 /* Determine if first argument is a multiple of second argument. Return 0 if
9227 it is not, or we cannot easily determined it to be.
9229 An example of the sort of thing we care about (at this point; this routine
9230 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9231 fold cases do now) is discovering that
9233 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9235 is a multiple of
9237 SAVE_EXPR (J * 8)
9239 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9241 This code also handles discovering that
9243 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9245 is a multiple of 8 so we don't have to worry about dealing with a
9246 possible remainder.
9248 Note that we *look* inside a SAVE_EXPR only to determine how it was
9249 calculated; it is not safe for fold to do much of anything else with the
9250 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9251 at run time. For example, the latter example above *cannot* be implemented
9252 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9253 evaluation time of the original SAVE_EXPR is not necessarily the same at
9254 the time the new expression is evaluated. The only optimization of this
9255 sort that would be valid is changing
9257 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9259 divided by 8 to
9261 SAVE_EXPR (I) * SAVE_EXPR (J)
9263 (where the same SAVE_EXPR (J) is used in the original and the
9264 transformed version). */
9266 static int
9267 multiple_of_p (tree type, tree top, tree bottom)
9269 if (operand_equal_p (top, bottom, 0))
9270 return 1;
9272 if (TREE_CODE (type) != INTEGER_TYPE)
9273 return 0;
9275 switch (TREE_CODE (top))
9277 case MULT_EXPR:
9278 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9279 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9281 case PLUS_EXPR:
9282 case MINUS_EXPR:
9283 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9284 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9286 case LSHIFT_EXPR:
9287 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9289 tree op1, t1;
9291 op1 = TREE_OPERAND (top, 1);
9292 /* const_binop may not detect overflow correctly,
9293 so check for it explicitly here. */
9294 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9295 > TREE_INT_CST_LOW (op1)
9296 && TREE_INT_CST_HIGH (op1) == 0
9297 && 0 != (t1 = fold_convert (type,
9298 const_binop (LSHIFT_EXPR,
9299 size_one_node,
9300 op1, 0)))
9301 && ! TREE_OVERFLOW (t1))
9302 return multiple_of_p (type, t1, bottom);
9304 return 0;
9306 case NOP_EXPR:
9307 /* Can't handle conversions from non-integral or wider integral type. */
9308 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9309 || (TYPE_PRECISION (type)
9310 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9311 return 0;
9313 /* .. fall through ... */
9315 case SAVE_EXPR:
9316 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9318 case INTEGER_CST:
9319 if (TREE_CODE (bottom) != INTEGER_CST
9320 || (TYPE_UNSIGNED (type)
9321 && (tree_int_cst_sgn (top) < 0
9322 || tree_int_cst_sgn (bottom) < 0)))
9323 return 0;
9324 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9325 top, bottom, 0));
9327 default:
9328 return 0;
9332 /* Return true if `t' is known to be non-negative. */
9335 tree_expr_nonnegative_p (tree t)
9337 switch (TREE_CODE (t))
9339 case ABS_EXPR:
9340 return 1;
9342 case INTEGER_CST:
9343 return tree_int_cst_sgn (t) >= 0;
9345 case REAL_CST:
9346 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9348 case PLUS_EXPR:
9349 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9350 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9351 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9353 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9354 both unsigned and at least 2 bits shorter than the result. */
9355 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9356 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9357 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9359 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9360 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9361 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9362 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9364 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9365 TYPE_PRECISION (inner2)) + 1;
9366 return prec < TYPE_PRECISION (TREE_TYPE (t));
9369 break;
9371 case MULT_EXPR:
9372 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9374 /* x * x for floating point x is always non-negative. */
9375 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9376 return 1;
9377 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9378 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9381 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9382 both unsigned and their total bits is shorter than the result. */
9383 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9384 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9385 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9387 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9388 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9389 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9390 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9391 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9392 < TYPE_PRECISION (TREE_TYPE (t));
9394 return 0;
9396 case TRUNC_DIV_EXPR:
9397 case CEIL_DIV_EXPR:
9398 case FLOOR_DIV_EXPR:
9399 case ROUND_DIV_EXPR:
9400 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9401 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9403 case TRUNC_MOD_EXPR:
9404 case CEIL_MOD_EXPR:
9405 case FLOOR_MOD_EXPR:
9406 case ROUND_MOD_EXPR:
9407 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9409 case RDIV_EXPR:
9410 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9411 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9413 case BIT_AND_EXPR:
9414 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9415 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9416 case BIT_IOR_EXPR:
9417 case BIT_XOR_EXPR:
9418 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9419 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9421 case NOP_EXPR:
9423 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9424 tree outer_type = TREE_TYPE (t);
9426 if (TREE_CODE (outer_type) == REAL_TYPE)
9428 if (TREE_CODE (inner_type) == REAL_TYPE)
9429 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9430 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9432 if (TYPE_UNSIGNED (inner_type))
9433 return 1;
9434 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9437 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9439 if (TREE_CODE (inner_type) == REAL_TYPE)
9440 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9441 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9442 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9443 && TYPE_UNSIGNED (inner_type);
9446 break;
9448 case COND_EXPR:
9449 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9450 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9451 case COMPOUND_EXPR:
9452 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9453 case MIN_EXPR:
9454 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9455 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9456 case MAX_EXPR:
9457 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9458 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9459 case MODIFY_EXPR:
9460 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9461 case BIND_EXPR:
9462 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9463 case SAVE_EXPR:
9464 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9465 case NON_LVALUE_EXPR:
9466 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9467 case FLOAT_EXPR:
9468 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9470 case TARGET_EXPR:
9472 tree temp = TARGET_EXPR_SLOT (t);
9473 t = TARGET_EXPR_INITIAL (t);
9475 /* If the initializer is non-void, then it's a normal expression
9476 that will be assigned to the slot. */
9477 if (!VOID_TYPE_P (t))
9478 return tree_expr_nonnegative_p (t);
9480 /* Otherwise, the initializer sets the slot in some way. One common
9481 way is an assignment statement at the end of the initializer. */
9482 while (1)
9484 if (TREE_CODE (t) == BIND_EXPR)
9485 t = expr_last (BIND_EXPR_BODY (t));
9486 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9487 || TREE_CODE (t) == TRY_CATCH_EXPR)
9488 t = expr_last (TREE_OPERAND (t, 0));
9489 else if (TREE_CODE (t) == STATEMENT_LIST)
9490 t = expr_last (t);
9491 else
9492 break;
9494 if (TREE_CODE (t) == MODIFY_EXPR
9495 && TREE_OPERAND (t, 0) == temp)
9496 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9498 return 0;
9501 case CALL_EXPR:
9503 tree fndecl = get_callee_fndecl (t);
9504 tree arglist = TREE_OPERAND (t, 1);
9505 if (fndecl
9506 && DECL_BUILT_IN (fndecl)
9507 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9508 switch (DECL_FUNCTION_CODE (fndecl))
9510 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9511 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9512 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9513 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9515 CASE_BUILTIN_F (BUILT_IN_ACOS)
9516 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9517 CASE_BUILTIN_F (BUILT_IN_CABS)
9518 CASE_BUILTIN_F (BUILT_IN_COSH)
9519 CASE_BUILTIN_F (BUILT_IN_ERFC)
9520 CASE_BUILTIN_F (BUILT_IN_EXP)
9521 CASE_BUILTIN_F (BUILT_IN_EXP10)
9522 CASE_BUILTIN_F (BUILT_IN_EXP2)
9523 CASE_BUILTIN_F (BUILT_IN_FABS)
9524 CASE_BUILTIN_F (BUILT_IN_FDIM)
9525 CASE_BUILTIN_F (BUILT_IN_FREXP)
9526 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9527 CASE_BUILTIN_F (BUILT_IN_POW10)
9528 CASE_BUILTIN_I (BUILT_IN_FFS)
9529 CASE_BUILTIN_I (BUILT_IN_PARITY)
9530 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9531 /* Always true. */
9532 return 1;
9534 CASE_BUILTIN_F (BUILT_IN_SQRT)
9535 /* sqrt(-0.0) is -0.0. */
9536 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9537 return 1;
9538 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9540 CASE_BUILTIN_F (BUILT_IN_ASINH)
9541 CASE_BUILTIN_F (BUILT_IN_ATAN)
9542 CASE_BUILTIN_F (BUILT_IN_ATANH)
9543 CASE_BUILTIN_F (BUILT_IN_CBRT)
9544 CASE_BUILTIN_F (BUILT_IN_CEIL)
9545 CASE_BUILTIN_F (BUILT_IN_ERF)
9546 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9547 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9548 CASE_BUILTIN_F (BUILT_IN_FMOD)
9549 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9550 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9551 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9552 CASE_BUILTIN_F (BUILT_IN_LRINT)
9553 CASE_BUILTIN_F (BUILT_IN_LROUND)
9554 CASE_BUILTIN_F (BUILT_IN_MODF)
9555 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9556 CASE_BUILTIN_F (BUILT_IN_POW)
9557 CASE_BUILTIN_F (BUILT_IN_RINT)
9558 CASE_BUILTIN_F (BUILT_IN_ROUND)
9559 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9560 CASE_BUILTIN_F (BUILT_IN_SINH)
9561 CASE_BUILTIN_F (BUILT_IN_TANH)
9562 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9563 /* True if the 1st argument is nonnegative. */
9564 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9566 CASE_BUILTIN_F (BUILT_IN_FMAX)
9567 /* True if the 1st OR 2nd arguments are nonnegative. */
9568 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9569 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9571 CASE_BUILTIN_F (BUILT_IN_FMIN)
9572 /* True if the 1st AND 2nd arguments are nonnegative. */
9573 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9574 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9576 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9577 /* True if the 2nd argument is nonnegative. */
9578 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9580 default:
9581 break;
9582 #undef CASE_BUILTIN_F
9583 #undef CASE_BUILTIN_I
9587 /* ... fall through ... */
9589 default:
9590 if (truth_value_p (TREE_CODE (t)))
9591 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9592 return 1;
9595 /* We don't know sign of `t', so be conservative and return false. */
9596 return 0;
9599 /* Return true when T is an address and is known to be nonzero.
9600 For floating point we further ensure that T is not denormal.
9601 Similar logic is present in nonzero_address in rtlanal.h */
9603 static bool
9604 tree_expr_nonzero_p (tree t)
9606 tree type = TREE_TYPE (t);
9608 /* Doing something useful for floating point would need more work. */
9609 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9610 return false;
9612 switch (TREE_CODE (t))
9614 case ABS_EXPR:
9615 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9616 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9618 case INTEGER_CST:
9619 return !integer_zerop (t);
9621 case PLUS_EXPR:
9622 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9624 /* With the presence of negative values it is hard
9625 to say something. */
9626 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9627 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9628 return false;
9629 /* One of operands must be positive and the other non-negative. */
9630 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9631 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9633 break;
9635 case MULT_EXPR:
9636 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9638 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9639 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9641 break;
9643 case NOP_EXPR:
9645 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9646 tree outer_type = TREE_TYPE (t);
9648 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9649 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9651 break;
9653 case ADDR_EXPR:
9654 /* Weak declarations may link to NULL. */
9655 if (DECL_P (TREE_OPERAND (t, 0)))
9656 return !DECL_WEAK (TREE_OPERAND (t, 0));
9657 /* Constants and all other cases are never weak. */
9658 return true;
9660 case COND_EXPR:
9661 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9662 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9664 case MIN_EXPR:
9665 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9666 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9668 case MAX_EXPR:
9669 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9671 /* When both operands are nonzero, then MAX must be too. */
9672 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9673 return true;
9675 /* MAX where operand 0 is positive is positive. */
9676 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9678 /* MAX where operand 1 is positive is positive. */
9679 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9680 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9681 return true;
9682 break;
9684 case COMPOUND_EXPR:
9685 case MODIFY_EXPR:
9686 case BIND_EXPR:
9687 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9689 case SAVE_EXPR:
9690 case NON_LVALUE_EXPR:
9691 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9693 case BIT_IOR_EXPR:
9694 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9695 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9697 default:
9698 break;
9700 return false;
9703 /* See if we are applying CODE, a relational to the highest or lowest
9704 possible integer of TYPE. If so, then the result is a compile
9705 time constant. */
9707 static tree
9708 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9709 tree *op1_p)
9711 tree op0 = *op0_p;
9712 tree op1 = *op1_p;
9713 enum tree_code code = *code_p;
9714 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9716 if (TREE_CODE (op1) == INTEGER_CST
9717 && ! TREE_CONSTANT_OVERFLOW (op1)
9718 && width <= HOST_BITS_PER_WIDE_INT
9719 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9720 || POINTER_TYPE_P (TREE_TYPE (op1))))
9722 unsigned HOST_WIDE_INT signed_max;
9723 unsigned HOST_WIDE_INT max, min;
9725 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9727 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9729 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9730 min = 0;
9732 else
9734 max = signed_max;
9735 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9738 if (TREE_INT_CST_HIGH (op1) == 0
9739 && TREE_INT_CST_LOW (op1) == max)
9740 switch (code)
9742 case GT_EXPR:
9743 return omit_one_operand (type, integer_zero_node, op0);
9745 case GE_EXPR:
9746 *code_p = EQ_EXPR;
9747 break;
9748 case LE_EXPR:
9749 return omit_one_operand (type, integer_one_node, op0);
9751 case LT_EXPR:
9752 *code_p = NE_EXPR;
9753 break;
9755 /* The GE_EXPR and LT_EXPR cases above are not normally
9756 reached because of previous transformations. */
9758 default:
9759 break;
9761 else if (TREE_INT_CST_HIGH (op1) == 0
9762 && TREE_INT_CST_LOW (op1) == max - 1)
9763 switch (code)
9765 case GT_EXPR:
9766 *code_p = EQ_EXPR;
9767 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9768 break;
9769 case LE_EXPR:
9770 *code_p = NE_EXPR;
9771 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9772 break;
9773 default:
9774 break;
9776 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9777 && TREE_INT_CST_LOW (op1) == min)
9778 switch (code)
9780 case LT_EXPR:
9781 return omit_one_operand (type, integer_zero_node, op0);
9783 case LE_EXPR:
9784 *code_p = EQ_EXPR;
9785 break;
9787 case GE_EXPR:
9788 return omit_one_operand (type, integer_one_node, op0);
9790 case GT_EXPR:
9791 *code_p = NE_EXPR;
9792 break;
9794 default:
9795 break;
9797 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9798 && TREE_INT_CST_LOW (op1) == min + 1)
9799 switch (code)
9801 case GE_EXPR:
9802 *code_p = NE_EXPR;
9803 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9804 break;
9805 case LT_EXPR:
9806 *code_p = EQ_EXPR;
9807 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9808 break;
9809 default:
9810 break;
9813 else if (TREE_INT_CST_HIGH (op1) == 0
9814 && TREE_INT_CST_LOW (op1) == signed_max
9815 && TYPE_UNSIGNED (TREE_TYPE (op1))
9816 /* signed_type does not work on pointer types. */
9817 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9819 /* The following case also applies to X < signed_max+1
9820 and X >= signed_max+1 because previous transformations. */
9821 if (code == LE_EXPR || code == GT_EXPR)
9823 tree st0, st1, exp, retval;
9824 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9825 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9827 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9828 type,
9829 fold_convert (st0, op0),
9830 fold_convert (st1, integer_zero_node));
9832 retval
9833 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9834 TREE_TYPE (exp),
9835 TREE_OPERAND (exp, 0),
9836 TREE_OPERAND (exp, 1));
9838 /* If we are in gimple form, then returning EXP would create
9839 non-gimple expressions. Clearing it is safe and insures
9840 we do not allow a non-gimple expression to escape. */
9841 if (in_gimple_form)
9842 exp = NULL;
9844 return (retval ? retval : exp);
9849 return NULL_TREE;
9853 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9854 attempt to fold the expression to a constant without modifying TYPE,
9855 OP0 or OP1.
9857 If the expression could be simplified to a constant, then return
9858 the constant. If the expression would not be simplified to a
9859 constant, then return NULL_TREE.
9861 Note this is primarily designed to be called after gimplification
9862 of the tree structures and when at least one operand is a constant.
9863 As a result of those simplifying assumptions this routine is far
9864 simpler than the generic fold routine. */
9866 tree
9867 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9868 tree op0, tree op1)
9870 int wins = 1;
9871 tree subop0;
9872 tree subop1;
9873 tree tem;
9875 /* If this is a commutative operation, and ARG0 is a constant, move it
9876 to ARG1 to reduce the number of tests below. */
9877 if (commutative_tree_code (code)
9878 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9880 tem = op0;
9881 op0 = op1;
9882 op1 = tem;
9885 /* If either operand is a complex type, extract its real component. */
9886 if (TREE_CODE (op0) == COMPLEX_CST)
9887 subop0 = TREE_REALPART (op0);
9888 else
9889 subop0 = op0;
9891 if (TREE_CODE (op1) == COMPLEX_CST)
9892 subop1 = TREE_REALPART (op1);
9893 else
9894 subop1 = op1;
9896 /* Note if either argument is not a real or integer constant.
9897 With a few exceptions, simplification is limited to cases
9898 where both arguments are constants. */
9899 if ((TREE_CODE (subop0) != INTEGER_CST
9900 && TREE_CODE (subop0) != REAL_CST)
9901 || (TREE_CODE (subop1) != INTEGER_CST
9902 && TREE_CODE (subop1) != REAL_CST))
9903 wins = 0;
9905 switch (code)
9907 case PLUS_EXPR:
9908 /* (plus (address) (const_int)) is a constant. */
9909 if (TREE_CODE (op0) == PLUS_EXPR
9910 && TREE_CODE (op1) == INTEGER_CST
9911 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9912 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9913 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9914 == ADDR_EXPR)))
9915 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9917 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9918 const_binop (PLUS_EXPR, op1,
9919 TREE_OPERAND (op0, 1), 0));
9921 case BIT_XOR_EXPR:
9923 binary:
9924 if (!wins)
9925 return NULL_TREE;
9927 /* Both arguments are constants. Simplify. */
9928 tem = const_binop (code, op0, op1, 0);
9929 if (tem != NULL_TREE)
9931 /* The return value should always have the same type as
9932 the original expression. */
9933 if (TREE_TYPE (tem) != type)
9934 tem = fold_convert (type, tem);
9936 return tem;
9938 return NULL_TREE;
9940 case MINUS_EXPR:
9941 /* Fold &x - &x. This can happen from &x.foo - &x.
9942 This is unsafe for certain floats even in non-IEEE formats.
9943 In IEEE, it is unsafe because it does wrong for NaNs.
9944 Also note that operand_equal_p is always false if an
9945 operand is volatile. */
9946 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9947 return fold_convert (type, integer_zero_node);
9949 goto binary;
9951 case MULT_EXPR:
9952 case BIT_AND_EXPR:
9953 /* Special case multiplication or bitwise AND where one argument
9954 is zero. */
9955 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9956 return omit_one_operand (type, op1, op0);
9957 else
9958 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9959 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9960 && real_zerop (op1))
9961 return omit_one_operand (type, op1, op0);
9963 goto binary;
9965 case BIT_IOR_EXPR:
9966 /* Special case when we know the result will be all ones. */
9967 if (integer_all_onesp (op1))
9968 return omit_one_operand (type, op1, op0);
9970 goto binary;
9972 case TRUNC_DIV_EXPR:
9973 case ROUND_DIV_EXPR:
9974 case FLOOR_DIV_EXPR:
9975 case CEIL_DIV_EXPR:
9976 case EXACT_DIV_EXPR:
9977 case TRUNC_MOD_EXPR:
9978 case ROUND_MOD_EXPR:
9979 case FLOOR_MOD_EXPR:
9980 case CEIL_MOD_EXPR:
9981 case RDIV_EXPR:
9982 /* Division by zero is undefined. */
9983 if (integer_zerop (op1))
9984 return NULL_TREE;
9986 if (TREE_CODE (op1) == REAL_CST
9987 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9988 && real_zerop (op1))
9989 return NULL_TREE;
9991 goto binary;
9993 case MIN_EXPR:
9994 if (INTEGRAL_TYPE_P (type)
9995 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9996 return omit_one_operand (type, op1, op0);
9998 goto binary;
10000 case MAX_EXPR:
10001 if (INTEGRAL_TYPE_P (type)
10002 && TYPE_MAX_VALUE (type)
10003 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10004 return omit_one_operand (type, op1, op0);
10006 goto binary;
10008 case RSHIFT_EXPR:
10009 /* Optimize -1 >> x for arithmetic right shifts. */
10010 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10011 return omit_one_operand (type, op0, op1);
10012 /* ... fall through ... */
10014 case LSHIFT_EXPR:
10015 if (integer_zerop (op0))
10016 return omit_one_operand (type, op0, op1);
10018 /* Since negative shift count is not well-defined, don't
10019 try to compute it in the compiler. */
10020 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10021 return NULL_TREE;
10023 goto binary;
10025 case LROTATE_EXPR:
10026 case RROTATE_EXPR:
10027 /* -1 rotated either direction by any amount is still -1. */
10028 if (integer_all_onesp (op0))
10029 return omit_one_operand (type, op0, op1);
10031 /* 0 rotated either direction by any amount is still zero. */
10032 if (integer_zerop (op0))
10033 return omit_one_operand (type, op0, op1);
10035 goto binary;
10037 case COMPLEX_EXPR:
10038 if (wins)
10039 return build_complex (type, op0, op1);
10040 return NULL_TREE;
10042 case LT_EXPR:
10043 case LE_EXPR:
10044 case GT_EXPR:
10045 case GE_EXPR:
10046 case EQ_EXPR:
10047 case NE_EXPR:
10048 /* If one arg is a real or integer constant, put it last. */
10049 if ((TREE_CODE (op0) == INTEGER_CST
10050 && TREE_CODE (op1) != INTEGER_CST)
10051 || (TREE_CODE (op0) == REAL_CST
10052 && TREE_CODE (op0) != REAL_CST))
10054 tree temp;
10056 temp = op0;
10057 op0 = op1;
10058 op1 = temp;
10059 code = swap_tree_comparison (code);
10062 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10063 This transformation affects the cases which are handled in later
10064 optimizations involving comparisons with non-negative constants. */
10065 if (TREE_CODE (op1) == INTEGER_CST
10066 && TREE_CODE (op0) != INTEGER_CST
10067 && tree_int_cst_sgn (op1) > 0)
10069 switch (code)
10071 case GE_EXPR:
10072 code = GT_EXPR;
10073 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10074 break;
10076 case LT_EXPR:
10077 code = LE_EXPR;
10078 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10079 break;
10081 default:
10082 break;
10086 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10087 if (tem)
10088 return tem;
10090 /* Fall through. */
10092 case ORDERED_EXPR:
10093 case UNORDERED_EXPR:
10094 case UNLT_EXPR:
10095 case UNLE_EXPR:
10096 case UNGT_EXPR:
10097 case UNGE_EXPR:
10098 case UNEQ_EXPR:
10099 case LTGT_EXPR:
10100 if (!wins)
10101 return NULL_TREE;
10103 return fold_relational_const (code, type, op0, op1);
10105 case RANGE_EXPR:
10106 /* This could probably be handled. */
10107 return NULL_TREE;
10109 case TRUTH_AND_EXPR:
10110 /* If second arg is constant zero, result is zero, but first arg
10111 must be evaluated. */
10112 if (integer_zerop (op1))
10113 return omit_one_operand (type, op1, op0);
10114 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10115 case will be handled here. */
10116 if (integer_zerop (op0))
10117 return omit_one_operand (type, op0, op1);
10118 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10119 return constant_boolean_node (true, type);
10120 return NULL_TREE;
10122 case TRUTH_OR_EXPR:
10123 /* If second arg is constant true, result is true, but we must
10124 evaluate first arg. */
10125 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10126 return omit_one_operand (type, op1, op0);
10127 /* Likewise for first arg, but note this only occurs here for
10128 TRUTH_OR_EXPR. */
10129 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10130 return omit_one_operand (type, op0, op1);
10131 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10132 return constant_boolean_node (false, type);
10133 return NULL_TREE;
10135 case TRUTH_XOR_EXPR:
10136 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10138 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10139 return constant_boolean_node (x, type);
10141 return NULL_TREE;
10143 default:
10144 return NULL_TREE;
10148 /* Given the components of a unary expression CODE, TYPE and OP0,
10149 attempt to fold the expression to a constant without modifying
10150 TYPE or OP0.
10152 If the expression could be simplified to a constant, then return
10153 the constant. If the expression would not be simplified to a
10154 constant, then return NULL_TREE.
10156 Note this is primarily designed to be called after gimplification
10157 of the tree structures and when op0 is a constant. As a result
10158 of those simplifying assumptions this routine is far simpler than
10159 the generic fold routine. */
10161 tree
10162 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10163 tree op0)
10165 /* Make sure we have a suitable constant argument. */
10166 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10168 tree subop;
10170 if (TREE_CODE (op0) == COMPLEX_CST)
10171 subop = TREE_REALPART (op0);
10172 else
10173 subop = op0;
10175 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10176 return NULL_TREE;
10179 switch (code)
10181 case NOP_EXPR:
10182 case FLOAT_EXPR:
10183 case CONVERT_EXPR:
10184 case FIX_TRUNC_EXPR:
10185 case FIX_FLOOR_EXPR:
10186 case FIX_CEIL_EXPR:
10187 return fold_convert_const (code, type, op0);
10189 case NEGATE_EXPR:
10190 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10191 return fold_negate_const (op0, type);
10192 else
10193 return NULL_TREE;
10195 case ABS_EXPR:
10196 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10197 return fold_abs_const (op0, type);
10198 else
10199 return NULL_TREE;
10201 case BIT_NOT_EXPR:
10202 if (TREE_CODE (op0) == INTEGER_CST)
10203 return fold_not_const (op0, type);
10204 else
10205 return NULL_TREE;
10207 case REALPART_EXPR:
10208 if (TREE_CODE (op0) == COMPLEX_CST)
10209 return TREE_REALPART (op0);
10210 else
10211 return NULL_TREE;
10213 case IMAGPART_EXPR:
10214 if (TREE_CODE (op0) == COMPLEX_CST)
10215 return TREE_IMAGPART (op0);
10216 else
10217 return NULL_TREE;
10219 case CONJ_EXPR:
10220 if (TREE_CODE (op0) == COMPLEX_CST
10221 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10222 return build_complex (type, TREE_REALPART (op0),
10223 negate_expr (TREE_IMAGPART (op0)));
10224 return NULL_TREE;
10226 default:
10227 return NULL_TREE;
10231 /* If EXP represents referencing an element in a constant string
10232 (either via pointer arithmetic or array indexing), return the
10233 tree representing the value accessed, otherwise return NULL. */
10235 tree
10236 fold_read_from_constant_string (tree exp)
10238 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10240 tree exp1 = TREE_OPERAND (exp, 0);
10241 tree index;
10242 tree string;
10244 if (TREE_CODE (exp) == INDIRECT_REF)
10245 string = string_constant (exp1, &index);
10246 else
10248 tree low_bound = array_ref_low_bound (exp);
10249 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10251 /* Optimize the special-case of a zero lower bound.
10253 We convert the low_bound to sizetype to avoid some problems
10254 with constant folding. (E.g. suppose the lower bound is 1,
10255 and its mode is QI. Without the conversion,l (ARRAY
10256 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10257 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
10258 if (! integer_zerop (low_bound))
10259 index = size_diffop (index, fold_convert (sizetype, low_bound));
10261 string = exp1;
10264 if (string
10265 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10266 && TREE_CODE (string) == STRING_CST
10267 && TREE_CODE (index) == INTEGER_CST
10268 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10269 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10270 == MODE_INT)
10271 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10272 return fold_convert (TREE_TYPE (exp),
10273 build_int_cst (NULL_TREE,
10274 (TREE_STRING_POINTER (string)
10275 [TREE_INT_CST_LOW (index)]), 0));
10277 return NULL;
10280 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10281 an integer constant or real constant.
10283 TYPE is the type of the result. */
10285 static tree
10286 fold_negate_const (tree arg0, tree type)
10288 tree t = NULL_TREE;
10290 if (TREE_CODE (arg0) == INTEGER_CST)
10292 unsigned HOST_WIDE_INT low;
10293 HOST_WIDE_INT high;
10294 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10295 TREE_INT_CST_HIGH (arg0),
10296 &low, &high);
10297 t = build_int_cst (type, low, high);
10298 t = force_fit_type (t, 1,
10299 (overflow | TREE_OVERFLOW (arg0))
10300 && !TYPE_UNSIGNED (type),
10301 TREE_CONSTANT_OVERFLOW (arg0));
10303 else if (TREE_CODE (arg0) == REAL_CST)
10304 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10305 #ifdef ENABLE_CHECKING
10306 else
10307 abort ();
10308 #endif
10310 return t;
10313 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10314 an integer constant or real constant.
10316 TYPE is the type of the result. */
10318 tree
10319 fold_abs_const (tree arg0, tree type)
10321 tree t = NULL_TREE;
10323 if (TREE_CODE (arg0) == INTEGER_CST)
10325 /* If the value is unsigned, then the absolute value is
10326 the same as the ordinary value. */
10327 if (TYPE_UNSIGNED (type))
10328 return arg0;
10329 /* Similarly, if the value is non-negative. */
10330 else if (INT_CST_LT (integer_minus_one_node, arg0))
10331 return arg0;
10332 /* If the value is negative, then the absolute value is
10333 its negation. */
10334 else
10336 unsigned HOST_WIDE_INT low;
10337 HOST_WIDE_INT high;
10338 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10339 TREE_INT_CST_HIGH (arg0),
10340 &low, &high);
10341 t = build_int_cst (type, low, high);
10342 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10343 TREE_CONSTANT_OVERFLOW (arg0));
10344 return t;
10347 else if (TREE_CODE (arg0) == REAL_CST)
10349 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10350 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10351 else
10352 return arg0;
10354 #ifdef ENABLE_CHECKING
10355 else
10356 abort ();
10357 #endif
10359 return t;
10362 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10363 constant. TYPE is the type of the result. */
10365 static tree
10366 fold_not_const (tree arg0, tree type)
10368 tree t = NULL_TREE;
10370 if (TREE_CODE (arg0) == INTEGER_CST)
10372 t = build_int_cst (type,
10373 ~ TREE_INT_CST_LOW (arg0),
10374 ~ TREE_INT_CST_HIGH (arg0));
10375 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10376 TREE_CONSTANT_OVERFLOW (arg0));
10378 #ifdef ENABLE_CHECKING
10379 else
10380 abort ();
10381 #endif
10383 return t;
10386 /* Given CODE, a relational operator, the target type, TYPE and two
10387 constant operands OP0 and OP1, return the result of the
10388 relational operation. If the result is not a compile time
10389 constant, then return NULL_TREE. */
10391 static tree
10392 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10394 int result, invert;
10396 /* From here on, the only cases we handle are when the result is
10397 known to be a constant. */
10399 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10401 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10402 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10404 /* Handle the cases where either operand is a NaN. */
10405 if (real_isnan (c0) || real_isnan (c1))
10407 switch (code)
10409 case EQ_EXPR:
10410 case ORDERED_EXPR:
10411 result = 0;
10412 break;
10414 case NE_EXPR:
10415 case UNORDERED_EXPR:
10416 case UNLT_EXPR:
10417 case UNLE_EXPR:
10418 case UNGT_EXPR:
10419 case UNGE_EXPR:
10420 case UNEQ_EXPR:
10421 result = 1;
10422 break;
10424 case LT_EXPR:
10425 case LE_EXPR:
10426 case GT_EXPR:
10427 case GE_EXPR:
10428 case LTGT_EXPR:
10429 if (flag_trapping_math)
10430 return NULL_TREE;
10431 result = 0;
10432 break;
10434 default:
10435 abort ();
10438 return constant_boolean_node (result, type);
10441 return constant_boolean_node (real_compare (code, c0, c1), type);
10444 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10446 To compute GT, swap the arguments and do LT.
10447 To compute GE, do LT and invert the result.
10448 To compute LE, swap the arguments, do LT and invert the result.
10449 To compute NE, do EQ and invert the result.
10451 Therefore, the code below must handle only EQ and LT. */
10453 if (code == LE_EXPR || code == GT_EXPR)
10455 tree tem = op0;
10456 op0 = op1;
10457 op1 = tem;
10458 code = swap_tree_comparison (code);
10461 /* Note that it is safe to invert for real values here because we
10462 have already handled the one case that it matters. */
10464 invert = 0;
10465 if (code == NE_EXPR || code == GE_EXPR)
10467 invert = 1;
10468 code = invert_tree_comparison (code, false);
10471 /* Compute a result for LT or EQ if args permit;
10472 Otherwise return T. */
10473 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10475 if (code == EQ_EXPR)
10476 result = tree_int_cst_equal (op0, op1);
10477 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10478 result = INT_CST_LT_UNSIGNED (op0, op1);
10479 else
10480 result = INT_CST_LT (op0, op1);
10482 else
10483 return NULL_TREE;
10485 if (invert)
10486 result ^= 1;
10487 return constant_boolean_node (result, type);
10490 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10491 avoid confusing the gimplify process. */
10493 tree
10494 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10496 if (TREE_CODE (t) == INDIRECT_REF)
10498 t = TREE_OPERAND (t, 0);
10499 if (TREE_TYPE (t) != ptrtype)
10500 t = build1 (NOP_EXPR, ptrtype, t);
10502 else
10504 tree base = t;
10506 while (handled_component_p (base)
10507 || TREE_CODE (base) == REALPART_EXPR
10508 || TREE_CODE (base) == IMAGPART_EXPR)
10509 base = TREE_OPERAND (base, 0);
10510 if (DECL_P (base))
10511 TREE_ADDRESSABLE (base) = 1;
10513 t = build1 (ADDR_EXPR, ptrtype, t);
10516 return t;
10519 tree
10520 build_fold_addr_expr (tree t)
10522 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10525 /* Builds an expression for an indirection through T, simplifying some
10526 cases. */
10528 tree
10529 build_fold_indirect_ref (tree t)
10531 tree type = TREE_TYPE (TREE_TYPE (t));
10532 tree sub = t;
10533 tree subtype;
10535 STRIP_NOPS (sub);
10536 if (TREE_CODE (sub) == ADDR_EXPR)
10538 tree op = TREE_OPERAND (sub, 0);
10539 tree optype = TREE_TYPE (op);
10540 /* *&p => p */
10541 if (lang_hooks.types_compatible_p (type, optype))
10542 return op;
10543 /* *(foo *)&fooarray => fooarray[0] */
10544 else if (TREE_CODE (optype) == ARRAY_TYPE
10545 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10546 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10549 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10550 subtype = TREE_TYPE (sub);
10551 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10552 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10554 sub = build_fold_indirect_ref (sub);
10555 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10558 return build1 (INDIRECT_REF, type, t);
10561 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10562 whose result is ignored. The type of the returned tree need not be
10563 the same as the original expression. */
10565 tree
10566 fold_ignored_result (tree t)
10568 if (!TREE_SIDE_EFFECTS (t))
10569 return integer_zero_node;
10571 for (;;)
10572 switch (TREE_CODE_CLASS (TREE_CODE (t)))
10574 case '1':
10575 t = TREE_OPERAND (t, 0);
10576 break;
10578 case '2':
10579 case '<':
10580 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10581 t = TREE_OPERAND (t, 0);
10582 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10583 t = TREE_OPERAND (t, 1);
10584 else
10585 return t;
10586 break;
10588 case 'e':
10589 switch (TREE_CODE (t))
10591 case COMPOUND_EXPR:
10592 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10593 return t;
10594 t = TREE_OPERAND (t, 0);
10595 break;
10597 case COND_EXPR:
10598 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10599 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10600 return t;
10601 t = TREE_OPERAND (t, 0);
10602 break;
10604 default:
10605 return t;
10607 break;
10609 default:
10610 return t;
10614 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
10615 This can only be applied to objects of a sizetype. */
10617 tree
10618 round_up (tree value, int divisor)
10620 tree div = NULL_TREE;
10622 if (divisor <= 0)
10623 abort ();
10624 if (divisor == 1)
10625 return value;
10627 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10628 have to do anything. Only do this when we are not given a const,
10629 because in that case, this check is more expensive than just
10630 doing it. */
10631 if (TREE_CODE (value) != INTEGER_CST)
10633 div = size_int_type (divisor, TREE_TYPE (value));
10635 if (multiple_of_p (TREE_TYPE (value), value, div))
10636 return value;
10639 /* If divisor is a power of two, simplify this to bit manipulation. */
10640 if (divisor == (divisor & -divisor))
10642 tree t;
10644 t = build_int_cst (TREE_TYPE (value), divisor - 1, 0);
10645 value = size_binop (PLUS_EXPR, value, t);
10646 t = build_int_cst (TREE_TYPE (value), -divisor, -1);
10647 value = size_binop (BIT_AND_EXPR, value, t);
10649 else
10651 if (!div)
10652 div = size_int_type (divisor, TREE_TYPE (value));
10653 value = size_binop (CEIL_DIV_EXPR, value, div);
10654 value = size_binop (MULT_EXPR, value, div);
10657 return value;
10660 /* Likewise, but round down. */
10662 tree
10663 round_down (tree value, int divisor)
10665 tree div = NULL_TREE;
10667 if (divisor <= 0)
10668 abort ();
10669 if (divisor == 1)
10670 return value;
10672 div = size_int_type (divisor, TREE_TYPE (value));
10674 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10675 have to do anything. Only do this when we are not given a const,
10676 because in that case, this check is more expensive than just
10677 doing it. */
10678 if (TREE_CODE (value) != INTEGER_CST)
10680 div = size_int_type (divisor, TREE_TYPE (value));
10682 if (multiple_of_p (TREE_TYPE (value), value, div))
10683 return value;
10686 /* If divisor is a power of two, simplify this to bit manipulation. */
10687 if (divisor == (divisor & -divisor))
10689 tree t;
10691 t = build_int_cst (TREE_TYPE (value), -divisor, -1);
10692 value = size_binop (BIT_AND_EXPR, value, t);
10694 else
10696 if (!div)
10697 div = size_int_type (divisor, TREE_TYPE (value));
10698 value = size_binop (FLOOR_DIV_EXPR, value, div);
10699 value = size_binop (MULT_EXPR, value, div);
10702 return value;
10704 #include "gt-fold-const.h"