* config/mips/mips.md (any_shift): New code macro.
[official-gcc.git] / gcc / fold-const.c
blob1c8c401d3a0eb98020ab5fd8431b7cfedce6f66a
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The low three bits encode LT / EQ / GT as independent flags, so
   e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ.  Bit 3 adds the
   "unordered" flag used for IEEE floating-point comparisons.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static hashval_t size_htab_hash (const void *);
93 static int size_htab_eq (const void *, const void *);
94 static tree fold_convert_const (enum tree_code, tree, tree);
95 static enum tree_code invert_tree_comparison (enum tree_code, bool);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
110 tree *, tree *);
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree make_range (tree, int *, tree *, tree *);
116 static tree build_range_check (tree, tree, int, tree, tree);
117 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
118 tree);
119 static tree fold_range_test (tree);
120 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
121 static tree unextend (tree, int, int, tree);
122 static tree fold_truthop (enum tree_code, tree, tree, tree);
123 static tree optimize_minmax_comparison (tree);
124 static tree extract_muldiv (tree, tree, enum tree_code, tree);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
126 static int multiple_of_p (tree, tree, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
128 tree, int);
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
131 tree, tree, tree);
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_relational_hi_lo (enum tree_code *, const tree,
139 tree *, tree *);
140 static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
163 /* Unpack a two-word integer into 4 words.
164 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
165 WORDS points to the array of HOST_WIDE_INTs. */
167 static void
168 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
170 words[0] = LOWPART (low);
171 words[1] = HIGHPART (low);
172 words[2] = LOWPART (hi);
173 words[3] = HIGHPART (hi);
176 /* Pack an array of 4 words into a two-word integer.
177 WORDS points to the array of words.
178 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
180 static void
181 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
182 HOST_WIDE_INT *hi)
184 *low = words[0] + words[1] * BASE;
185 *hi = words[2] + words[3] * BASE;
188 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
189 in overflow of the value, when >0 we are only interested in signed
190 overflow, for <0 we are interested in any overflow. OVERFLOWED
191 indicates whether overflow has already occurred. CONST_OVERFLOWED
192 indicates whether constant overflow has already occurred. We force
193 T's value to be within range of T's type (by setting to 0 or 1 all
194 the bits outside the type's range). We set TREE_OVERFLOWED if,
195 OVERFLOWED is non-zero,
196 or OVERFLOWABLE is >0 and signed overflow occurs
197 or OVERFLOWABLE is <0 and any overflow occurs
198 We set TREE_CONSTANT_OVERFLOWED if,
199 CONST_OVERFLOWED is non-zero
200 or we set TREE_OVERFLOWED.
201 We return either the original T, or a copy. */
203 tree
204 force_fit_type (tree t, int overflowable,
205 bool overflowed, bool overflowed_const)
207 unsigned HOST_WIDE_INT low;
208 HOST_WIDE_INT high;
209 unsigned int prec;
210 int sign_extended_type;
212 if (TREE_CODE (t) != INTEGER_CST)
213 abort ();
215 low = TREE_INT_CST_LOW (t);
216 high = TREE_INT_CST_HIGH (t);
218 if (POINTER_TYPE_P (TREE_TYPE (t))
219 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
220 prec = POINTER_SIZE;
221 else
222 prec = TYPE_PRECISION (TREE_TYPE (t));
223 /* Size types *are* sign extended. */
224 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
225 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
226 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
228 /* First clear all bits that are beyond the type's precision. */
230 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
232 else if (prec > HOST_BITS_PER_WIDE_INT)
233 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
234 else
236 high = 0;
237 if (prec < HOST_BITS_PER_WIDE_INT)
238 low &= ~((HOST_WIDE_INT) (-1) << prec);
241 if (!sign_extended_type)
242 /* No sign extension */;
243 else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
244 /* Correct width already. */;
245 else if (prec > HOST_BITS_PER_WIDE_INT)
247 /* Sign extend top half? */
248 if (high & ((unsigned HOST_WIDE_INT)1
249 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
250 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
252 else if (prec == HOST_BITS_PER_WIDE_INT)
254 if ((HOST_WIDE_INT)low < 0)
255 high = -1;
257 else
259 /* Sign extend bottom half? */
260 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
262 high = -1;
263 low |= (HOST_WIDE_INT)(-1) << prec;
267 /* If the value changed, return a new node. */
268 if (overflowed || overflowed_const
269 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
271 t = build_int_cst_wide (TREE_TYPE (t), low, high);
273 if (overflowed
274 || overflowable < 0
275 || (overflowable > 0 && sign_extended_type))
277 t = copy_node (t);
278 TREE_OVERFLOW (t) = 1;
279 TREE_CONSTANT_OVERFLOW (t) = 1;
281 else if (overflowed_const)
283 t = copy_node (t);
284 TREE_CONSTANT_OVERFLOW (t) = 1;
288 return t;
291 /* Add two doubleword integers with doubleword result.
292 Each argument is given as two `HOST_WIDE_INT' pieces.
293 One argument is L1 and H1; the other, L2 and H2.
294 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
297 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
298 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
299 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
301 unsigned HOST_WIDE_INT l;
302 HOST_WIDE_INT h;
304 l = l1 + l2;
305 h = h1 + h2 + (l < l1);
307 *lv = l;
308 *hv = h;
309 return OVERFLOW_SUM_SIGN (h1, h2, h);
312 /* Negate a doubleword integer with doubleword result.
313 Return nonzero if the operation overflows, assuming it's signed.
314 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
315 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
318 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
319 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
321 if (l1 == 0)
323 *lv = 0;
324 *hv = - h1;
325 return (*hv & h1) < 0;
327 else
329 *lv = -l1;
330 *hv = ~h1;
331 return 0;
335 /* Multiply two doubleword integers with doubleword result.
336 Return nonzero if the operation overflows, assuming it's signed.
337 Each argument is given as two `HOST_WIDE_INT' pieces.
338 One argument is L1 and H1; the other, L2 and H2.
339 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
342 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
343 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
344 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
346 HOST_WIDE_INT arg1[4];
347 HOST_WIDE_INT arg2[4];
348 HOST_WIDE_INT prod[4 * 2];
349 unsigned HOST_WIDE_INT carry;
350 int i, j, k;
351 unsigned HOST_WIDE_INT toplow, neglow;
352 HOST_WIDE_INT tophigh, neghigh;
354 encode (arg1, l1, h1);
355 encode (arg2, l2, h2);
357 memset (prod, 0, sizeof prod);
359 for (i = 0; i < 4; i++)
361 carry = 0;
362 for (j = 0; j < 4; j++)
364 k = i + j;
365 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
366 carry += arg1[i] * arg2[j];
367 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
368 carry += prod[k];
369 prod[k] = LOWPART (carry);
370 carry = HIGHPART (carry);
372 prod[i + 4] = carry;
375 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
377 /* Check for overflow by calculating the top half of the answer in full;
378 it should agree with the low half's sign bit. */
379 decode (prod + 4, &toplow, &tophigh);
380 if (h1 < 0)
382 neg_double (l2, h2, &neglow, &neghigh);
383 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
385 if (h2 < 0)
387 neg_double (l1, h1, &neglow, &neghigh);
388 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
390 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
393 /* Shift the doubleword integer in L1, H1 left by COUNT places
394 keeping only PREC bits of result.
395 Shift right if COUNT is negative.
396 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
397 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
399 void
400 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
401 HOST_WIDE_INT count, unsigned int prec,
402 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
404 unsigned HOST_WIDE_INT signmask;
406 if (count < 0)
408 rshift_double (l1, h1, -count, prec, lv, hv, arith);
409 return;
412 if (SHIFT_COUNT_TRUNCATED)
413 count %= prec;
415 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
417 /* Shifting by the host word size is undefined according to the
418 ANSI standard, so we must handle this as a special case. */
419 *hv = 0;
420 *lv = 0;
422 else if (count >= HOST_BITS_PER_WIDE_INT)
424 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
425 *lv = 0;
427 else
429 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
430 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
431 *lv = l1 << count;
434 /* Sign extend all bits that are beyond the precision. */
436 signmask = -((prec > HOST_BITS_PER_WIDE_INT
437 ? ((unsigned HOST_WIDE_INT) *hv
438 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
439 : (*lv >> (prec - 1))) & 1);
441 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
443 else if (prec >= HOST_BITS_PER_WIDE_INT)
445 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
446 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
448 else
450 *hv = signmask;
451 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
452 *lv |= signmask << prec;
456 /* Shift the doubleword integer in L1, H1 right by COUNT places
457 keeping only PREC bits of result. COUNT must be positive.
458 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
459 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
461 void
462 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
463 HOST_WIDE_INT count, unsigned int prec,
464 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
465 int arith)
467 unsigned HOST_WIDE_INT signmask;
469 signmask = (arith
470 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
471 : 0);
473 if (SHIFT_COUNT_TRUNCATED)
474 count %= prec;
476 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
478 /* Shifting by the host word size is undefined according to the
479 ANSI standard, so we must handle this as a special case. */
480 *hv = 0;
481 *lv = 0;
483 else if (count >= HOST_BITS_PER_WIDE_INT)
485 *hv = 0;
486 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
488 else
490 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
491 *lv = ((l1 >> count)
492 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
495 /* Zero / sign extend all bits that are beyond the precision. */
497 if (count >= (HOST_WIDE_INT)prec)
499 *hv = signmask;
500 *lv = signmask;
502 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
504 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
506 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
507 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
509 else
511 *hv = signmask;
512 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
513 *lv |= signmask << (prec - count);
517 /* Rotate the doubleword integer in L1, H1 left by COUNT places
518 keeping only PREC bits of result.
519 Rotate right if COUNT is negative.
520 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
522 void
523 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
524 HOST_WIDE_INT count, unsigned int prec,
525 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
527 unsigned HOST_WIDE_INT s1l, s2l;
528 HOST_WIDE_INT s1h, s2h;
530 count %= prec;
531 if (count < 0)
532 count += prec;
534 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
535 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
536 *lv = s1l | s2l;
537 *hv = s1h | s2h;
540 /* Rotate the doubleword integer in L1, H1 left by COUNT places
541 keeping only PREC bits of result. COUNT must be positive.
542 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
544 void
545 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
546 HOST_WIDE_INT count, unsigned int prec,
547 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
549 unsigned HOST_WIDE_INT s1l, s2l;
550 HOST_WIDE_INT s1h, s2h;
552 count %= prec;
553 if (count < 0)
554 count += prec;
556 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
557 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
558 *lv = s1l | s2l;
559 *hv = s1h | s2h;
562 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
563 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
564 CODE is a tree code for a kind of division, one of
565 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
566 or EXACT_DIV_EXPR
567 It controls how the quotient is rounded to an integer.
568 Return nonzero if the operation overflows.
569 UNS nonzero says do unsigned division. */
572 div_and_round_double (enum tree_code code, int uns,
573 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
574 HOST_WIDE_INT hnum_orig,
575 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
576 HOST_WIDE_INT hden_orig,
577 unsigned HOST_WIDE_INT *lquo,
578 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
579 HOST_WIDE_INT *hrem)
581 int quo_neg = 0;
582 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
583 HOST_WIDE_INT den[4], quo[4];
584 int i, j;
585 unsigned HOST_WIDE_INT work;
586 unsigned HOST_WIDE_INT carry = 0;
587 unsigned HOST_WIDE_INT lnum = lnum_orig;
588 HOST_WIDE_INT hnum = hnum_orig;
589 unsigned HOST_WIDE_INT lden = lden_orig;
590 HOST_WIDE_INT hden = hden_orig;
591 int overflow = 0;
593 if (hden == 0 && lden == 0)
594 overflow = 1, lden = 1;
596 /* Calculate quotient sign and convert operands to unsigned. */
597 if (!uns)
599 if (hnum < 0)
601 quo_neg = ~ quo_neg;
602 /* (minimum integer) / (-1) is the only overflow case. */
603 if (neg_double (lnum, hnum, &lnum, &hnum)
604 && ((HOST_WIDE_INT) lden & hden) == -1)
605 overflow = 1;
607 if (hden < 0)
609 quo_neg = ~ quo_neg;
610 neg_double (lden, hden, &lden, &hden);
614 if (hnum == 0 && hden == 0)
615 { /* single precision */
616 *hquo = *hrem = 0;
617 /* This unsigned division rounds toward zero. */
618 *lquo = lnum / lden;
619 goto finish_up;
622 if (hnum == 0)
623 { /* trivial case: dividend < divisor */
624 /* hden != 0 already checked. */
625 *hquo = *lquo = 0;
626 *hrem = hnum;
627 *lrem = lnum;
628 goto finish_up;
631 memset (quo, 0, sizeof quo);
633 memset (num, 0, sizeof num); /* to zero 9th element */
634 memset (den, 0, sizeof den);
636 encode (num, lnum, hnum);
637 encode (den, lden, hden);
639 /* Special code for when the divisor < BASE. */
640 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
642 /* hnum != 0 already checked. */
643 for (i = 4 - 1; i >= 0; i--)
645 work = num[i] + carry * BASE;
646 quo[i] = work / lden;
647 carry = work % lden;
650 else
652 /* Full double precision division,
653 with thanks to Don Knuth's "Seminumerical Algorithms". */
654 int num_hi_sig, den_hi_sig;
655 unsigned HOST_WIDE_INT quo_est, scale;
657 /* Find the highest nonzero divisor digit. */
658 for (i = 4 - 1;; i--)
659 if (den[i] != 0)
661 den_hi_sig = i;
662 break;
665 /* Insure that the first digit of the divisor is at least BASE/2.
666 This is required by the quotient digit estimation algorithm. */
668 scale = BASE / (den[den_hi_sig] + 1);
669 if (scale > 1)
670 { /* scale divisor and dividend */
671 carry = 0;
672 for (i = 0; i <= 4 - 1; i++)
674 work = (num[i] * scale) + carry;
675 num[i] = LOWPART (work);
676 carry = HIGHPART (work);
679 num[4] = carry;
680 carry = 0;
681 for (i = 0; i <= 4 - 1; i++)
683 work = (den[i] * scale) + carry;
684 den[i] = LOWPART (work);
685 carry = HIGHPART (work);
686 if (den[i] != 0) den_hi_sig = i;
690 num_hi_sig = 4;
692 /* Main loop */
693 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
695 /* Guess the next quotient digit, quo_est, by dividing the first
696 two remaining dividend digits by the high order quotient digit.
697 quo_est is never low and is at most 2 high. */
698 unsigned HOST_WIDE_INT tmp;
700 num_hi_sig = i + den_hi_sig + 1;
701 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
702 if (num[num_hi_sig] != den[den_hi_sig])
703 quo_est = work / den[den_hi_sig];
704 else
705 quo_est = BASE - 1;
707 /* Refine quo_est so it's usually correct, and at most one high. */
708 tmp = work - quo_est * den[den_hi_sig];
709 if (tmp < BASE
710 && (den[den_hi_sig - 1] * quo_est
711 > (tmp * BASE + num[num_hi_sig - 2])))
712 quo_est--;
714 /* Try QUO_EST as the quotient digit, by multiplying the
715 divisor by QUO_EST and subtracting from the remaining dividend.
716 Keep in mind that QUO_EST is the I - 1st digit. */
718 carry = 0;
719 for (j = 0; j <= den_hi_sig; j++)
721 work = quo_est * den[j] + carry;
722 carry = HIGHPART (work);
723 work = num[i + j] - LOWPART (work);
724 num[i + j] = LOWPART (work);
725 carry += HIGHPART (work) != 0;
728 /* If quo_est was high by one, then num[i] went negative and
729 we need to correct things. */
730 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
732 quo_est--;
733 carry = 0; /* add divisor back in */
734 for (j = 0; j <= den_hi_sig; j++)
736 work = num[i + j] + den[j] + carry;
737 carry = HIGHPART (work);
738 num[i + j] = LOWPART (work);
741 num [num_hi_sig] += carry;
744 /* Store the quotient digit. */
745 quo[i] = quo_est;
749 decode (quo, lquo, hquo);
751 finish_up:
752 /* If result is negative, make it so. */
753 if (quo_neg)
754 neg_double (*lquo, *hquo, lquo, hquo);
756 /* Compute trial remainder: rem = num - (quo * den) */
757 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
758 neg_double (*lrem, *hrem, lrem, hrem);
759 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
761 switch (code)
763 case TRUNC_DIV_EXPR:
764 case TRUNC_MOD_EXPR: /* round toward zero */
765 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
766 return overflow;
768 case FLOOR_DIV_EXPR:
769 case FLOOR_MOD_EXPR: /* round toward negative infinity */
770 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
772 /* quo = quo - 1; */
773 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
774 lquo, hquo);
776 else
777 return overflow;
778 break;
780 case CEIL_DIV_EXPR:
781 case CEIL_MOD_EXPR: /* round toward positive infinity */
782 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
784 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
785 lquo, hquo);
787 else
788 return overflow;
789 break;
791 case ROUND_DIV_EXPR:
792 case ROUND_MOD_EXPR: /* round to closest integer */
794 unsigned HOST_WIDE_INT labs_rem = *lrem;
795 HOST_WIDE_INT habs_rem = *hrem;
796 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
797 HOST_WIDE_INT habs_den = hden, htwice;
799 /* Get absolute values. */
800 if (*hrem < 0)
801 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
802 if (hden < 0)
803 neg_double (lden, hden, &labs_den, &habs_den);
805 /* If (2 * abs (lrem) >= abs (lden)) */
806 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
807 labs_rem, habs_rem, &ltwice, &htwice);
809 if (((unsigned HOST_WIDE_INT) habs_den
810 < (unsigned HOST_WIDE_INT) htwice)
811 || (((unsigned HOST_WIDE_INT) habs_den
812 == (unsigned HOST_WIDE_INT) htwice)
813 && (labs_den < ltwice)))
815 if (*hquo < 0)
816 /* quo = quo - 1; */
817 add_double (*lquo, *hquo,
818 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
819 else
820 /* quo = quo + 1; */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
822 lquo, hquo);
824 else
825 return overflow;
827 break;
829 default:
830 abort ();
833 /* Compute true remainder: rem = num - (quo * den) */
834 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
835 neg_double (*lrem, *hrem, lrem, hrem);
836 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
837 return overflow;
840 /* Return true if built-in mathematical function specified by CODE
841 preserves the sign of it argument, i.e. -f(x) == f(-x). */
843 static bool
844 negate_mathfn_p (enum built_in_function code)
846 switch (code)
848 case BUILT_IN_ASIN:
849 case BUILT_IN_ASINF:
850 case BUILT_IN_ASINL:
851 case BUILT_IN_ATAN:
852 case BUILT_IN_ATANF:
853 case BUILT_IN_ATANL:
854 case BUILT_IN_SIN:
855 case BUILT_IN_SINF:
856 case BUILT_IN_SINL:
857 case BUILT_IN_TAN:
858 case BUILT_IN_TANF:
859 case BUILT_IN_TANL:
860 return true;
862 default:
863 break;
865 return false;
868 /* Check whether we may negate an integer constant T without causing
869 overflow. */
871 bool
872 may_negate_without_overflow_p (tree t)
874 unsigned HOST_WIDE_INT val;
875 unsigned int prec;
876 tree type;
878 if (TREE_CODE (t) != INTEGER_CST)
879 abort ();
881 type = TREE_TYPE (t);
882 if (TYPE_UNSIGNED (type))
883 return false;
885 prec = TYPE_PRECISION (type);
886 if (prec > HOST_BITS_PER_WIDE_INT)
888 if (TREE_INT_CST_LOW (t) != 0)
889 return true;
890 prec -= HOST_BITS_PER_WIDE_INT;
891 val = TREE_INT_CST_HIGH (t);
893 else
894 val = TREE_INT_CST_LOW (t);
895 if (prec < HOST_BITS_PER_WIDE_INT)
896 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
897 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
900 /* Determine whether an expression T can be cheaply negated using
901 the function negate_expr. */
903 static bool
904 negate_expr_p (tree t)
906 tree type;
908 if (t == 0)
909 return false;
911 type = TREE_TYPE (t);
913 STRIP_SIGN_NOPS (t);
914 switch (TREE_CODE (t))
916 case INTEGER_CST:
917 if (TYPE_UNSIGNED (type) || ! flag_trapv)
918 return true;
920 /* Check that -CST will not overflow type. */
921 return may_negate_without_overflow_p (t);
923 case REAL_CST:
924 case NEGATE_EXPR:
925 return true;
927 case COMPLEX_CST:
928 return negate_expr_p (TREE_REALPART (t))
929 && negate_expr_p (TREE_IMAGPART (t));
931 case PLUS_EXPR:
932 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
933 return false;
934 /* -(A + B) -> (-B) - A. */
935 if (negate_expr_p (TREE_OPERAND (t, 1))
936 && reorder_operands_p (TREE_OPERAND (t, 0),
937 TREE_OPERAND (t, 1)))
938 return true;
939 /* -(A + B) -> (-A) - B. */
940 return negate_expr_p (TREE_OPERAND (t, 0));
942 case MINUS_EXPR:
943 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
944 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
945 && reorder_operands_p (TREE_OPERAND (t, 0),
946 TREE_OPERAND (t, 1));
948 case MULT_EXPR:
949 if (TYPE_UNSIGNED (TREE_TYPE (t)))
950 break;
952 /* Fall through. */
954 case RDIV_EXPR:
955 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
956 return negate_expr_p (TREE_OPERAND (t, 1))
957 || negate_expr_p (TREE_OPERAND (t, 0));
958 break;
960 case NOP_EXPR:
961 /* Negate -((double)float) as (double)(-float). */
962 if (TREE_CODE (type) == REAL_TYPE)
964 tree tem = strip_float_extensions (t);
965 if (tem != t)
966 return negate_expr_p (tem);
968 break;
970 case CALL_EXPR:
971 /* Negate -f(x) as f(-x). */
972 if (negate_mathfn_p (builtin_mathfn_code (t)))
973 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
974 break;
976 case RSHIFT_EXPR:
977 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
978 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
980 tree op1 = TREE_OPERAND (t, 1);
981 if (TREE_INT_CST_HIGH (op1) == 0
982 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
983 == TREE_INT_CST_LOW (op1))
984 return true;
986 break;
988 default:
989 break;
991 return false;
994 /* Given T, an expression, return the negation of T. Allow for T to be
995 null, in which case return null. */
997 static tree
998 negate_expr (tree t)
1000 tree type;
1001 tree tem;
1003 if (t == 0)
1004 return 0;
1006 type = TREE_TYPE (t);
1007 STRIP_SIGN_NOPS (t);
1009 switch (TREE_CODE (t))
1011 case INTEGER_CST:
1012 tem = fold_negate_const (t, type);
1013 if (! TREE_OVERFLOW (tem)
1014 || TYPE_UNSIGNED (type)
1015 || ! flag_trapv)
1016 return tem;
1017 break;
1019 case REAL_CST:
1020 tem = fold_negate_const (t, type);
1021 /* Two's complement FP formats, such as c4x, may overflow. */
1022 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1023 return fold_convert (type, tem);
1024 break;
1026 case COMPLEX_CST:
1028 tree rpart = negate_expr (TREE_REALPART (t));
1029 tree ipart = negate_expr (TREE_IMAGPART (t));
1031 if ((TREE_CODE (rpart) == REAL_CST
1032 && TREE_CODE (ipart) == REAL_CST)
1033 || (TREE_CODE (rpart) == INTEGER_CST
1034 && TREE_CODE (ipart) == INTEGER_CST))
1035 return build_complex (type, rpart, ipart);
1037 break;
1039 case NEGATE_EXPR:
1040 return fold_convert (type, TREE_OPERAND (t, 0));
1042 case PLUS_EXPR:
1043 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1045 /* -(A + B) -> (-B) - A. */
1046 if (negate_expr_p (TREE_OPERAND (t, 1))
1047 && reorder_operands_p (TREE_OPERAND (t, 0),
1048 TREE_OPERAND (t, 1)))
1050 tem = negate_expr (TREE_OPERAND (t, 1));
1051 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1052 tem, TREE_OPERAND (t, 0)));
1053 return fold_convert (type, tem);
1056 /* -(A + B) -> (-A) - B. */
1057 if (negate_expr_p (TREE_OPERAND (t, 0)))
1059 tem = negate_expr (TREE_OPERAND (t, 0));
1060 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1061 tem, TREE_OPERAND (t, 1)));
1062 return fold_convert (type, tem);
1065 break;
1067 case MINUS_EXPR:
1068 /* - (A - B) -> B - A */
1069 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1070 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1071 return fold_convert (type,
1072 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1073 TREE_OPERAND (t, 1),
1074 TREE_OPERAND (t, 0))));
1075 break;
1077 case MULT_EXPR:
1078 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1079 break;
1081 /* Fall through. */
1083 case RDIV_EXPR:
1084 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1086 tem = TREE_OPERAND (t, 1);
1087 if (negate_expr_p (tem))
1088 return fold_convert (type,
1089 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1090 TREE_OPERAND (t, 0),
1091 negate_expr (tem))));
1092 tem = TREE_OPERAND (t, 0);
1093 if (negate_expr_p (tem))
1094 return fold_convert (type,
1095 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1096 negate_expr (tem),
1097 TREE_OPERAND (t, 1))));
1099 break;
1101 case NOP_EXPR:
1102 /* Convert -((double)float) into (double)(-float). */
1103 if (TREE_CODE (type) == REAL_TYPE)
1105 tem = strip_float_extensions (t);
1106 if (tem != t && negate_expr_p (tem))
1107 return fold_convert (type, negate_expr (tem));
1109 break;
1111 case CALL_EXPR:
1112 /* Negate -f(x) as f(-x). */
1113 if (negate_mathfn_p (builtin_mathfn_code (t))
1114 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1116 tree fndecl, arg, arglist;
1118 fndecl = get_callee_fndecl (t);
1119 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1120 arglist = build_tree_list (NULL_TREE, arg);
1121 return build_function_call_expr (fndecl, arglist);
1123 break;
1125 case RSHIFT_EXPR:
1126 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1127 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1129 tree op1 = TREE_OPERAND (t, 1);
1130 if (TREE_INT_CST_HIGH (op1) == 0
1131 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1132 == TREE_INT_CST_LOW (op1))
1134 tree ntype = TYPE_UNSIGNED (type)
1135 ? lang_hooks.types.signed_type (type)
1136 : lang_hooks.types.unsigned_type (type);
1137 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1138 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1139 return fold_convert (type, temp);
1142 break;
1144 default:
1145 break;
1148 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1149 return fold_convert (type, tem);
1152 /* Split a tree IN into a constant, literal and variable parts that could be
1153 combined with CODE to make IN. "constant" means an expression with
1154 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1155 commutative arithmetic operation. Store the constant part into *CONP,
1156 the literal in *LITP and return the variable part. If a part isn't
1157 present, set it to null. If the tree does not decompose in this way,
1158 return the entire tree as the variable part and the other parts as null.
1160 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1161 case, we negate an operand that was subtracted. Except if it is a
1162 literal for which we use *MINUS_LITP instead.
1164 If NEGATE_P is true, we are negating all of IN, again except a literal
1165 for which we use *MINUS_LITP instead.
1167 If IN is itself a literal or constant, return it as appropriate.
1169 Note that we do not guarantee that any of the three values will be the
1170 same type as IN, but they will have the same signedness and mode. */
1172 static tree
1173 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1174 tree *minus_litp, int negate_p)
1176 tree var = 0;
/* Start with all output parts empty; only the pieces actually found
   below are filled in. */
1178 *conp = 0;
1179 *litp = 0;
1180 *minus_litp = 0;
1182 /* Strip any conversions that don't change the machine mode or signedness. */
1183 STRIP_SIGN_NOPS (in);
1185 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1186 *litp = in;
1187 else if (TREE_CODE (in) == code
1188 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1189 /* We can associate addition and subtraction together (even
1190 though the C standard doesn't say so) for integers because
1191 the value is not affected. For reals, the value might be
1192 affected, so we can't. */
1193 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1194 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1196 tree op0 = TREE_OPERAND (in, 0);
1197 tree op1 = TREE_OPERAND (in, 1);
/* In a MINUS_EXPR the second operand carries an implicit negation;
   NEG1_P records that so whichever part OP1 becomes inherits it. */
1198 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1199 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1201 /* First see if either of the operands is a literal, then a constant. */
1202 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1203 *litp = op0, op0 = 0;
1204 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1205 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1207 if (op0 != 0 && TREE_CONSTANT (op0))
1208 *conp = op0, op0 = 0;
1209 else if (op1 != 0 && TREE_CONSTANT (op1))
1210 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1212 /* If we haven't dealt with either operand, this is not a case we can
1213 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1214 if (op0 != 0 && op1 != 0)
1215 var = in;
1216 else if (op0 != 0)
1217 var = op0;
1218 else
1219 var = op1, neg_var_p = neg1_p;
1221 /* Now do any needed negations. */
/* A negated literal is reported through *MINUS_LITP rather than being
   wrapped in negate_expr, so callers can fold it exactly. */
1222 if (neg_litp_p)
1223 *minus_litp = *litp, *litp = 0;
1224 if (neg_conp_p)
1225 *conp = negate_expr (*conp);
1226 if (neg_var_p)
1227 var = negate_expr (var);
1229 else if (TREE_CONSTANT (in))
1230 *conp = in;
1231 else
1232 var = in;
/* Negating the whole expression: a literal swaps between *LITP and
   *MINUS_LITP; the constant and variable parts are negated in place. */
1234 if (negate_p)
1236 if (*litp)
1237 *minus_litp = *litp, *litp = 0;
1238 else if (*minus_litp)
1239 *litp = *minus_litp, *minus_litp = 0;
1240 *conp = negate_expr (*conp);
1241 var = negate_expr (var);
1244 return var;
1247 /* Re-associate trees split by the above function. T1 and T2 are either
1248 expressions to associate or null. Return the new expression, if any. If
1249 we build an operation, do it in TYPE and with CODE. */
1251 static tree
1252 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1254 if (t1 == 0)
1255 return t2;
1256 else if (t2 == 0)
1257 return t1;
1259 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1260 try to fold this since we will have infinite recursion. But do
1261 deal with any NEGATE_EXPRs. */
1262 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1263 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1265 if (code == PLUS_EXPR)
1267 if (TREE_CODE (t1) == NEGATE_EXPR)
1268 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1269 fold_convert (type, TREE_OPERAND (t1, 0)));
1270 else if (TREE_CODE (t2) == NEGATE_EXPR)
1271 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1272 fold_convert (type, TREE_OPERAND (t2, 0)));
1274 return build2 (code, type, fold_convert (type, t1),
1275 fold_convert (type, t2));
1278 return fold (build2 (code, type, fold_convert (type, t1),
1279 fold_convert (type, t2)));
1282 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1283 to produce a new constant.
1285 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1287 tree
1288 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Operands and result are kept as (high, low) word pairs, the
   double-word representation used by INTEGER_CST nodes. */
1290 unsigned HOST_WIDE_INT int1l, int2l;
1291 HOST_WIDE_INT int1h, int2h;
1292 unsigned HOST_WIDE_INT low;
1293 HOST_WIDE_INT hi;
1294 unsigned HOST_WIDE_INT garbagel;
1295 HOST_WIDE_INT garbageh;
1296 tree t;
1297 tree type = TREE_TYPE (arg1);
1298 int uns = TYPE_UNSIGNED (type);
1299 int is_sizetype
1300 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1301 int overflow = 0;
/* NOTE(review): no_overflow is set for shifts below but not consulted
   later in this function -- confirm whether it is still needed. */
1302 int no_overflow = 0;
1304 int1l = TREE_INT_CST_LOW (arg1);
1305 int1h = TREE_INT_CST_HIGH (arg1);
1306 int2l = TREE_INT_CST_LOW (arg2);
1307 int2h = TREE_INT_CST_HIGH (arg2);
1309 switch (code)
1311 case BIT_IOR_EXPR:
1312 low = int1l | int2l, hi = int1h | int2h;
1313 break;
1315 case BIT_XOR_EXPR:
1316 low = int1l ^ int2l, hi = int1h ^ int2h;
1317 break;
1319 case BIT_AND_EXPR:
1320 low = int1l & int2l, hi = int1h & int2h;
1321 break;
1323 case RSHIFT_EXPR:
/* A right shift is a left shift by the negated count.
   ... fall through ... */
1324 int2l = -int2l;
1325 case LSHIFT_EXPR:
1326 /* It's unclear from the C standard whether shifts can overflow.
1327 The following code ignores overflow; perhaps a C standard
1328 interpretation ruling is needed. */
1329 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1330 &low, &hi, !uns);
1331 no_overflow = 1;
1332 break;
1334 case RROTATE_EXPR:
/* Likewise, a right rotate is a left rotate by the negated count.
   ... fall through ... */
1335 int2l = - int2l;
1336 case LROTATE_EXPR:
1337 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1338 &low, &hi);
1339 break;
1341 case PLUS_EXPR:
1342 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1343 break;
1345 case MINUS_EXPR:
/* Subtraction is implemented as addition of the negation. */
1346 neg_double (int2l, int2h, &low, &hi);
1347 add_double (int1l, int1h, low, hi, &low, &hi);
1348 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1349 break;
1351 case MULT_EXPR:
1352 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1353 break;
1355 case TRUNC_DIV_EXPR:
1356 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1357 case EXACT_DIV_EXPR:
1358 /* This is a shortcut for a common special case. */
/* Both operands nonnegative and single-word: plain host division
   suffices (CEIL adjusts the dividend first). */
1359 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1360 && ! TREE_CONSTANT_OVERFLOW (arg1)
1361 && ! TREE_CONSTANT_OVERFLOW (arg2)
1362 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1364 if (code == CEIL_DIV_EXPR)
1365 int1l += int2l - 1;
1367 low = int1l / int2l, hi = 0;
1368 break;
1371 /* ... fall through ... */
1373 case ROUND_DIV_EXPR:
1374 if (int2h == 0 && int2l == 1)
1376 low = int1l, hi = int1h;
1377 break;
/* X / X is 1 for any nonzero X, regardless of rounding mode. */
1379 if (int1l == int2l && int1h == int2h
1380 && ! (int1l == 0 && int1h == 0))
1382 low = 1, hi = 0;
1383 break;
1385 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1386 &low, &hi, &garbagel, &garbageh);
1387 break;
1389 case TRUNC_MOD_EXPR:
1390 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1391 /* This is a shortcut for a common special case. */
1392 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1393 && ! TREE_CONSTANT_OVERFLOW (arg1)
1394 && ! TREE_CONSTANT_OVERFLOW (arg2)
1395 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1397 if (code == CEIL_MOD_EXPR)
1398 int1l += int2l - 1;
1399 low = int1l % int2l, hi = 0;
1400 break;
1403 /* ... fall through ... */
1405 case ROUND_MOD_EXPR:
/* For MOD the quotient words are the "garbage" outputs. */
1406 overflow = div_and_round_double (code, uns,
1407 int1l, int1h, int2l, int2h,
1408 &garbagel, &garbageh, &low, &hi);
1409 break;
1411 case MIN_EXPR:
1412 case MAX_EXPR:
/* LOW is temporarily used as a flag: nonzero iff ARG1 < ARG2,
   using unsigned or signed double-word comparison as appropriate. */
1413 if (uns)
1414 low = (((unsigned HOST_WIDE_INT) int1h
1415 < (unsigned HOST_WIDE_INT) int2h)
1416 || (((unsigned HOST_WIDE_INT) int1h
1417 == (unsigned HOST_WIDE_INT) int2h)
1418 && int1l < int2l));
1419 else
1420 low = (int1h < int2h
1421 || (int1h == int2h && int1l < int2l));
1423 if (low == (code == MIN_EXPR))
1424 low = int1l, hi = int1h;
1425 else
1426 low = int2l, hi = int2h;
1427 break;
1429 default:
1430 abort ();
1433 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1435 if (notrunc)
1437 /* Propagate overflow flags ourselves. */
1438 if (((!uns || is_sizetype) && overflow)
1439 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
/* Copy before setting flags so a shared cached node isn't clobbered. */
1441 t = copy_node (t);
1442 TREE_OVERFLOW (t) = 1;
1443 TREE_CONSTANT_OVERFLOW (t) = 1;
1445 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1447 t = copy_node (t);
1448 TREE_CONSTANT_OVERFLOW (t) = 1;
1451 else
/* Otherwise let force_fit_type truncate to TYPE and compute flags. */
1452 t = force_fit_type (t, 1,
1453 ((!uns || is_sizetype) && overflow)
1454 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1455 TREE_CONSTANT_OVERFLOW (arg1)
1456 | TREE_CONSTANT_OVERFLOW (arg2));
1458 return t;
1461 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1462 constant. We assume ARG1 and ARG2 have the same data type, or at least
1463 are the same kind of constant and the same machine mode.
1465 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1467 static tree
1468 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1470 STRIP_NOPS (arg1);
1471 STRIP_NOPS (arg2);
/* Dispatch on the kind of constant: integer, real, or complex. */
1473 if (TREE_CODE (arg1) == INTEGER_CST)
1474 return int_const_binop (code, arg1, arg2, notrunc);
1476 if (TREE_CODE (arg1) == REAL_CST)
1478 enum machine_mode mode;
1479 REAL_VALUE_TYPE d1;
1480 REAL_VALUE_TYPE d2;
1481 REAL_VALUE_TYPE value;
1482 tree t, type;
1484 d1 = TREE_REAL_CST (arg1);
1485 d2 = TREE_REAL_CST (arg2);
1487 type = TREE_TYPE (arg1);
1488 mode = TYPE_MODE (type);
1490 /* Don't perform operation if we honor signaling NaNs and
1491 either operand is a NaN. */
1492 if (HONOR_SNANS (mode)
1493 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1494 return NULL_TREE;
1496 /* Don't perform operation if it would raise a division
1497 by zero exception. */
1498 if (code == RDIV_EXPR
1499 && REAL_VALUES_EQUAL (d2, dconst0)
1500 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1501 return NULL_TREE;
1503 /* If either operand is a NaN, just return it. Otherwise, set up
1504 for floating-point trap; we return an overflow. */
1505 if (REAL_VALUE_ISNAN (d1))
1506 return arg1;
1507 else if (REAL_VALUE_ISNAN (d2))
1508 return arg2;
1510 REAL_ARITHMETIC (value, code, d1, d2);
1512 t = build_real (type, real_value_truncate (mode, value));
/* Carry the operands' overflow indications into the result. */
1514 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1515 TREE_CONSTANT_OVERFLOW (t)
1516 = TREE_OVERFLOW (t)
1517 | TREE_CONSTANT_OVERFLOW (arg1)
1518 | TREE_CONSTANT_OVERFLOW (arg2);
1519 return t;
1521 if (TREE_CODE (arg1) == COMPLEX_CST)
1523 tree type = TREE_TYPE (arg1);
1524 tree r1 = TREE_REALPART (arg1);
1525 tree i1 = TREE_IMAGPART (arg1);
1526 tree r2 = TREE_REALPART (arg2);
1527 tree i2 = TREE_IMAGPART (arg2);
1528 tree t;
1530 switch (code)
1532 case PLUS_EXPR:
1533 t = build_complex (type,
1534 const_binop (PLUS_EXPR, r1, r2, notrunc),
1535 const_binop (PLUS_EXPR, i1, i2, notrunc));
1536 break;
1538 case MINUS_EXPR:
1539 t = build_complex (type,
1540 const_binop (MINUS_EXPR, r1, r2, notrunc),
1541 const_binop (MINUS_EXPR, i1, i2, notrunc));
1542 break;
1544 case MULT_EXPR:
/* (r1 + i1*i)*(r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1545 t = build_complex (type,
1546 const_binop (MINUS_EXPR,
1547 const_binop (MULT_EXPR,
1548 r1, r2, notrunc),
1549 const_binop (MULT_EXPR,
1550 i1, i2, notrunc),
1551 notrunc),
1552 const_binop (PLUS_EXPR,
1553 const_binop (MULT_EXPR,
1554 r1, i2, notrunc),
1555 const_binop (MULT_EXPR,
1556 i1, r2, notrunc),
1557 notrunc));
1558 break;
1560 case RDIV_EXPR:
/* Division by the conjugate:
   (r1 + i1*i)/(r2 + i2*i)
   = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2).  */
1562 tree magsquared
1563 = const_binop (PLUS_EXPR,
1564 const_binop (MULT_EXPR, r2, r2, notrunc),
1565 const_binop (MULT_EXPR, i2, i2, notrunc),
1566 notrunc);
1568 t = build_complex (type,
1569 const_binop
1570 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1571 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1572 const_binop (PLUS_EXPR,
1573 const_binop (MULT_EXPR, r1, r2,
1574 notrunc),
1575 const_binop (MULT_EXPR, i1, i2,
1576 notrunc),
1577 notrunc),
1578 magsquared, notrunc),
1579 const_binop
1580 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1581 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1582 const_binop (MINUS_EXPR,
1583 const_binop (MULT_EXPR, i1, r2,
1584 notrunc),
1585 const_binop (MULT_EXPR, r1, i2,
1586 notrunc),
1587 notrunc),
1588 magsquared, notrunc));
1590 break;
1592 default:
1593 abort ();
1595 return t;
/* Any other kind of constant is not folded here. */
1597 return 0;
1600 /* These are the hash table functions for the hash table of INTEGER_CST
1601 nodes of a sizetype. */
1603 /* Return the hash code code X, an INTEGER_CST. */
1605 static hashval_t
1606 size_htab_hash (const void *x)
1608 tree t = (tree) x;
1610 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1611 ^ htab_hash_pointer (TREE_TYPE (t))
1612 ^ (TREE_OVERFLOW (t) << 20));
1615 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1616 is the same as that given by *Y, which is the same. */
1618 static int
1619 size_htab_eq (const void *x, const void *y)
1621 tree xt = (tree) x;
1622 tree yt = (tree) y;
1624 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1625 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1626 && TREE_TYPE (xt) == TREE_TYPE (yt)
1627 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1630 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1631 bits are given by NUMBER and of the sizetype represented by KIND. */
1633 tree
1634 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1636 return size_int_type (number, sizetype_tab[(int) kind]);
1639 /* Likewise, but the desired type is specified explicitly. */
/* Scratch INTEGER_CST node reused by size_int_type until it is entered
   into SIZE_HTAB, and the table of cached sizetype constants.  Both are
   GC-rooted; table entries are kept only while otherwise marked. */
1641 static GTY (()) tree new_const;
1642 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1643 htab_t size_htab;
1645 tree
1646 size_int_type (HOST_WIDE_INT number, tree type)
1648 void **slot;
1649 unsigned int prec;
1650 HOST_WIDE_INT high;
1651 unsigned HOST_WIDE_INT low;
/* Lazily create the cache table and the first scratch node. */
1653 if (size_htab == 0)
1655 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1656 new_const = make_node (INTEGER_CST);
1659 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1660 hash table, we return the value from the hash table. Otherwise, we
1661 place that in the hash table and make a new node for the next time. */
1662 prec = TYPE_PRECISION (type);
1663 TREE_TYPE (new_const) = type;
1664 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const) = 0;
1665 low = number;
1666 if (number >= 0)
1667 high = 0;
1668 else
1670 /* Sizetype IS sign extended. */
1671 high = -1;
/* For a narrow type, smear the sign bit through the low word so the
   stored value is the sign-extended form.
   NOTE(review): left-shifting -1 here is the historical GCC idiom;
   it relies on implementation-defined behavior of the host compiler. */
1672 if (prec <= HOST_BITS_PER_WIDE_INT)
1673 low |= (HOST_WIDE_INT)(-1) << (prec - 1);
1675 TREE_INT_CST_LOW (new_const) = low;
1676 TREE_INT_CST_HIGH (new_const) = high;
/* If the words no longer reproduce NUMBER, the value didn't fit:
   record overflow on the node. */
1678 if (low != (unsigned HOST_WIDE_INT)number
1679 || high != (number < 0 ? -1 : 0))
1680 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const) = 1;
1682 slot = htab_find_slot (size_htab, new_const, INSERT);
1683 if (*slot == 0)
/* Not cached yet: the scratch node becomes the cached constant and a
   fresh scratch node is made for the next call. */
1685 tree t = new_const;
1687 *slot = new_const;
1688 new_const = make_node (INTEGER_CST);
1689 return t;
1691 else
1692 return (tree) *slot;
1695 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1696 is a tree code. The type of the result is taken from the operands.
1697 Both must be the same type integer type and it must be a size type.
1698 If the operands are constant, so is the result. */
1700 tree
1701 size_binop (enum tree_code code, tree arg0, tree arg1)
1703 tree type = TREE_TYPE (arg0);
1705 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1706 || type != TREE_TYPE (arg1))
1707 abort ();
1709 /* Handle the special case of two integer constants faster. */
1710 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1712 /* And some specific cases even faster than that. */
1713 if (code == PLUS_EXPR && integer_zerop (arg0))
1714 return arg1;
1715 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1716 && integer_zerop (arg1))
1717 return arg0;
1718 else if (code == MULT_EXPR && integer_onep (arg0))
1719 return arg1;
1721 /* Handle general case of two integer constants. */
1722 return int_const_binop (code, arg0, arg1, 0);
1725 if (arg0 == error_mark_node || arg1 == error_mark_node)
1726 return error_mark_node;
1728 return fold (build2 (code, type, arg0, arg1));
1731 /* Given two values, either both of sizetype or both of bitsizetype,
1732 compute the difference between the two values. Return the value
1733 in signed type corresponding to the type of the operands. */
1735 tree
1736 size_diffop (tree arg0, tree arg1)
1738 tree type = TREE_TYPE (arg0);
1739 tree ctype;
1741 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1742 || type != TREE_TYPE (arg1))
1743 abort ();
1745 /* If the type is already signed, just do the simple thing. */
1746 if (!TYPE_UNSIGNED (type))
1747 return size_binop (MINUS_EXPR, arg0, arg1);
1749 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1751 /* If either operand is not a constant, do the conversions to the signed
1752 type and subtract. The hardware will do the right thing with any
1753 overflow in the subtraction. */
1754 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1755 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1756 fold_convert (ctype, arg1));
1758 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1759 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1760 overflow) and negate (which can't either). Special-case a result
1761 of zero while we're here. */
1762 if (tree_int_cst_equal (arg0, arg1))
1763 return fold_convert (ctype, integer_zero_node);
1764 else if (tree_int_cst_lt (arg1, arg0))
1765 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1766 else
1767 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1768 fold_convert (ctype, size_binop (MINUS_EXPR,
1769 arg1, arg0)));
1773 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1774 type TYPE. If no simplification can be done return NULL_TREE. */
1776 static tree
1777 fold_convert_const (enum tree_code code, tree type, tree arg1)
1779 int overflow = 0;
1780 tree t;
1782 if (TREE_TYPE (arg1) == type)
1783 return arg1;
1785 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1787 if (TREE_CODE (arg1) == INTEGER_CST)
1789 /* If we would build a constant wider than GCC supports,
1790 leave the conversion unfolded. */
1791 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1792 return NULL_TREE;
1794 /* Given an integer constant, make new constant with new type,
1795 appropriately sign-extended or truncated. */
1796 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1797 TREE_INT_CST_HIGH (arg1));
1799 t = force_fit_type (t,
1800 /* Don't set the overflow when
1801 converting a pointer */
1802 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* A negative value converted from signed to unsigned counts
   as an overflow. */
1803 (TREE_INT_CST_HIGH (arg1) < 0
1804 && (TYPE_UNSIGNED (type)
1805 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1806 | TREE_OVERFLOW (arg1),
1807 TREE_CONSTANT_OVERFLOW (arg1));
1808 return t;
1810 else if (TREE_CODE (arg1) == REAL_CST)
1812 /* The following code implements the floating point to integer
1813 conversion rules required by the Java Language Specification,
1814 that IEEE NaNs are mapped to zero and values that overflow
1815 the target precision saturate, i.e. values greater than
1816 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1817 are mapped to INT_MIN. These semantics are allowed by the
1818 C and C++ standards that simply state that the behavior of
1819 FP-to-integer conversion is unspecified upon overflow. */
1821 HOST_WIDE_INT high, low;
1822 REAL_VALUE_TYPE r;
1823 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* First round X to an integral value per CODE's rounding rule. */
1825 switch (code)
1827 case FIX_TRUNC_EXPR:
1828 real_trunc (&r, VOIDmode, &x);
1829 break;
1831 case FIX_CEIL_EXPR:
1832 real_ceil (&r, VOIDmode, &x);
1833 break;
1835 case FIX_FLOOR_EXPR:
1836 real_floor (&r, VOIDmode, &x);
1837 break;
1839 case FIX_ROUND_EXPR:
1840 real_round (&r, VOIDmode, &x);
1841 break;
1843 default:
1844 abort ();
1847 /* If R is NaN, return zero and show we have an overflow. */
1848 if (REAL_VALUE_ISNAN (r))
1850 overflow = 1;
1851 high = 0;
1852 low = 0;
1855 /* See if R is less than the lower bound or greater than the
1856 upper bound. */
/* Saturate: out-of-range values clamp to TYPE's min or max. */
1858 if (! overflow)
1860 tree lt = TYPE_MIN_VALUE (type);
1861 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1862 if (REAL_VALUES_LESS (r, l))
1864 overflow = 1;
1865 high = TREE_INT_CST_HIGH (lt);
1866 low = TREE_INT_CST_LOW (lt);
1870 if (! overflow)
1872 tree ut = TYPE_MAX_VALUE (type);
1873 if (ut)
1875 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1876 if (REAL_VALUES_LESS (u, r))
1878 overflow = 1;
1879 high = TREE_INT_CST_HIGH (ut);
1880 low = TREE_INT_CST_LOW (ut);
1885 if (! overflow)
1886 REAL_VALUE_TO_INT (&low, &high, r);
1888 t = build_int_cst_wide (type, low, high);
1890 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1891 TREE_CONSTANT_OVERFLOW (arg1));
1892 return t;
1895 else if (TREE_CODE (type) == REAL_TYPE)
1897 if (TREE_CODE (arg1) == INTEGER_CST)
1898 return build_real_from_int_cst (type, arg1);
1899 if (TREE_CODE (arg1) == REAL_CST)
1901 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1903 /* We make a copy of ARG1 so that we don't modify an
1904 existing constant tree. */
1905 t = copy_node (arg1);
1906 TREE_TYPE (t) = type;
1907 return t;
1910 t = build_real (type,
1911 real_value_truncate (TYPE_MODE (type),
1912 TREE_REAL_CST (arg1)));
1914 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1915 TREE_CONSTANT_OVERFLOW (t)
1916 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1917 return t;
/* All other conversions are left unfolded. */
1920 return NULL_TREE;
1923 /* Convert expression ARG to type TYPE. Used by the middle-end for
1924 simple conversions in preference to calling the front-end's convert. */
1926 tree
1927 fold_convert (tree type, tree arg)
1929 tree orig = TREE_TYPE (arg);
1930 tree tem;
1932 if (type == orig)
1933 return arg;
1935 if (TREE_CODE (arg) == ERROR_MARK
1936 || TREE_CODE (type) == ERROR_MARK
1937 || TREE_CODE (orig) == ERROR_MARK)
1938 return error_mark_node;
/* Variants of the same main type (or language-compatible types)
   need only a NOP. */
1940 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1941 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1942 TYPE_MAIN_VARIANT (orig)))
1943 return fold (build1 (NOP_EXPR, type, arg));
/* Dispatch on the category of the destination TYPE; within each
   category, on the category of the source type ORIG. */
1945 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1946 || TREE_CODE (type) == OFFSET_TYPE)
1948 if (TREE_CODE (arg) == INTEGER_CST)
1950 tem = fold_convert_const (NOP_EXPR, type, arg);
1951 if (tem != NULL_TREE)
1952 return tem;
1954 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1955 || TREE_CODE (orig) == OFFSET_TYPE)
1956 return fold (build1 (NOP_EXPR, type, arg));
/* Complex -> scalar keeps just the real part. */
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1959 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1960 return fold_convert (type, tem);
1962 if (TREE_CODE (orig) == VECTOR_TYPE
1963 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
1964 return fold (build1 (NOP_EXPR, type, arg));
1966 else if (TREE_CODE (type) == REAL_TYPE)
1968 if (TREE_CODE (arg) == INTEGER_CST)
1970 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1971 if (tem != NULL_TREE)
1972 return tem;
1974 else if (TREE_CODE (arg) == REAL_CST)
1976 tem = fold_convert_const (NOP_EXPR, type, arg);
1977 if (tem != NULL_TREE)
1978 return tem;
1981 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1982 return fold (build1 (FLOAT_EXPR, type, arg));
1983 if (TREE_CODE (orig) == REAL_TYPE)
1984 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1985 type, arg));
1986 if (TREE_CODE (orig) == COMPLEX_TYPE)
1988 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1989 return fold_convert (type, tem);
1992 else if (TREE_CODE (type) == COMPLEX_TYPE)
/* Scalar -> complex gets a zero imaginary part. */
1994 if (INTEGRAL_TYPE_P (orig)
1995 || POINTER_TYPE_P (orig)
1996 || TREE_CODE (orig) == REAL_TYPE)
1997 return build2 (COMPLEX_EXPR, type,
1998 fold_convert (TREE_TYPE (type), arg),
1999 fold_convert (TREE_TYPE (type), integer_zero_node));
2000 if (TREE_CODE (orig) == COMPLEX_TYPE)
2002 tree rpart, ipart;
2004 if (TREE_CODE (arg) == COMPLEX_EXPR)
2006 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2007 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2008 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* ARG is referenced twice below, so protect it from double
   evaluation with save_expr. */
2011 arg = save_expr (arg);
2012 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
2013 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
2014 rpart = fold_convert (TREE_TYPE (type), rpart);
2015 ipart = fold_convert (TREE_TYPE (type), ipart);
2016 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
2019 else if (TREE_CODE (type) == VECTOR_TYPE
/* Vector conversions are only bit-preserving NOPs, and only when
   the sizes match exactly. */
2021 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
2022 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
2023 return fold (build1 (NOP_EXPR, type, arg));
2024 if (TREE_CODE (orig) == VECTOR_TYPE
2025 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
2026 return fold (build1 (NOP_EXPR, type, arg));
2028 else if (VOID_TYPE_P (type))
2029 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
/* No conversion rule matched: this combination is a bug. */
2030 abort ();
2033 /* Return an expr equal to X but certainly not valid as an lvalue. */
2035 tree
2036 non_lvalue (tree x)
2038 /* We only need to wrap lvalue tree codes. */
2039 switch (TREE_CODE (x))
2041 case VAR_DECL:
2042 case PARM_DECL:
2043 case RESULT_DECL:
2044 case LABEL_DECL:
2045 case FUNCTION_DECL:
2046 case SSA_NAME:
2048 case COMPONENT_REF:
2049 case INDIRECT_REF:
2050 case ARRAY_REF:
2051 case ARRAY_RANGE_REF:
2052 case BIT_FIELD_REF:
2053 case OBJ_TYPE_REF:
2055 case REALPART_EXPR:
2056 case IMAGPART_EXPR:
2057 case PREINCREMENT_EXPR:
2058 case PREDECREMENT_EXPR:
2059 case SAVE_EXPR:
2060 case TRY_CATCH_EXPR:
2061 case WITH_CLEANUP_EXPR:
2062 case COMPOUND_EXPR:
2063 case MODIFY_EXPR:
2064 case TARGET_EXPR:
2065 case COND_EXPR:
2066 case BIND_EXPR:
2067 case MIN_EXPR:
2068 case MAX_EXPR:
2069 break;
2071 default:
2072 /* Assume the worst for front-end tree codes. */
2073 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2074 break;
2075 return x;
2077 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2080 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2081 Zero means allow extended lvalues. */
/* NOTE(review): read here only by pedantic_non_lvalue; presumably set
   by the front end -- confirm against callers. */
2083 int pedantic_lvalues;
2085 /* When pedantic, return an expr equal to X but certainly not valid as a
2086 pedantic lvalue. Otherwise, return X. */
2088 tree
2089 pedantic_non_lvalue (tree x)
2091 if (pedantic_lvalues)
2092 return non_lvalue (x);
2093 else
2094 return x;
2097 /* Given a tree comparison code, return the code that is the logical inverse
2098 of the given code. It is not safe to do this for floating-point
2099 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2100 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2102 static enum tree_code
2103 invert_tree_comparison (enum tree_code code, bool honor_nans)
2105 if (honor_nans && flag_trapping_math)
2106 return ERROR_MARK;
2108 switch (code)
2110 case EQ_EXPR:
2111 return NE_EXPR;
2112 case NE_EXPR:
2113 return EQ_EXPR;
2114 case GT_EXPR:
2115 return honor_nans ? UNLE_EXPR : LE_EXPR;
2116 case GE_EXPR:
2117 return honor_nans ? UNLT_EXPR : LT_EXPR;
2118 case LT_EXPR:
2119 return honor_nans ? UNGE_EXPR : GE_EXPR;
2120 case LE_EXPR:
2121 return honor_nans ? UNGT_EXPR : GT_EXPR;
2122 case LTGT_EXPR:
2123 return UNEQ_EXPR;
2124 case UNEQ_EXPR:
2125 return LTGT_EXPR;
2126 case UNGT_EXPR:
2127 return LE_EXPR;
2128 case UNGE_EXPR:
2129 return LT_EXPR;
2130 case UNLT_EXPR:
2131 return GE_EXPR;
2132 case UNLE_EXPR:
2133 return GT_EXPR;
2134 case ORDERED_EXPR:
2135 return UNORDERED_EXPR;
2136 case UNORDERED_EXPR:
2137 return ORDERED_EXPR;
2138 default:
2139 abort ();
2143 /* Similar, but return the comparison that results if the operands are
2144 swapped. This is safe for floating-point. */
2146 enum tree_code
2147 swap_tree_comparison (enum tree_code code)
2149 switch (code)
2151 case EQ_EXPR:
2152 case NE_EXPR:
2153 return code;
2154 case GT_EXPR:
2155 return LT_EXPR;
2156 case GE_EXPR:
2157 return LE_EXPR;
2158 case LT_EXPR:
2159 return GT_EXPR;
2160 case LE_EXPR:
2161 return GE_EXPR;
2162 default:
2163 abort ();
2168 /* Convert a comparison tree code from an enum tree_code representation
2169 into a compcode bit-based encoding. This function is the inverse of
2170 compcode_to_comparison. */
2172 static enum comparison_code
2173 comparison_to_compcode (enum tree_code code)
2175 switch (code)
2177 case LT_EXPR:
2178 return COMPCODE_LT;
2179 case EQ_EXPR:
2180 return COMPCODE_EQ;
2181 case LE_EXPR:
2182 return COMPCODE_LE;
2183 case GT_EXPR:
2184 return COMPCODE_GT;
2185 case NE_EXPR:
2186 return COMPCODE_NE;
2187 case GE_EXPR:
2188 return COMPCODE_GE;
2189 case ORDERED_EXPR:
2190 return COMPCODE_ORD;
2191 case UNORDERED_EXPR:
2192 return COMPCODE_UNORD;
2193 case UNLT_EXPR:
2194 return COMPCODE_UNLT;
2195 case UNEQ_EXPR:
2196 return COMPCODE_UNEQ;
2197 case UNLE_EXPR:
2198 return COMPCODE_UNLE;
2199 case UNGT_EXPR:
2200 return COMPCODE_UNGT;
2201 case LTGT_EXPR:
2202 return COMPCODE_LTGT;
2203 case UNGE_EXPR:
2204 return COMPCODE_UNGE;
2205 default:
2206 abort ();
2210 /* Convert a compcode bit-based encoding of a comparison operator back
2211 to GCC's enum tree_code representation. This function is the
2212 inverse of comparison_to_compcode. */
2214 static enum tree_code
2215 compcode_to_comparison (enum comparison_code code)
2217 switch (code)
2219 case COMPCODE_LT:
2220 return LT_EXPR;
2221 case COMPCODE_EQ:
2222 return EQ_EXPR;
2223 case COMPCODE_LE:
2224 return LE_EXPR;
2225 case COMPCODE_GT:
2226 return GT_EXPR;
2227 case COMPCODE_NE:
2228 return NE_EXPR;
2229 case COMPCODE_GE:
2230 return GE_EXPR;
2231 case COMPCODE_ORD:
2232 return ORDERED_EXPR;
2233 case COMPCODE_UNORD:
2234 return UNORDERED_EXPR;
2235 case COMPCODE_UNLT:
2236 return UNLT_EXPR;
2237 case COMPCODE_UNEQ:
2238 return UNEQ_EXPR;
2239 case COMPCODE_UNLE:
2240 return UNLE_EXPR;
2241 case COMPCODE_UNGT:
2242 return UNGT_EXPR;
2243 case COMPCODE_LTGT:
2244 return LTGT_EXPR;
2245 case COMPCODE_UNGE:
2246 return UNGE_EXPR;
2247 default:
2248 abort ();
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  /* In the compcode encoding, the AND/OR of two comparisons on the same
     operands is simply the bitwise AND/OR of their encodings.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps on NaN operands
	 unless it is EQ, NE (quiet) or explicitly unordered-aware.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* Degenerate encodings fold to a constant; anything else maps back
     to a single comparison operator.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold (build2 (compcode_to_comparison (compcode),
			 truth_type, ll_arg, lr_arg));
}
2336 /* Return nonzero if CODE is a tree code that represents a truth value. */
2338 static int
2339 truth_value_p (enum tree_code code)
2341 return (TREE_CODE_CLASS (code) == '<'
2342 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2343 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2344 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If one is specified and the other isn't, they aren't equal and if
     neither is specified, they are.

     ??? This is temporary and is meant only to handle the cases of the
     optional operands for COMPONENT_REF and ARRAY_REF.  */
  if ((arg0 && !arg1) || (!arg0 && arg1))
    return 0;
  else if (!arg0 && !arg1)
    return 1;
  /* If either is ERROR_MARK, they aren't equal.  */
  else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* After stripping no-op conversions, the codes and modes must agree.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	/* Overflowed constants are never considered equal, so the
	   folder does not propagate them.  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	/* Bit-identical comparison, so -0.0 != 0.0 and NaNs can match.  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  /* Compare element lists pairwise; both must end together.  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

  /* Dispatch on the code class: unary, binary/comparison, reference,
     expression, declaration.  */
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), flags);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0),
			   TREE_OPERAND (arg1, 0), flags)
	  && operand_equal_p (TREE_OPERAND (arg0, 1),
			      TREE_OPERAND (arg1, 1), flags))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), flags);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 (element size/alignment hints) may be
	     NULL; operand_equal_p treats two NULLs as equal.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 3),
				      TREE_OPERAND (arg1, 3), flags));

	case COMPONENT_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), flags));

	case BIT_FIELD_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), flags));
	default:
	  return 0;
	}

    case 'e':
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), flags);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  /* Short-circuit operators are order-sensitive: only the
	     same operand order counts as equal.  */
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), flags)
		 && operand_equal_p (TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg1, 1), flags);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  /* These are commutative, so also try the swapped order.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), flags))
		 || (operand_equal_p (TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 1), flags)
		     && operand_equal_p (TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg1, 0), flags))
	    return 0;

	  {
	    /* Only const calls (or, with OEP_PURE_SAME, pure calls)
	       can be assumed to return the same value twice.  */
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* shorten_compare only applies to integral comparisons.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      /* A constant uses no variables at all.  */
      return 1;

    case 'e':
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;  /* Already recorded as CVAL1.  */
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;  /* Already recorded as CVAL2.  */
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;  /* Already recorded as CVAL1.  */
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;  /* Already recorded as CVAL2.  */
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* Rebuild the node around the substituted operand and re-fold.  */
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      return fold (build2 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1),
			   eval_subst (TREE_OPERAND (arg, 1),
				       old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the second operand contributes to the value.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build3 (code, type,
			       eval_subst (TREE_OPERAND (arg, 0),
					   old0, new0, old1, new1),
			       eval_subst (TREE_OPERAND (arg, 1),
					   old0, new0, old1, new1),
			       eval_subst (TREE_OPERAND (arg, 2),
					   old0, new0, old1, new1)));
	default:
	  break;
	}
      /* Fall through - ???  */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build2 (code, type, arg0, arg1));
      }

    default:
      /* Anything else is returned unchanged.  */
      return arg;
    }
}
2866 /* Return a tree for the case when the result of an expression is RESULT
2867 converted to TYPE and OMITTED was previously an operand of the expression
2868 but is now not needed (e.g., we folded OMITTED * 0).
2870 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2871 the conversion of RESULT to TYPE. */
2873 tree
2874 omit_one_operand (tree type, tree result, tree omitted)
2876 tree t = fold_convert (type, result);
2878 if (TREE_SIDE_EFFECTS (omitted))
2879 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2881 return non_lvalue (t);
2884 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2886 static tree
2887 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2889 tree t = fold_convert (type, result);
2891 if (TREE_SIDE_EFFECTS (omitted))
2892 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2894 return pedantic_non_lvalue (t);
2897 /* Return a tree for the case when the result of an expression is RESULT
2898 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2899 of the expression but are now not needed.
2901 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2902 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2903 evaluated before OMITTED2. Otherwise, if neither has side effects,
2904 just do the conversion of RESULT to TYPE. */
2906 tree
2907 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2909 tree t = fold_convert (type, result);
2911 if (TREE_SIDE_EFFECTS (omitted2))
2912 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2913 if (TREE_SIDE_EFFECTS (omitted1))
2914 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2916 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */
tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With -ftrapping-math, inverting e.g. < into UNGE would change
	 which operand values trap, so keep an explicit negation.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	{
	  code = invert_tree_comparison (code,
					 HONOR_NANS (TYPE_MODE (op_type)));
	  if (code == ERROR_MARK)
	    return build1 (TRUTH_NOT_EXPR, type, arg);
	  else
	    return build2 (code, type,
			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
	}
    }

  switch (code)
    {
    case INTEGER_CST:
      return fold_convert (type,
			   build_int_cst (NULL_TREE, integer_zerop (arg)));

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation: strip the existing NOT.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)),
		     invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the value (second operand) is negated.  */
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	break;
      /* FALLTHROUGH: non-boolean NOPs are handled like conversions.  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the negation through the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      /* !(x & 1) is (x & 1) == 0.  */
      return build2 (EQ_EXPR, type, arg,
		     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Anything that falls through must be boolean-typed to be safely
     wrapped in a plain TRUTH_NOT_EXPR.  */
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  /* Both inner operations must have the same code, that code must be
     AND or IOR, and it must differ from the outer CODE (otherwise
     there is nothing to distribute).  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  /* Try the four possible positions of the shared operand.  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  /* Build COMMON <inner-op> (LEFT <outer-op> RIGHT) and fold it.  */
  return fold (build2 (TREE_CODE (arg0), type, common,
		       fold (build2 (code, type, left, right))));
}
3096 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3097 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3099 static tree
3100 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3101 int unsignedp)
3103 tree result = build3 (BIT_FIELD_REF, type, inner,
3104 size_int (bitsize), bitsize_int (bitpos));
3106 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3108 return result;
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* Bit numbering within the containing word is reversed on big-endian
     targets.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: all-ones,
     shifted left then right to leave exactly LBITSIZE one bits at
     position LBITPOS.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (linner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask),
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (rinner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      /* Preserve the volatility of the original access.  */
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
3278 /* Subroutine for fold_truthop: decode a field reference.
3280 If EXP is a comparison reference, we return the innermost reference.
3282 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3283 set to the starting bit number.
3285 If the innermost field can be completely contained in a mode-sized
3286 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3288 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3289 otherwise it is not changed.
3291 *PUNSIGNEDP is set to the signedness of the field.
3293 *PMASK is set to the mask used. This is either contained in a
3294 BIT_AND_EXPR or derived from the width of the field.
3296 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3298 Return 0 if this is not a component reference or is one that we can't
3299 do anything with. */
3301 static tree
3302 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3303 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3304 int *punsignedp, int *pvolatilep,
3305 tree *pmask, tree *pand_mask)
3307 tree outer_type = 0;
3308 tree and_mask = 0;
3309 tree mask, inner, offset;
3310 tree unsigned_type;
3311 unsigned int precision;
3313 /* All the optimizations using this function assume integer fields.
3314 There are problems with FP fields since the type_for_size call
3315 below can fail for, e.g., XFmode. */
3316 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3317 return 0;
3319 /* We are interested in the bare arrangement of bits, so strip everything
3320 that doesn't affect the machine mode. However, record the type of the
3321 outermost expression if it may matter below. */
3322 if (TREE_CODE (exp) == NOP_EXPR
3323 || TREE_CODE (exp) == CONVERT_EXPR
3324 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3325 outer_type = TREE_TYPE (exp);
3326 STRIP_NOPS (exp);
3328 if (TREE_CODE (exp) == BIT_AND_EXPR)
3330 and_mask = TREE_OPERAND (exp, 1);
3331 exp = TREE_OPERAND (exp, 0);
3332 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3333 if (TREE_CODE (and_mask) != INTEGER_CST)
3334 return 0;
3337 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3338 punsignedp, pvolatilep);
3339 if ((inner == exp && and_mask == 0)
3340 || *pbitsize < 0 || offset != 0
3341 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3342 return 0;
3344 /* If the number of bits in the reference is the same as the bitsize of
3345 the outer type, then the outer type gives the signedness. Otherwise
3346 (in case of a small bitfield) the signedness is unchanged. */
3347 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3348 *punsignedp = TYPE_UNSIGNED (outer_type);
3350 /* Compute the mask to access the bitfield. */
3351 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3352 precision = TYPE_PRECISION (unsigned_type);
3354 mask = build_int_cst (unsigned_type, -1);
3355 mask = force_fit_type (mask, 0, false, false);
3357 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3358 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3360 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3361 if (and_mask != 0)
3362 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3363 fold_convert (unsigned_type, and_mask), mask));
3365 *pmask = mask;
3366 *pand_mask = and_mask;
3367 return inner;
3370 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3371 bit positions. */
3373 static int
3374 all_ones_mask_p (tree mask, int size)
3376 tree type = TREE_TYPE (mask);
3377 unsigned int precision = TYPE_PRECISION (type);
3378 tree tmask;
3380 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3381 tmask = force_fit_type (tmask, 0, false, false);
3383 return
3384 tree_int_cst_equal (mask,
3385 const_binop (RSHIFT_EXPR,
3386 const_binop (LSHIFT_EXPR, tmask,
3387 size_int (precision - size),
3389 size_int (precision - size), 0));
3392 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3393 represents the sign bit of EXP's type. If EXP represents a sign
3394 or zero extension, also test VAL against the unextended type.
3395 The return value is the (sub)expression whose sign bit is VAL,
3396 or NULL_TREE otherwise. */
3398 static tree
3399 sign_bit_p (tree exp, tree val)
3401 unsigned HOST_WIDE_INT mask_lo, lo;
3402 HOST_WIDE_INT mask_hi, hi;
3403 int width;
3404 tree t;
3406 /* Tree EXP must have an integral type. */
3407 t = TREE_TYPE (exp);
3408 if (! INTEGRAL_TYPE_P (t))
3409 return NULL_TREE;
3411 /* Tree VAL must be an integer constant. */
3412 if (TREE_CODE (val) != INTEGER_CST
3413 || TREE_CONSTANT_OVERFLOW (val))
3414 return NULL_TREE;
3416 width = TYPE_PRECISION (t);
3417 if (width > HOST_BITS_PER_WIDE_INT)
3419 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3420 lo = 0;
3422 mask_hi = ((unsigned HOST_WIDE_INT) -1
3423 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3424 mask_lo = -1;
3426 else
3428 hi = 0;
3429 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3431 mask_hi = 0;
3432 mask_lo = ((unsigned HOST_WIDE_INT) -1
3433 >> (HOST_BITS_PER_WIDE_INT - width));
3436 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3437 treat VAL as if it were unsigned. */
3438 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3439 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3440 return exp;
3442 /* Handle extension from a narrower type. */
3443 if (TREE_CODE (exp) == NOP_EXPR
3444 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3445 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3447 return NULL_TREE;
3450 /* Subroutine for fold_truthop: determine if an operand is simple enough
3451 to be evaluated unconditionally. */
3453 static int
3454 simple_operand_p (tree exp)
3456 /* Strip any conversions that don't change the machine mode. */
3457 while ((TREE_CODE (exp) == NOP_EXPR
3458 || TREE_CODE (exp) == CONVERT_EXPR)
3459 && (TYPE_MODE (TREE_TYPE (exp))
3460 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3461 exp = TREE_OPERAND (exp, 0);
3463 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3464 || (DECL_P (exp)
3465 && ! TREE_ADDRESSABLE (exp)
3466 && ! TREE_THIS_VOLATILE (exp)
3467 && ! DECL_NONLOCAL (exp)
3468 /* Don't regard global variables as simple. They may be
3469 allocated in ways unknown to the compiler (shared memory,
3470 #pragma weak, etc). */
3471 && ! TREE_PUBLIC (exp)
3472 && ! DECL_EXTERNAL (exp)
3473 /* Loading a static variable is unduly expensive, but global
3474 registers aren't expensive. */
3475 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3478 /* The following functions are subroutines to fold_range_test and allow it to
3479 try to change a logical combination of comparisons into a range test.
3481 For example, both
3482 X == 2 || X == 3 || X == 4 || X == 5
3484 X >= 2 && X <= 5
3485 are converted to
3486 (unsigned) (X - 2) <= 3
3488 We describe each set of comparisons as being either inside or outside
3489 a range, using a variable named like IN_P, and then describe the
3490 range with a lower and upper bound. If one of the bounds is omitted,
3491 it represents either the highest or lowest value of the type.
3493 In the comments below, we represent a range by two numbers in brackets
3494 preceded by a "+" to designate being inside that range, or a "-" to
3495 designate being outside that range, so the condition can be inverted by
3496 flipping the prefix. An omitted bound is represented by a "-". For
3497 example, "- [-, 10]" means being outside the range starting at the lowest
3498 possible value and ending at 10, in other words, being greater than 10.
3499 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3500 always false.
3502 We set up things so that the missing bounds are handled in a consistent
3503 manner so neither a missing bound nor "true" and "false" need to be
3504 handled using a special case. */
3506 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3507 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3508 and UPPER1_P are nonzero if the respective argument is an upper bound
3509 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3510 must be specified for a comparison. ARG1 will be converted to ARG0's
3511 type if both are specified. */
3513 static tree
3514 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3515 tree arg1, int upper1_p)
3517 tree tem;
3518 int result;
3519 int sgn0, sgn1;
3521 /* If neither arg represents infinity, do the normal operation.
3522 Else, if not a comparison, return infinity. Else handle the special
3523 comparison rules. Note that most of the cases below won't occur, but
3524 are handled for consistency. */
3526 if (arg0 != 0 && arg1 != 0)
3528 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3529 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3530 STRIP_NOPS (tem);
3531 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3534 if (TREE_CODE_CLASS (code) != '<')
3535 return 0;
3537 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3538 for neither. In real maths, we cannot assume open ended ranges are
3539 the same. But, this is computer arithmetic, where numbers are finite.
3540 We can therefore make the transformation of any unbounded range with
3541 the value Z, Z being greater than any representable number. This permits
3542 us to treat unbounded ranges as equal. */
3543 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3544 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3545 switch (code)
3547 case EQ_EXPR:
3548 result = sgn0 == sgn1;
3549 break;
3550 case NE_EXPR:
3551 result = sgn0 != sgn1;
3552 break;
3553 case LT_EXPR:
3554 result = sgn0 < sgn1;
3555 break;
3556 case LE_EXPR:
3557 result = sgn0 <= sgn1;
3558 break;
3559 case GT_EXPR:
3560 result = sgn0 > sgn1;
3561 break;
3562 case GE_EXPR:
3563 result = sgn0 >= sgn1;
3564 break;
3565 default:
3566 abort ();
3569 return constant_boolean_node (result, type);
3572 /* Given EXP, a logical expression, set the range it is testing into
3573 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3574 actually being tested. *PLOW and *PHIGH will be made of the same type
3575 as the returned expression. If EXP is not a comparison, we will most
3576 likely not be returning a useful value and range. */
3578 static tree
3579 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3581 enum tree_code code;
3582 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3583 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3584 int in_p, n_in_p;
3585 tree low, high, n_low, n_high;
3587 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3588 and see if we can refine the range. Some of the cases below may not
3589 happen, but it doesn't seem worth worrying about this. We "continue"
3590 the outer loop when we've changed something; otherwise we "break"
3591 the switch, which will "break" the while. */
3593 in_p = 0;
3594 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3596 while (1)
3598 code = TREE_CODE (exp);
3599 exp_type = TREE_TYPE (exp);
3601 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3603 if (first_rtl_op (code) > 0)
3604 arg0 = TREE_OPERAND (exp, 0);
3605 if (TREE_CODE_CLASS (code) == '<'
3606 || TREE_CODE_CLASS (code) == '1'
3607 || TREE_CODE_CLASS (code) == '2')
3608 arg0_type = TREE_TYPE (arg0);
3609 if (TREE_CODE_CLASS (code) == '2'
3610 || TREE_CODE_CLASS (code) == '<'
3611 || (TREE_CODE_CLASS (code) == 'e'
3612 && TREE_CODE_LENGTH (code) > 1))
3613 arg1 = TREE_OPERAND (exp, 1);
3616 switch (code)
3618 case TRUTH_NOT_EXPR:
3619 in_p = ! in_p, exp = arg0;
3620 continue;
3622 case EQ_EXPR: case NE_EXPR:
3623 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3624 /* We can only do something if the range is testing for zero
3625 and if the second operand is an integer constant. Note that
3626 saying something is "in" the range we make is done by
3627 complementing IN_P since it will set in the initial case of
3628 being not equal to zero; "out" is leaving it alone. */
3629 if (low == 0 || high == 0
3630 || ! integer_zerop (low) || ! integer_zerop (high)
3631 || TREE_CODE (arg1) != INTEGER_CST)
3632 break;
3634 switch (code)
3636 case NE_EXPR: /* - [c, c] */
3637 low = high = arg1;
3638 break;
3639 case EQ_EXPR: /* + [c, c] */
3640 in_p = ! in_p, low = high = arg1;
3641 break;
3642 case GT_EXPR: /* - [-, c] */
3643 low = 0, high = arg1;
3644 break;
3645 case GE_EXPR: /* + [c, -] */
3646 in_p = ! in_p, low = arg1, high = 0;
3647 break;
3648 case LT_EXPR: /* - [c, -] */
3649 low = arg1, high = 0;
3650 break;
3651 case LE_EXPR: /* + [-, c] */
3652 in_p = ! in_p, low = 0, high = arg1;
3653 break;
3654 default:
3655 abort ();
3658 /* If this is an unsigned comparison, we also know that EXP is
3659 greater than or equal to zero. We base the range tests we make
3660 on that fact, so we record it here so we can parse existing
3661 range tests. We test arg0_type since often the return type
3662 of, e.g. EQ_EXPR, is boolean. */
3663 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3665 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3666 in_p, low, high, 1,
3667 fold_convert (arg0_type, integer_zero_node),
3668 NULL_TREE))
3669 break;
3671 in_p = n_in_p, low = n_low, high = n_high;
3673 /* If the high bound is missing, but we have a nonzero low
3674 bound, reverse the range so it goes from zero to the low bound
3675 minus 1. */
3676 if (high == 0 && low && ! integer_zerop (low))
3678 in_p = ! in_p;
3679 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3680 integer_one_node, 0);
3681 low = fold_convert (arg0_type, integer_zero_node);
3685 exp = arg0;
3686 continue;
3688 case NEGATE_EXPR:
3689 /* (-x) IN [a,b] -> x in [-b, -a] */
3690 n_low = range_binop (MINUS_EXPR, exp_type,
3691 fold_convert (exp_type, integer_zero_node),
3692 0, high, 1);
3693 n_high = range_binop (MINUS_EXPR, exp_type,
3694 fold_convert (exp_type, integer_zero_node),
3695 0, low, 0);
3696 low = n_low, high = n_high;
3697 exp = arg0;
3698 continue;
3700 case BIT_NOT_EXPR:
3701 /* ~ X -> -X - 1 */
3702 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3703 fold_convert (exp_type, integer_one_node));
3704 continue;
3706 case PLUS_EXPR: case MINUS_EXPR:
3707 if (TREE_CODE (arg1) != INTEGER_CST)
3708 break;
3710 /* If EXP is signed, any overflow in the computation is undefined,
3711 so we don't worry about it so long as our computations on
3712 the bounds don't overflow. For unsigned, overflow is defined
3713 and this is exactly the right thing. */
3714 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3715 arg0_type, low, 0, arg1, 0);
3716 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3717 arg0_type, high, 1, arg1, 0);
3718 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3719 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3720 break;
3722 /* Check for an unsigned range which has wrapped around the maximum
3723 value thus making n_high < n_low, and normalize it. */
3724 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3726 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3727 integer_one_node, 0);
3728 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3729 integer_one_node, 0);
3731 /* If the range is of the form +/- [ x+1, x ], we won't
3732 be able to normalize it. But then, it represents the
3733 whole range or the empty set, so make it
3734 +/- [ -, - ]. */
3735 if (tree_int_cst_equal (n_low, low)
3736 && tree_int_cst_equal (n_high, high))
3737 low = high = 0;
3738 else
3739 in_p = ! in_p;
3741 else
3742 low = n_low, high = n_high;
3744 exp = arg0;
3745 continue;
3747 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3748 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3749 break;
3751 if (! INTEGRAL_TYPE_P (arg0_type)
3752 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3753 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3754 break;
3756 n_low = low, n_high = high;
3758 if (n_low != 0)
3759 n_low = fold_convert (arg0_type, n_low);
3761 if (n_high != 0)
3762 n_high = fold_convert (arg0_type, n_high);
3765 /* If we're converting arg0 from an unsigned type, to exp,
3766 a signed type, we will be doing the comparison as unsigned.
3767 The tests above have already verified that LOW and HIGH
3768 are both positive.
3770 So we have to ensure that we will handle large unsigned
3771 values the same way that the current signed bounds treat
3772 negative values. */
3774 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3776 tree high_positive;
3777 tree equiv_type = lang_hooks.types.type_for_mode
3778 (TYPE_MODE (arg0_type), 1);
3780 /* A range without an upper bound is, naturally, unbounded.
3781 Since convert would have cropped a very large value, use
3782 the max value for the destination type. */
3783 high_positive
3784 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3785 : TYPE_MAX_VALUE (arg0_type);
3787 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3788 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3789 fold_convert (arg0_type,
3790 high_positive),
3791 fold_convert (arg0_type,
3792 integer_one_node)));
3794 /* If the low bound is specified, "and" the range with the
3795 range for which the original unsigned value will be
3796 positive. */
3797 if (low != 0)
3799 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3800 1, n_low, n_high, 1,
3801 fold_convert (arg0_type,
3802 integer_zero_node),
3803 high_positive))
3804 break;
3806 in_p = (n_in_p == in_p);
3808 else
3810 /* Otherwise, "or" the range with the range of the input
3811 that will be interpreted as negative. */
3812 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3813 0, n_low, n_high, 1,
3814 fold_convert (arg0_type,
3815 integer_zero_node),
3816 high_positive))
3817 break;
3819 in_p = (in_p != n_in_p);
3823 exp = arg0;
3824 low = n_low, high = n_high;
3825 continue;
3827 default:
3828 break;
3831 break;
3834 /* If EXP is a constant, we can evaluate whether this is true or false. */
3835 if (TREE_CODE (exp) == INTEGER_CST)
3837 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3838 exp, 0, low, 0))
3839 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3840 exp, 1, high, 1)));
3841 low = high = 0;
3842 exp = 0;
3845 *pin_p = in_p, *plow = low, *phigh = high;
3846 return exp;
3849 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3850 type, TYPE, return an expression to test if EXP is in (or out of, depending
3851 on IN_P) the range. Return 0 if the test couldn't be created. */
3853 static tree
3854 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3856 tree etype = TREE_TYPE (exp);
3857 tree value;
3859 if (! in_p)
3861 value = build_range_check (type, exp, 1, low, high);
3862 if (value != 0)
3863 return invert_truthvalue (value);
3865 return 0;
3868 if (low == 0 && high == 0)
3869 return fold_convert (type, integer_one_node);
3871 if (low == 0)
3872 return fold (build2 (LE_EXPR, type, exp, high));
3874 if (high == 0)
3875 return fold (build2 (GE_EXPR, type, exp, low));
3877 if (operand_equal_p (low, high, 0))
3878 return fold (build2 (EQ_EXPR, type, exp, low));
3880 if (integer_zerop (low))
3882 if (! TYPE_UNSIGNED (etype))
3884 etype = lang_hooks.types.unsigned_type (etype);
3885 high = fold_convert (etype, high);
3886 exp = fold_convert (etype, exp);
3888 return build_range_check (type, exp, 1, 0, high);
3891 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3892 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3894 unsigned HOST_WIDE_INT lo;
3895 HOST_WIDE_INT hi;
3896 int prec;
3898 prec = TYPE_PRECISION (etype);
3899 if (prec <= HOST_BITS_PER_WIDE_INT)
3901 hi = 0;
3902 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3904 else
3906 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3907 lo = (unsigned HOST_WIDE_INT) -1;
3910 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3912 if (TYPE_UNSIGNED (etype))
3914 etype = lang_hooks.types.signed_type (etype);
3915 exp = fold_convert (etype, exp);
3917 return fold (build2 (GT_EXPR, type, exp,
3918 fold_convert (etype, integer_zero_node)));
3922 value = const_binop (MINUS_EXPR, high, low, 0);
3923 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3925 tree utype, minv, maxv;
3927 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3928 for the type in question, as we rely on this here. */
3929 switch (TREE_CODE (etype))
3931 case INTEGER_TYPE:
3932 case ENUMERAL_TYPE:
3933 case CHAR_TYPE:
3934 utype = lang_hooks.types.unsigned_type (etype);
3935 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3936 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3937 integer_one_node, 1);
3938 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3939 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3940 minv, 1, maxv, 1)))
3942 etype = utype;
3943 high = fold_convert (etype, high);
3944 low = fold_convert (etype, low);
3945 exp = fold_convert (etype, exp);
3946 value = const_binop (MINUS_EXPR, high, low, 0);
3948 break;
3949 default:
3950 break;
3954 if (value != 0 && ! TREE_OVERFLOW (value))
3955 return build_range_check (type,
3956 fold (build2 (MINUS_EXPR, etype, exp, low)),
3957 1, fold_convert (etype, integer_zero_node),
3958 value);
3960 return 0;
3963 /* Given two ranges, see if we can merge them into one. Return 1 if we
3964 can, 0 if we can't. Set the output range into the specified parameters. */
3966 static int
3967 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3968 tree high0, int in1_p, tree low1, tree high1)
3970 int no_overlap;
3971 int subset;
3972 int temp;
3973 tree tem;
3974 int in_p;
3975 tree low, high;
3976 int lowequal = ((low0 == 0 && low1 == 0)
3977 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3978 low0, 0, low1, 0)));
3979 int highequal = ((high0 == 0 && high1 == 0)
3980 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3981 high0, 1, high1, 1)));
3983 /* Make range 0 be the range that starts first, or ends last if they
3984 start at the same value. Swap them if it isn't. */
3985 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3986 low0, 0, low1, 0))
3987 || (lowequal
3988 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3989 high1, 1, high0, 1))))
3991 temp = in0_p, in0_p = in1_p, in1_p = temp;
3992 tem = low0, low0 = low1, low1 = tem;
3993 tem = high0, high0 = high1, high1 = tem;
3996 /* Now flag two cases, whether the ranges are disjoint or whether the
3997 second range is totally subsumed in the first. Note that the tests
3998 below are simplified by the ones above. */
3999 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4000 high0, 1, low1, 0));
4001 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4002 high1, 1, high0, 1));
4004 /* We now have four cases, depending on whether we are including or
4005 excluding the two ranges. */
4006 if (in0_p && in1_p)
4008 /* If they don't overlap, the result is false. If the second range
4009 is a subset it is the result. Otherwise, the range is from the start
4010 of the second to the end of the first. */
4011 if (no_overlap)
4012 in_p = 0, low = high = 0;
4013 else if (subset)
4014 in_p = 1, low = low1, high = high1;
4015 else
4016 in_p = 1, low = low1, high = high0;
4019 else if (in0_p && ! in1_p)
4021 /* If they don't overlap, the result is the first range. If they are
4022 equal, the result is false. If the second range is a subset of the
4023 first, and the ranges begin at the same place, we go from just after
4024 the end of the first range to the end of the second. If the second
4025 range is not a subset of the first, or if it is a subset and both
4026 ranges end at the same place, the range starts at the start of the
4027 first range and ends just before the second range.
4028 Otherwise, we can't describe this as a single range. */
4029 if (no_overlap)
4030 in_p = 1, low = low0, high = high0;
4031 else if (lowequal && highequal)
4032 in_p = 0, low = high = 0;
4033 else if (subset && lowequal)
4035 in_p = 1, high = high0;
4036 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4037 integer_one_node, 0);
4039 else if (! subset || highequal)
4041 in_p = 1, low = low0;
4042 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4043 integer_one_node, 0);
4045 else
4046 return 0;
4049 else if (! in0_p && in1_p)
4051 /* If they don't overlap, the result is the second range. If the second
4052 is a subset of the first, the result is false. Otherwise,
4053 the range starts just after the first range and ends at the
4054 end of the second. */
4055 if (no_overlap)
4056 in_p = 1, low = low1, high = high1;
4057 else if (subset || highequal)
4058 in_p = 0, low = high = 0;
4059 else
4061 in_p = 1, high = high1;
4062 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4063 integer_one_node, 0);
4067 else
4069 /* The case where we are excluding both ranges. Here the complex case
4070 is if they don't overlap. In that case, the only time we have a
4071 range is if they are adjacent. If the second is a subset of the
4072 first, the result is the first. Otherwise, the range to exclude
4073 starts at the beginning of the first range and ends at the end of the
4074 second. */
4075 if (no_overlap)
4077 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4078 range_binop (PLUS_EXPR, NULL_TREE,
4079 high0, 1,
4080 integer_one_node, 1),
4081 1, low1, 0)))
4082 in_p = 0, low = low0, high = high1;
4083 else
4085 /* Canonicalize - [min, x] into - [-, x]. */
4086 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4087 switch (TREE_CODE (TREE_TYPE (low0)))
4089 case ENUMERAL_TYPE:
4090 if (TYPE_PRECISION (TREE_TYPE (low0))
4091 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4092 break;
4093 /* FALLTHROUGH */
4094 case INTEGER_TYPE:
4095 case CHAR_TYPE:
4096 if (tree_int_cst_equal (low0,
4097 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4098 low0 = 0;
4099 break;
4100 case POINTER_TYPE:
4101 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4102 && integer_zerop (low0))
4103 low0 = 0;
4104 break;
4105 default:
4106 break;
4109 /* Canonicalize - [x, max] into - [x, -]. */
4110 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4111 switch (TREE_CODE (TREE_TYPE (high1)))
4113 case ENUMERAL_TYPE:
4114 if (TYPE_PRECISION (TREE_TYPE (high1))
4115 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4116 break;
4117 /* FALLTHROUGH */
4118 case INTEGER_TYPE:
4119 case CHAR_TYPE:
4120 if (tree_int_cst_equal (high1,
4121 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4122 high1 = 0;
4123 break;
4124 case POINTER_TYPE:
4125 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4126 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4127 high1, 1,
4128 integer_one_node, 1)))
4129 high1 = 0;
4130 break;
4131 default:
4132 break;
4135 /* The ranges might be also adjacent between the maximum and
4136 minimum values of the given type. For
4137 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4138 return + [x + 1, y - 1]. */
4139 if (low0 == 0 && high1 == 0)
4141 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4142 integer_one_node, 1);
4143 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4144 integer_one_node, 0);
4145 if (low == 0 || high == 0)
4146 return 0;
4148 in_p = 1;
4150 else
4151 return 0;
4154 else if (subset)
4155 in_p = 0, low = low0, high = high0;
4156 else
4157 in_p = 0, low = low0, high = high1;
4160 *pin_p = in_p, *plow = low, *phigh = high;
4161 return 1;
4165 /* Subroutine of fold, looking inside expressions of the form
4166 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4167 of the COND_EXPR. This function is being used also to optimize
4168 A op B ? C : A, by reversing the comparison first.
4170 Return a folded expression whose code is not a COND_EXPR
4171 anymore, or NULL_TREE if no folding opportunity is found. */
4173 static tree
4174 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4176 enum tree_code comp_code = TREE_CODE (arg0);
4177 tree arg00 = TREE_OPERAND (arg0, 0);
4178 tree arg01 = TREE_OPERAND (arg0, 1);
4179 tree arg1_type = TREE_TYPE (arg1);
4180 tree tem;
4182 STRIP_NOPS (arg1);
4183 STRIP_NOPS (arg2);
4185 /* If we have A op 0 ? A : -A, consider applying the following
4186 transformations:
4188 A == 0? A : -A same as -A
4189 A != 0? A : -A same as A
4190 A >= 0? A : -A same as abs (A)
4191 A > 0? A : -A same as abs (A)
4192 A <= 0? A : -A same as -abs (A)
4193 A < 0? A : -A same as -abs (A)
4195 None of these transformations work for modes with signed
4196 zeros. If A is +/-0, the first two transformations will
4197 change the sign of the result (from +0 to -0, or vice
4198 versa). The last four will fix the sign of the result,
4199 even though the original expressions could be positive or
4200 negative, depending on the sign of A.
4202 Note that all these transformations are correct if A is
4203 NaN, since the two alternatives (A and -A) are also NaNs. */
4204 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4205 ? real_zerop (arg01)
4206 : integer_zerop (arg01))
4207 && TREE_CODE (arg2) == NEGATE_EXPR
4208 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4209 switch (comp_code)
4211 case EQ_EXPR:
4212 tem = fold_convert (arg1_type, arg1);
4213 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4214 case NE_EXPR:
4215 return pedantic_non_lvalue (fold_convert (type, arg1));
4216 case GE_EXPR:
4217 case GT_EXPR:
4218 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4219 arg1 = fold_convert (lang_hooks.types.signed_type
4220 (TREE_TYPE (arg1)), arg1);
4221 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4222 return pedantic_non_lvalue (fold_convert (type, tem));
4223 case LE_EXPR:
4224 case LT_EXPR:
4225 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4226 arg1 = fold_convert (lang_hooks.types.signed_type
4227 (TREE_TYPE (arg1)), arg1);
4228 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4229 return negate_expr (fold_convert (type, tem));
4230 default:
4231 abort ();
4234 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4235 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4236 both transformations are correct when A is NaN: A != 0
4237 is then true, and A == 0 is false. */
4239 if (integer_zerop (arg01) && integer_zerop (arg2))
4241 if (comp_code == NE_EXPR)
4242 return pedantic_non_lvalue (fold_convert (type, arg1));
4243 else if (comp_code == EQ_EXPR)
4244 return fold_convert (type, integer_zero_node);
4247 /* Try some transformations of A op B ? A : B.
4249 A == B? A : B same as B
4250 A != B? A : B same as A
4251 A >= B? A : B same as max (A, B)
4252 A > B? A : B same as max (B, A)
4253 A <= B? A : B same as min (A, B)
4254 A < B? A : B same as min (B, A)
4256 As above, these transformations don't work in the presence
4257 of signed zeros. For example, if A and B are zeros of
4258 opposite sign, the first two transformations will change
4259 the sign of the result. In the last four, the original
4260 expressions give different results for (A=+0, B=-0) and
4261 (A=-0, B=+0), but the transformed expressions do not.
4263 The first two transformations are correct if either A or B
4264 is a NaN. In the first transformation, the condition will
4265 be false, and B will indeed be chosen. In the case of the
4266 second transformation, the condition A != B will be true,
4267 and A will be chosen.
4269 The conversions to max() and min() are not correct if B is
4270 a number and A is not. The conditions in the original
4271 expressions will be false, so all four give B. The min()
4272 and max() versions would give a NaN instead. */
4273 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4275 tree comp_op0 = arg00;
4276 tree comp_op1 = arg01;
4277 tree comp_type = TREE_TYPE (comp_op0);
4279 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4280 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4282 comp_type = type;
4283 comp_op0 = arg1;
4284 comp_op1 = arg2;
4287 switch (comp_code)
4289 case EQ_EXPR:
4290 return pedantic_non_lvalue (fold_convert (type, arg2));
4291 case NE_EXPR:
4292 return pedantic_non_lvalue (fold_convert (type, arg1));
4293 case LE_EXPR:
4294 case LT_EXPR:
4295 /* In C++ a ?: expression can be an lvalue, so put the
4296 operand which will be used if they are equal first
4297 so that we can convert this back to the
4298 corresponding COND_EXPR. */
4299 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4301 comp_op0 = fold_convert (comp_type, comp_op0);
4302 comp_op1 = fold_convert (comp_type, comp_op1);
4303 tem = fold (build2 (MIN_EXPR, comp_type,
4304 (comp_code == LE_EXPR
4305 ? comp_op0 : comp_op1),
4306 (comp_code == LE_EXPR
4307 ? comp_op1 : comp_op0)));
4308 return pedantic_non_lvalue (fold_convert (type, tem));
4310 break;
4311 case GE_EXPR:
4312 case GT_EXPR:
4313 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4315 comp_op0 = fold_convert (comp_type, comp_op0);
4316 comp_op1 = fold_convert (comp_type, comp_op1);
4317 tem = fold (build2 (MAX_EXPR, comp_type,
4318 (comp_code == GE_EXPR
4319 ? comp_op0 : comp_op1),
4320 (comp_code == GE_EXPR
4321 ? comp_op1 : comp_op0)));
4322 tem = fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1));
4323 return pedantic_non_lvalue (fold_convert (type, tem));
4325 break;
4326 default:
4327 abort ();
4331 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4332 we might still be able to simplify this. For example,
4333 if C1 is one less or one more than C2, this might have started
4334 out as a MIN or MAX and been transformed by this function.
4335 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4337 if (INTEGRAL_TYPE_P (type)
4338 && TREE_CODE (arg01) == INTEGER_CST
4339 && TREE_CODE (arg2) == INTEGER_CST)
4340 switch (comp_code)
4342 case EQ_EXPR:
4343 /* We can replace A with C1 in this case. */
4344 arg1 = fold_convert (type, arg01);
4345 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4347 case LT_EXPR:
4348 /* If C1 is C2 + 1, this is min(A, C2). */
4349 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4350 OEP_ONLY_CONST)
4351 && operand_equal_p (arg01,
4352 const_binop (PLUS_EXPR, arg2,
4353 integer_one_node, 0),
4354 OEP_ONLY_CONST))
4355 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4356 type, arg1, arg2)));
4357 break;
4359 case LE_EXPR:
4360 /* If C1 is C2 - 1, this is min(A, C2). */
4361 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4362 OEP_ONLY_CONST)
4363 && operand_equal_p (arg01,
4364 const_binop (MINUS_EXPR, arg2,
4365 integer_one_node, 0),
4366 OEP_ONLY_CONST))
4367 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4368 type, arg1, arg2)));
4369 break;
4371 case GT_EXPR:
4372 /* If C1 is C2 - 1, this is max(A, C2). */
4373 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4374 OEP_ONLY_CONST)
4375 && operand_equal_p (arg01,
4376 const_binop (MINUS_EXPR, arg2,
4377 integer_one_node, 0),
4378 OEP_ONLY_CONST))
4379 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4380 type, arg1, arg2)));
4381 break;
4383 case GE_EXPR:
4384 /* If C1 is C2 + 1, this is max(A, C2). */
4385 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4386 OEP_ONLY_CONST)
4387 && operand_equal_p (arg01,
4388 const_binop (PLUS_EXPR, arg2,
4389 integer_one_node, 0),
4390 OEP_ONLY_CONST))
4391 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4392 type, arg1, arg2)));
4393 break;
4394 case NE_EXPR:
4395 break;
4396 default:
4397 abort ();
4400 return NULL_TREE;
4405 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4406 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4407 #endif
4409 /* EXP is some logical combination of boolean tests. See if we can
4410 merge it into some range test. Return the new tree if so. */
4412 static tree
4413 fold_range_test (tree exp)
4415 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4416 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4417 int in0_p, in1_p, in_p;
4418 tree low0, low1, low, high0, high1, high;
4419 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4420 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4421 tree tem;
4423 /* If this is an OR operation, invert both sides; we will invert
4424 again at the end. */
4425 if (or_op)
4426 in0_p = ! in0_p, in1_p = ! in1_p;
4428 /* If both expressions are the same, if we can merge the ranges, and we
4429 can build the range test, return it or it inverted. If one of the
4430 ranges is always true or always false, consider it to be the same
4431 expression as the other. */
4432 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4433 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4434 in1_p, low1, high1)
4435 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4436 lhs != 0 ? lhs
4437 : rhs != 0 ? rhs : integer_zero_node,
4438 in_p, low, high))))
4439 return or_op ? invert_truthvalue (tem) : tem;
4441 /* On machines where the branch cost is expensive, if this is a
4442 short-circuited branch and the underlying object on both sides
4443 is the same, make a non-short-circuit operation. */
4444 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4445 && lhs != 0 && rhs != 0
4446 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4447 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4448 && operand_equal_p (lhs, rhs, 0))
4450 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4451 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4452 which cases we can't do this. */
4453 if (simple_operand_p (lhs))
4454 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4455 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4456 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4457 TREE_OPERAND (exp, 1));
4459 else if (lang_hooks.decls.global_bindings_p () == 0
4460 && ! CONTAINS_PLACEHOLDER_P (lhs))
4462 tree common = save_expr (lhs);
4464 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4465 or_op ? ! in0_p : in0_p,
4466 low0, high0))
4467 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4468 or_op ? ! in1_p : in1_p,
4469 low1, high1))))
4470 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4471 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4472 TREE_TYPE (exp), lhs, rhs);
4476 return 0;
4479 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4480 bit value. Arrange things so the extra bits will be set to zero if and
4481 only if C is signed-extended to its full width. If MASK is nonzero,
4482 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4484 static tree
4485 unextend (tree c, int p, int unsignedp, tree mask)
4487 tree type = TREE_TYPE (c);
4488 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4489 tree temp;
4491 if (p == modesize || unsignedp)
4492 return c;
4494 /* We work by getting just the sign bit into the low-order bit, then
4495 into the high-order bit, then sign-extend. We then XOR that value
4496 with C. */
4497 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4498 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4500 /* We must use a signed type in order to get an arithmetic right shift.
4501 However, we must also avoid introducing accidental overflows, so that
4502 a subsequent call to integer_zerop will work. Hence we must
4503 do the type conversion here. At this point, the constant is either
4504 zero or one, and the conversion to a signed type can never overflow.
4505 We could get an overflow if this conversion is done anywhere else. */
4506 if (TYPE_UNSIGNED (type))
4507 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4509 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4510 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4511 if (mask != 0)
4512 temp = const_binop (BIT_AND_EXPR, temp,
4513 fold_convert (TREE_TYPE (c), mask), 0);
4514 /* If necessary, convert the type back to match the type of C. */
4515 if (TYPE_UNSIGNED (type))
4516 temp = fold_convert (type, temp);
4518 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  /* Normalize (a & 1) to (a & 1) != 0 so the merging logic below only has
     to deal with comparisons.  */
  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    fold_convert (TREE_TYPE (lhs), integer_zero_node));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    fold_convert (TREE_TYPE (rhs), integer_zero_node));
      rcode = NE_EXPR;
    }

  /* Both sides must now be comparisons ('<' is the tree-code class for
     comparison operators in this era of GCC).  */
  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
    return 0;

  /* Naming scheme: [lr][lr]_* = {left,right comparison}{left,right operand}.  */
  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
	{
          result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  /* Operands appear swapped between the two comparisons; swap the
	     right-hand comparison code to match before combining.  */
          result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  /* From here on the short-circuit forms are treated like their
     non-short-circuit counterparts.  */
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      return build2 (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  /* VOLATILEP accumulates across all four decodes; any volatile reference
     anywhere disqualifies the whole transformation below.  */
  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  /* Align the field start down to a mode-size boundary (mode bitsizes are
     powers of two, so the mask trick is exact).  */
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      /* If the constant has bits set outside the field's mask, the
	 comparison can never succeed (or never fail for NE).  */
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, ll_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, rl_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
4952 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4953 constant. */
4955 static tree
4956 optimize_minmax_comparison (tree t)
4958 tree type = TREE_TYPE (t);
4959 tree arg0 = TREE_OPERAND (t, 0);
4960 enum tree_code op_code;
4961 tree comp_const = TREE_OPERAND (t, 1);
4962 tree minmax_const;
4963 int consts_equal, consts_lt;
4964 tree inner;
4966 STRIP_SIGN_NOPS (arg0);
4968 op_code = TREE_CODE (arg0);
4969 minmax_const = TREE_OPERAND (arg0, 1);
4970 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4971 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4972 inner = TREE_OPERAND (arg0, 0);
4974 /* If something does not permit us to optimize, return the original tree. */
4975 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4976 || TREE_CODE (comp_const) != INTEGER_CST
4977 || TREE_CONSTANT_OVERFLOW (comp_const)
4978 || TREE_CODE (minmax_const) != INTEGER_CST
4979 || TREE_CONSTANT_OVERFLOW (minmax_const))
4980 return t;
4982 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4983 and GT_EXPR, doing the rest with recursive calls using logical
4984 simplifications. */
4985 switch (TREE_CODE (t))
4987 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4988 return
4989 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4991 case GE_EXPR:
4992 return
4993 fold (build2 (TRUTH_ORIF_EXPR, type,
4994 optimize_minmax_comparison
4995 (build2 (EQ_EXPR, type, arg0, comp_const)),
4996 optimize_minmax_comparison
4997 (build2 (GT_EXPR, type, arg0, comp_const))));
4999 case EQ_EXPR:
5000 if (op_code == MAX_EXPR && consts_equal)
5001 /* MAX (X, 0) == 0 -> X <= 0 */
5002 return fold (build2 (LE_EXPR, type, inner, comp_const));
5004 else if (op_code == MAX_EXPR && consts_lt)
5005 /* MAX (X, 0) == 5 -> X == 5 */
5006 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5008 else if (op_code == MAX_EXPR)
5009 /* MAX (X, 0) == -1 -> false */
5010 return omit_one_operand (type, integer_zero_node, inner);
5012 else if (consts_equal)
5013 /* MIN (X, 0) == 0 -> X >= 0 */
5014 return fold (build2 (GE_EXPR, type, inner, comp_const));
5016 else if (consts_lt)
5017 /* MIN (X, 0) == 5 -> false */
5018 return omit_one_operand (type, integer_zero_node, inner);
5020 else
5021 /* MIN (X, 0) == -1 -> X == -1 */
5022 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5024 case GT_EXPR:
5025 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5026 /* MAX (X, 0) > 0 -> X > 0
5027 MAX (X, 0) > 5 -> X > 5 */
5028 return fold (build2 (GT_EXPR, type, inner, comp_const));
5030 else if (op_code == MAX_EXPR)
5031 /* MAX (X, 0) > -1 -> true */
5032 return omit_one_operand (type, integer_one_node, inner);
5034 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5035 /* MIN (X, 0) > 0 -> false
5036 MIN (X, 0) > 5 -> false */
5037 return omit_one_operand (type, integer_zero_node, inner);
5039 else
5040 /* MIN (X, 0) > -1 -> X > -1 */
5041 return fold (build2 (GT_EXPR, type, inner, comp_const));
5043 default:
5044 return t;
5048 /* T is an integer expression that is being multiplied, divided, or taken a
5049 modulus (CODE says which and what kind of divide or modulus) by a
5050 constant C. See if we can eliminate that operation by folding it with
5051 other operations already in T. WIDE_TYPE, if non-null, is a type that
5052 should be used for the computation if wider than our type.
5054 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5055 (X * 2) + (Y * 4). We must, however, be assured that either the original
5056 expression would not overflow or that overflow is undefined for the type
5057 in the language in question.
5059 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5060 the machine has a multiply-accumulate insn or that this is part of an
5061 addressing calculation.
5063 If we return a non-null expression, it is an equivalent form of the
5064 original computation, but need not be in the original type. */
5066 static tree
5067 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5069 /* To avoid exponential search depth, refuse to allow recursion past
5070 three levels. Beyond that (1) it's highly unlikely that we'll find
5071 something interesting and (2) we've probably processed it before
5072 when we built the inner expression. */
5074 static int depth;
5075 tree ret;
5077 if (depth > 3)
5078 return NULL;
5080 depth++;
5081 ret = extract_muldiv_1 (t, c, code, wide_type);
5082 depth--;
5084 return ret;
5087 static tree
5088 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5090 tree type = TREE_TYPE (t);
5091 enum tree_code tcode = TREE_CODE (t);
5092 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5093 > GET_MODE_SIZE (TYPE_MODE (type)))
5094 ? wide_type : type);
5095 tree t1, t2;
5096 int same_p = tcode == code;
5097 tree op0 = NULL_TREE, op1 = NULL_TREE;
5099 /* Don't deal with constants of zero here; they confuse the code below. */
5100 if (integer_zerop (c))
5101 return NULL_TREE;
5103 if (TREE_CODE_CLASS (tcode) == '1')
5104 op0 = TREE_OPERAND (t, 0);
5106 if (TREE_CODE_CLASS (tcode) == '2')
5107 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5109 /* Note that we need not handle conditional operations here since fold
5110 already handles those cases. So just do arithmetic here. */
5111 switch (tcode)
5113 case INTEGER_CST:
5114 /* For a constant, we can always simplify if we are a multiply
5115 or (for divide and modulus) if it is a multiple of our constant. */
5116 if (code == MULT_EXPR
5117 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5118 return const_binop (code, fold_convert (ctype, t),
5119 fold_convert (ctype, c), 0);
5120 break;
5122 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5123 /* If op0 is an expression ... */
5124 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5125 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5126 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5127 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5128 /* ... and is unsigned, and its type is smaller than ctype,
5129 then we cannot pass through as widening. */
5130 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5131 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5132 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5133 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5134 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5135 /* ... or this is a truncation (t is narrower than op0),
5136 then we cannot pass through this narrowing. */
5137 || (GET_MODE_SIZE (TYPE_MODE (type))
5138 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5139 /* ... or signedness changes for division or modulus,
5140 then we cannot pass through this conversion. */
5141 || (code != MULT_EXPR
5142 && (TYPE_UNSIGNED (ctype)
5143 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5144 break;
5146 /* Pass the constant down and see if we can make a simplification. If
5147 we can, replace this expression with the inner simplification for
5148 possible later conversion to our or some other type. */
5149 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5150 && TREE_CODE (t2) == INTEGER_CST
5151 && ! TREE_CONSTANT_OVERFLOW (t2)
5152 && (0 != (t1 = extract_muldiv (op0, t2, code,
5153 code == MULT_EXPR
5154 ? ctype : NULL_TREE))))
5155 return t1;
5156 break;
5158 case NEGATE_EXPR: case ABS_EXPR:
5159 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5160 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5161 break;
5163 case MIN_EXPR: case MAX_EXPR:
5164 /* If widening the type changes the signedness, then we can't perform
5165 this optimization as that changes the result. */
5166 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5167 break;
5169 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5170 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5171 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5173 if (tree_int_cst_sgn (c) < 0)
5174 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5176 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5177 fold_convert (ctype, t2)));
5179 break;
5181 case LSHIFT_EXPR: case RSHIFT_EXPR:
5182 /* If the second operand is constant, this is a multiplication
5183 or floor division, by a power of two, so we can treat it that
5184 way unless the multiplier or divisor overflows. Signed
5185 left-shift overflow is implementation-defined rather than
5186 undefined in C90, so do not convert signed left shift into
5187 multiplication. */
5188 if (TREE_CODE (op1) == INTEGER_CST
5189 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5190 /* const_binop may not detect overflow correctly,
5191 so check for it explicitly here. */
5192 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5193 && TREE_INT_CST_HIGH (op1) == 0
5194 && 0 != (t1 = fold_convert (ctype,
5195 const_binop (LSHIFT_EXPR,
5196 size_one_node,
5197 op1, 0)))
5198 && ! TREE_OVERFLOW (t1))
5199 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5200 ? MULT_EXPR : FLOOR_DIV_EXPR,
5201 ctype, fold_convert (ctype, op0), t1),
5202 c, code, wide_type);
5203 break;
5205 case PLUS_EXPR: case MINUS_EXPR:
5206 /* See if we can eliminate the operation on both sides. If we can, we
5207 can return a new PLUS or MINUS. If we can't, the only remaining
5208 cases where we can do anything are if the second operand is a
5209 constant. */
5210 t1 = extract_muldiv (op0, c, code, wide_type);
5211 t2 = extract_muldiv (op1, c, code, wide_type);
5212 if (t1 != 0 && t2 != 0
5213 && (code == MULT_EXPR
5214 /* If not multiplication, we can only do this if both operands
5215 are divisible by c. */
5216 || (multiple_of_p (ctype, op0, c)
5217 && multiple_of_p (ctype, op1, c))))
5218 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5219 fold_convert (ctype, t2)));
5221 /* If this was a subtraction, negate OP1 and set it to be an addition.
5222 This simplifies the logic below. */
5223 if (tcode == MINUS_EXPR)
5224 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5226 if (TREE_CODE (op1) != INTEGER_CST)
5227 break;
5229 /* If either OP1 or C are negative, this optimization is not safe for
5230 some of the division and remainder types while for others we need
5231 to change the code. */
5232 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5234 if (code == CEIL_DIV_EXPR)
5235 code = FLOOR_DIV_EXPR;
5236 else if (code == FLOOR_DIV_EXPR)
5237 code = CEIL_DIV_EXPR;
5238 else if (code != MULT_EXPR
5239 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5240 break;
5243 /* If it's a multiply or a division/modulus operation of a multiple
5244 of our constant, do the operation and verify it doesn't overflow. */
5245 if (code == MULT_EXPR
5246 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5248 op1 = const_binop (code, fold_convert (ctype, op1),
5249 fold_convert (ctype, c), 0);
5250 /* We allow the constant to overflow with wrapping semantics. */
5251 if (op1 == 0
5252 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5253 break;
5255 else
5256 break;
5258 /* If we have an unsigned type is not a sizetype, we cannot widen
5259 the operation since it will change the result if the original
5260 computation overflowed. */
5261 if (TYPE_UNSIGNED (ctype)
5262 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5263 && ctype != type)
5264 break;
5266 /* If we were able to eliminate our operation from the first side,
5267 apply our operation to the second side and reform the PLUS. */
5268 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5269 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5271 /* The last case is if we are a multiply. In that case, we can
5272 apply the distributive law to commute the multiply and addition
5273 if the multiplication of the constants doesn't overflow. */
5274 if (code == MULT_EXPR)
5275 return fold (build2 (tcode, ctype,
5276 fold (build2 (code, ctype,
5277 fold_convert (ctype, op0),
5278 fold_convert (ctype, c))),
5279 op1));
5281 break;
5283 case MULT_EXPR:
5284 /* We have a special case here if we are doing something like
5285 (C * 8) % 4 since we know that's zero. */
5286 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5287 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5288 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5289 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5290 return omit_one_operand (type, integer_zero_node, op0);
5292 /* ... fall through ... */
5294 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5295 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5296 /* If we can extract our operation from the LHS, do so and return a
5297 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5298 do something only if the second operand is a constant. */
5299 if (same_p
5300 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5301 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5302 fold_convert (ctype, op1)));
5303 else if (tcode == MULT_EXPR && code == MULT_EXPR
5304 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5305 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5306 fold_convert (ctype, t1)));
5307 else if (TREE_CODE (op1) != INTEGER_CST)
5308 return 0;
5310 /* If these are the same operation types, we can associate them
5311 assuming no overflow. */
5312 if (tcode == code
5313 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5314 fold_convert (ctype, c), 0))
5315 && ! TREE_OVERFLOW (t1))
5316 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5318 /* If these operations "cancel" each other, we have the main
5319 optimizations of this pass, which occur when either constant is a
5320 multiple of the other, in which case we replace this with either an
5321 operation or CODE or TCODE.
5323 If we have an unsigned type that is not a sizetype, we cannot do
5324 this since it will change the result if the original computation
5325 overflowed. */
5326 if ((! TYPE_UNSIGNED (ctype)
5327 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5328 && ! flag_wrapv
5329 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5330 || (tcode == MULT_EXPR
5331 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5332 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5334 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5335 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5336 fold_convert (ctype,
5337 const_binop (TRUNC_DIV_EXPR,
5338 op1, c, 0))));
5339 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5340 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5341 fold_convert (ctype,
5342 const_binop (TRUNC_DIV_EXPR,
5343 c, op1, 0))));
5345 break;
5347 default:
5348 break;
5351 return 0;
5354 /* Return a node which has the indicated constant VALUE (either 0 or
5355 1), and is of the indicated TYPE. */
5357 tree
5358 constant_boolean_node (int value, tree type)
5360 if (type == integer_type_node)
5361 return value ? integer_one_node : integer_zero_node;
5362 else if (type == boolean_type_node)
5363 return value ? boolean_true_node : boolean_false_node;
5364 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5365 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5366 : integer_zero_node);
5367 else
5368 return build_int_cst (type, value);
5371 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5372 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5373 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5374 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5375 COND is the first argument to CODE; otherwise (as in the example
5376 given here), it is the second argument. TYPE is the type of the
5377 original expression. Return NULL_TREE if no simplification is
5378 possible. */
5380 static tree
5381 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5382 tree cond, tree arg, int cond_first_p)
5384 tree test, true_value, false_value;
5385 tree lhs = NULL_TREE;
5386 tree rhs = NULL_TREE;
5388 /* This transformation is only worthwhile if we don't have to wrap
5389 arg in a SAVE_EXPR, and the operation can be simplified on atleast
5390 one of the branches once its pushed inside the COND_EXPR. */
5391 if (!TREE_CONSTANT (arg))
5392 return NULL_TREE;
5394 if (TREE_CODE (cond) == COND_EXPR)
5396 test = TREE_OPERAND (cond, 0);
5397 true_value = TREE_OPERAND (cond, 1);
5398 false_value = TREE_OPERAND (cond, 2);
5399 /* If this operand throws an expression, then it does not make
5400 sense to try to perform a logical or arithmetic operation
5401 involving it. */
5402 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5403 lhs = true_value;
5404 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5405 rhs = false_value;
5407 else
5409 tree testtype = TREE_TYPE (cond);
5410 test = cond;
5411 true_value = constant_boolean_node (true, testtype);
5412 false_value = constant_boolean_node (false, testtype);
5415 if (lhs == 0)
5416 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5417 : build2 (code, type, arg, true_value));
5418 if (rhs == 0)
5419 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5420 : build2 (code, type, arg, false_value));
5422 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5423 return fold_convert (type, test);
5427 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5429 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5430 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5431 ADDEND is the same as X.
5433 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5434 and finite. The problematic cases are when X is zero, and its mode
5435 has signed zeros. In the case of rounding towards -infinity,
5436 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5437 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5439 static bool
5440 fold_real_zero_addition_p (tree type, tree addend, int negate)
5442 if (!real_zerop (addend))
5443 return false;
5445 /* Don't allow the fold with -fsignaling-nans. */
5446 if (HONOR_SNANS (TYPE_MODE (type)))
5447 return false;
5449 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5450 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5451 return true;
5453 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5454 if (TREE_CODE (addend) == REAL_CST
5455 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5456 negate = !negate;
5458 /* The mode has signed zeros, and we have to honor their sign.
5459 In this situation, there is only one case we can return true for.
5460 X - 0 is the same as X unless rounding towards -infinity is
5461 supported. */
5462 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5465 /* Subroutine of fold() that checks comparisons of built-in math
5466 functions against real constants.
5468 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5469 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5470 is the type of the result and ARG0 and ARG1 are the operands of the
5471 comparison. ARG1 must be a TREE_REAL_CST.
5473 The function returns the constant folded tree if a simplification
5474 can be made, and NULL_TREE otherwise. */
5476 static tree
5477 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5478 tree type, tree arg0, tree arg1)
5480 REAL_VALUE_TYPE c;
/* Only sqrt-family builtins are simplified here; anything else
   falls through to the final NULL_TREE.  */
5482 if (BUILTIN_SQRT_P (fcode))
/* ARG is the argument of the sqrt call (first element of the
   CALL_EXPR's argument list).  */
5484 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5485 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5487 c = TREE_REAL_CST (arg1);
/* Case 1: the bound is negative.  sqrt never returns a negative
   value, so the comparison is decidable up to NaN concerns.  */
5488 if (REAL_VALUE_NEGATIVE (c))
5490 /* sqrt(x) < y is always false, if y is negative. */
5491 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5492 return omit_one_operand (type, integer_zero_node, arg);
5494 /* sqrt(x) > y is always true, if y is negative and we
5495 don't care about NaNs, i.e. negative values of x. */
5496 if (code == NE_EXPR || !HONOR_NANS (mode))
5497 return omit_one_operand (type, integer_one_node, arg);
5499 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5500 return fold (build2 (GE_EXPR, type, arg,
5501 build_real (TREE_TYPE (arg), dconst0)));
/* Case 2: sqrt(x) >/>= c with c >= 0 becomes x >/>= c*c.
   c2 = c*c may overflow to infinity; that case is handled
   explicitly below.  */
5503 else if (code == GT_EXPR || code == GE_EXPR)
5505 REAL_VALUE_TYPE c2;
5507 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5508 real_convert (&c2, mode, &c2);
5510 if (REAL_VALUE_ISINF (c2))
5512 /* sqrt(x) > y is x == +Inf, when y is very large. */
5513 if (HONOR_INFINITIES (mode))
5514 return fold (build2 (EQ_EXPR, type, arg,
5515 build_real (TREE_TYPE (arg), c2)));
5517 /* sqrt(x) > y is always false, when y is very large
5518 and we don't care about infinities. */
5519 return omit_one_operand (type, integer_zero_node, arg);
5522 /* sqrt(x) > c is the same as x > c*c. */
5523 return fold (build2 (code, type, arg,
5524 build_real (TREE_TYPE (arg), c2)));
/* Case 3: sqrt(x) </<= c with c >= 0.  The rewrite must also keep
   the implicit x >= 0 constraint that sqrt imposes, unless NaNs
   are ignored.  */
5526 else if (code == LT_EXPR || code == LE_EXPR)
5528 REAL_VALUE_TYPE c2;
5530 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5531 real_convert (&c2, mode, &c2);
5533 if (REAL_VALUE_ISINF (c2))
5535 /* sqrt(x) < y is always true, when y is a very large
5536 value and we don't care about NaNs or Infinities. */
5537 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5538 return omit_one_operand (type, integer_one_node, arg);
5540 /* sqrt(x) < y is x != +Inf when y is very large and we
5541 don't care about NaNs. */
5542 if (! HONOR_NANS (mode))
5543 return fold (build2 (NE_EXPR, type, arg,
5544 build_real (TREE_TYPE (arg), c2)));
5546 /* sqrt(x) < y is x >= 0 when y is very large and we
5547 don't care about Infinities. */
5548 if (! HONOR_INFINITIES (mode))
5549 return fold (build2 (GE_EXPR, type, arg,
5550 build_real (TREE_TYPE (arg), dconst0)));
5552 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG is used twice below, so it must be wrapped in a SAVE_EXPR;
   that is only valid outside global bindings and when ARG has no
   placeholder.  */
5553 if (lang_hooks.decls.global_bindings_p () != 0
5554 || CONTAINS_PLACEHOLDER_P (arg))
5555 return NULL_TREE;
5557 arg = save_expr (arg);
5558 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5559 fold (build2 (GE_EXPR, type, arg,
5560 build_real (TREE_TYPE (arg),
5561 dconst0))),
5562 fold (build2 (NE_EXPR, type, arg,
5563 build_real (TREE_TYPE (arg),
5564 c2)))));
5567 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5568 if (! HONOR_NANS (mode))
5569 return fold (build2 (code, type, arg,
5570 build_real (TREE_TYPE (arg), c2)));
5572 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5573 if (lang_hooks.decls.global_bindings_p () == 0
5574 && ! CONTAINS_PLACEHOLDER_P (arg))
5576 arg = save_expr (arg);
5577 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5578 fold (build2 (GE_EXPR, type, arg,
5579 build_real (TREE_TYPE (arg),
5580 dconst0))),
5581 fold (build2 (code, type, arg,
5582 build_real (TREE_TYPE (arg),
5583 c2)))));
5588 return NULL_TREE;
5591 /* Subroutine of fold() that optimizes comparisons against Infinities,
5592 either +Inf or -Inf.
5594 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5595 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5596 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5598 The function returns the constant folded tree if a simplification
5599 can be made, and NULL_TREE otherwise. */
5601 static tree
5602 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5604 enum machine_mode mode;
5605 REAL_VALUE_TYPE max;
5606 tree temp;
5607 bool neg;
5609 mode = TYPE_MODE (TREE_TYPE (arg0));
5611 /* For negative infinity swap the sense of the comparison, so the
   switch below only has to reason about +Inf; NEG is threaded
   through to real_maxval and the emitted comparison codes.  */
5612 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5613 if (neg)
5614 code = swap_tree_comparison (code);
5616 switch (code)
5618 case GT_EXPR:
5619 /* x > +Inf is always false, if we ignore sNaNs. */
5620 if (HONOR_SNANS (mode))
5621 return NULL_TREE;
5622 return omit_one_operand (type, integer_zero_node, arg0);
5624 case LE_EXPR:
5625 /* x <= +Inf is always true, if we don't care about NaNs. */
5626 if (! HONOR_NANS (mode))
5627 return omit_one_operand (type, integer_one_node, arg0);
5629 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
/* ARG0 is used twice, so it needs a SAVE_EXPR; only valid outside
   global bindings and when ARG0 has no placeholder.  */
5630 if (lang_hooks.decls.global_bindings_p () == 0
5631 && ! CONTAINS_PLACEHOLDER_P (arg0))
5633 arg0 = save_expr (arg0);
5634 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5636 break;
5638 case EQ_EXPR:
5639 case GE_EXPR:
5640 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5641 real_maxval (&max, neg, mode);
5642 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5643 arg0, build_real (TREE_TYPE (arg0), max)));
5645 case LT_EXPR:
5646 /* x < +Inf is always equal to x <= DBL_MAX. */
5647 real_maxval (&max, neg, mode);
5648 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5649 arg0, build_real (TREE_TYPE (arg0), max)));
5651 case NE_EXPR:
5652 /* x != +Inf is always equal to !(x > DBL_MAX). */
5653 real_maxval (&max, neg, mode);
/* Without NaNs, x != +Inf simplifies directly to x <= DBL_MAX.  */
5654 if (! HONOR_NANS (mode))
5655 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5656 arg0, build_real (TREE_TYPE (arg0), max)));
5658 /* The transformation below creates non-gimple code and thus is
5659 not appropriate if we are in gimple form. */
5660 if (in_gimple_form)
5661 return NULL_TREE;
5663 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5664 arg0, build_real (TREE_TYPE (arg0), max)));
5665 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5667 default:
5668 break;
5671 return NULL_TREE;
5674 /* Subroutine of fold() that optimizes comparisons of a division by
5675 a nonzero integer constant against an integer constant, i.e.
5676 X/C1 op C2.
5678 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5679 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5680 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5682 The function returns the constant folded tree if a simplification
5683 can be made, and NULL_TREE otherwise. */
5685 static tree
5686 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5688 tree prod, tmp, hi, lo;
5689 tree arg00 = TREE_OPERAND (arg0, 0);
5690 tree arg01 = TREE_OPERAND (arg0, 1);
5691 unsigned HOST_WIDE_INT lpart;
5692 HOST_WIDE_INT hpart;
5693 int overflow;
5695 /* We have to do this the hard way to detect unsigned overflow.
5696 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5697 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5698 TREE_INT_CST_HIGH (arg01),
5699 TREE_INT_CST_LOW (arg1),
5700 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5701 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5702 prod = force_fit_type (prod, -1, overflow, false);
/* Compute the range [LO, HI] of values of ARG00 (X) for which
   X/C1 equals C2; the shape of the range depends on the signs.  */
5704 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
/* Unsigned division: X/C1 == C2 iff X is in [C1*C2, C1*C2 + C1-1].  */
5706 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5707 lo = prod;
5709 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5710 overflow = add_double (TREE_INT_CST_LOW (prod),
5711 TREE_INT_CST_HIGH (prod),
5712 TREE_INT_CST_LOW (tmp),
5713 TREE_INT_CST_HIGH (tmp),
5714 &lpart, &hpart);
5715 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5716 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5717 TREE_CONSTANT_OVERFLOW (prod));
/* Signed division with a nonnegative divisor: widen the range by
   C1-1 away from zero, on the side given by the sign of C2.  */
5719 else if (tree_int_cst_sgn (arg01) >= 0)
5721 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5722 switch (tree_int_cst_sgn (arg1))
5724 case -1:
5725 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5726 hi = prod;
5727 break;
5729 case 0:
5730 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5731 hi = tmp;
5732 break;
5734 case 1:
5735 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5736 lo = prod;
5737 break;
5739 default:
5740 abort ();
/* Signed division with a negative divisor: the widening side flips
   relative to the nonnegative-divisor case.  */
5743 else
5745 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5746 switch (tree_int_cst_sgn (arg1))
5748 case -1:
5749 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5750 lo = prod;
5751 break;
5753 case 0:
5754 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5755 lo = tmp;
5756 break;
5758 case 1:
5759 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5760 hi = prod;
5761 break;
5763 default:
5764 abort ();
/* Rewrite CODE against the range.  A bound whose TREE_OVERFLOW is
   set means the true range extends past the type's extreme on that
   side, so the corresponding half of the check collapses.  */
5768 switch (code)
5770 case EQ_EXPR:
5771 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5772 return omit_one_operand (type, integer_zero_node, arg00);
5773 if (TREE_OVERFLOW (hi))
5774 return fold (build2 (GE_EXPR, type, arg00, lo));
5775 if (TREE_OVERFLOW (lo))
5776 return fold (build2 (LE_EXPR, type, arg00, hi));
5777 return build_range_check (type, arg00, 1, lo, hi);
5779 case NE_EXPR:
5780 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5781 return omit_one_operand (type, integer_one_node, arg00);
5782 if (TREE_OVERFLOW (hi))
5783 return fold (build2 (LT_EXPR, type, arg00, lo));
5784 if (TREE_OVERFLOW (lo))
5785 return fold (build2 (GT_EXPR, type, arg00, hi));
5786 return build_range_check (type, arg00, 0, lo, hi);
5788 case LT_EXPR:
5789 if (TREE_OVERFLOW (lo))
5790 return omit_one_operand (type, integer_zero_node, arg00);
5791 return fold (build2 (LT_EXPR, type, arg00, lo));
5793 case LE_EXPR:
5794 if (TREE_OVERFLOW (hi))
5795 return omit_one_operand (type, integer_one_node, arg00);
5796 return fold (build2 (LE_EXPR, type, arg00, hi));
5798 case GT_EXPR:
5799 if (TREE_OVERFLOW (hi))
5800 return omit_one_operand (type, integer_zero_node, arg00);
5801 return fold (build2 (GT_EXPR, type, arg00, hi));
5803 case GE_EXPR:
5804 if (TREE_OVERFLOW (lo))
5805 return omit_one_operand (type, integer_one_node, arg00);
5806 return fold (build2 (GE_EXPR, type, arg00, lo));
5808 default:
5809 break;
5812 return NULL_TREE;
5816 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5817 equality/inequality test, then return a simplified form of
5818 the test using shifts and logical operations. Otherwise return
5819 NULL. TYPE is the desired result type. */
5821 tree
5822 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5823 tree result_type)
5825 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5826 operand 0. */
5827 if (code == TRUTH_NOT_EXPR)
5829 code = TREE_CODE (arg0);
5830 if (code != NE_EXPR && code != EQ_EXPR)
5831 return NULL_TREE;
5833 /* Extract the arguments of the EQ/NE. */
5834 arg1 = TREE_OPERAND (arg0, 1);
5835 arg0 = TREE_OPERAND (arg0, 0);
5837 /* This requires us to invert the code. */
5838 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5841 /* If this is testing a single bit, we can optimize the test. */
5842 if ((code == NE_EXPR || code == EQ_EXPR)
5843 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5844 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5846 tree inner = TREE_OPERAND (arg0, 0);
5847 tree type = TREE_TYPE (arg0);
5848 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5849 enum machine_mode operand_mode = TYPE_MODE (type);
5850 int ops_unsigned;
5851 tree signed_type, unsigned_type, intermediate_type;
5852 tree arg00;
5854 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5855 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5856 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5857 if (arg00 != NULL_TREE
5858 /* This is only a win if casting to a signed type is cheap,
5859 i.e. when arg00's type is not a partial mode. */
5860 && TYPE_PRECISION (TREE_TYPE (arg00))
5861 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5863 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5864 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5865 result_type, fold_convert (stype, arg00),
5866 fold_convert (stype, integer_zero_node)));
5869 /* Otherwise we have (A & C) != 0 where C is a single bit,
5870 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5871 Similarly for (A & C) == 0. */
5873 /* If INNER is a right shift of a constant and it plus BITNUM does
5874 not overflow, adjust BITNUM and INNER. */
/* NOTE(review): the compare_tree_int bound below uses
   bitnum - TYPE_PRECISION (type), a negative value, to require the
   shift count to stay strictly below the precision after folding
   the shift into BITNUM — confirm against the upstream history.  */
5875 if (TREE_CODE (inner) == RSHIFT_EXPR
5876 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5877 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5878 && bitnum < TYPE_PRECISION (type)
5879 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5880 bitnum - TYPE_PRECISION (type)))
5882 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5883 inner = TREE_OPERAND (inner, 0);
5886 /* If we are going to be able to omit the AND below, we must do our
5887 operations as unsigned. If we must use the AND, we have a choice.
5888 Normally unsigned is faster, but for some machines signed is. */
5889 #ifdef LOAD_EXTEND_OP
5890 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5891 #else
5892 ops_unsigned = 1;
5893 #endif
5895 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5896 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5897 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5898 inner = fold_convert (intermediate_type, inner);
/* Bring the tested bit down to position 0.  */
5900 if (bitnum != 0)
5901 inner = build2 (RSHIFT_EXPR, intermediate_type,
5902 inner, size_int (bitnum));
/* For (A & C) == 0 the result is the inverted bit.  */
5904 if (code == EQ_EXPR)
5905 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5906 inner, integer_one_node));
5908 /* Put the AND last so it can combine with more things. */
5909 inner = build2 (BIT_AND_EXPR, intermediate_type,
5910 inner, integer_one_node);
5912 /* Make sure to return the proper type. */
5913 inner = fold_convert (result_type, inner);
5915 return inner;
5917 return NULL_TREE;
5920 /* Check whether we are allowed to reorder operands arg0 and arg1,
5921 such that the evaluation of arg1 occurs before arg0. */
5923 static bool
5924 reorder_operands_p (tree arg0, tree arg1)
5926 if (! flag_evaluation_order)
5927 return true;
5928 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5929 return true;
5930 return ! TREE_SIDE_EFFECTS (arg0)
5931 && ! TREE_SIDE_EFFECTS (arg1);
5934 /* Test whether it is preferable two swap two operands, ARG0 and
5935 ARG1, for example because ARG0 is an integer constant and ARG1
5936 isn't. If REORDER is true, only recommend swapping if we can
5937 evaluate the operands in reverse order. */
5939 bool
5940 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5942 STRIP_SIGN_NOPS (arg0);
5943 STRIP_SIGN_NOPS (arg1);
5945 if (TREE_CODE (arg1) == INTEGER_CST)
5946 return 0;
5947 if (TREE_CODE (arg0) == INTEGER_CST)
5948 return 1;
5950 if (TREE_CODE (arg1) == REAL_CST)
5951 return 0;
5952 if (TREE_CODE (arg0) == REAL_CST)
5953 return 1;
5955 if (TREE_CODE (arg1) == COMPLEX_CST)
5956 return 0;
5957 if (TREE_CODE (arg0) == COMPLEX_CST)
5958 return 1;
5960 if (TREE_CONSTANT (arg1))
5961 return 0;
5962 if (TREE_CONSTANT (arg0))
5963 return 1;
5965 if (optimize_size)
5966 return 0;
5968 if (reorder && flag_evaluation_order
5969 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5970 return 0;
5972 if (DECL_P (arg1))
5973 return 0;
5974 if (DECL_P (arg0))
5975 return 1;
5977 if (reorder && flag_evaluation_order
5978 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5979 return 0;
5981 if (DECL_P (arg1))
5982 return 0;
5983 if (DECL_P (arg0))
5984 return 1;
5986 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5987 for commutative and comparison operators. Ensuring a canonical
5988 form allows the optimizers to find additional redundancies without
5989 having to explicitly check for both orderings. */
5990 if (TREE_CODE (arg0) == SSA_NAME
5991 && TREE_CODE (arg1) == SSA_NAME
5992 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5993 return 1;
5995 return 0;
5998 /* Perform constant folding and related simplification of EXPR.
5999 The related simplifications include x*1 => x, x*0 => 0, etc.,
6000 and application of the associative law.
6001 NOP_EXPR conversions may be removed freely (as long as we
6002 are careful not to change the type of the overall expression).
6003 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6004 but we can constant-fold them if they have constant operands. */
6006 #ifdef ENABLE_FOLD_CHECKING
6007 # define fold(x) fold_1 (x)
6008 static tree fold_1 (tree);
6009 static
6010 #endif
6011 tree
6012 fold (tree expr)
6014 const tree t = expr;
6015 const tree type = TREE_TYPE (expr);
6016 tree t1 = NULL_TREE;
6017 tree tem;
6018 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6019 enum tree_code code = TREE_CODE (t);
6020 int kind = TREE_CODE_CLASS (code);
6022 /* WINS will be nonzero when the switch is done
6023 if all operands are constant. */
6024 int wins = 1;
6026 /* Return right away if a constant. */
6027 if (kind == 'c')
6028 return t;
6030 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6032 tree subop;
6034 /* Special case for conversion ops that can have fixed point args. */
6035 arg0 = TREE_OPERAND (t, 0);
6037 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6038 if (arg0 != 0)
6039 STRIP_SIGN_NOPS (arg0);
6041 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6042 subop = TREE_REALPART (arg0);
6043 else
6044 subop = arg0;
6046 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6047 && TREE_CODE (subop) != REAL_CST)
6048 /* Note that TREE_CONSTANT isn't enough:
6049 static var addresses are constant but we can't
6050 do arithmetic on them. */
6051 wins = 0;
6053 else if (IS_EXPR_CODE_CLASS (kind))
6055 int len = first_rtl_op (code);
6056 int i;
6057 for (i = 0; i < len; i++)
6059 tree op = TREE_OPERAND (t, i);
6060 tree subop;
6062 if (op == 0)
6063 continue; /* Valid for CALL_EXPR, at least. */
6065 /* Strip any conversions that don't change the mode. This is
6066 safe for every expression, except for a comparison expression
6067 because its signedness is derived from its operands. So, in
6068 the latter case, only strip conversions that don't change the
6069 signedness.
6071 Note that this is done as an internal manipulation within the
6072 constant folder, in order to find the simplest representation
6073 of the arguments so that their form can be studied. In any
6074 cases, the appropriate type conversions should be put back in
6075 the tree that will get out of the constant folder. */
6076 if (kind == '<')
6077 STRIP_SIGN_NOPS (op);
6078 else
6079 STRIP_NOPS (op);
6081 if (TREE_CODE (op) == COMPLEX_CST)
6082 subop = TREE_REALPART (op);
6083 else
6084 subop = op;
6086 if (TREE_CODE (subop) != INTEGER_CST
6087 && TREE_CODE (subop) != REAL_CST)
6088 /* Note that TREE_CONSTANT isn't enough:
6089 static var addresses are constant but we can't
6090 do arithmetic on them. */
6091 wins = 0;
6093 if (i == 0)
6094 arg0 = op;
6095 else if (i == 1)
6096 arg1 = op;
6100 /* If this is a commutative operation, and ARG0 is a constant, move it
6101 to ARG1 to reduce the number of tests below. */
6102 if (commutative_tree_code (code)
6103 && tree_swap_operands_p (arg0, arg1, true))
6104 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6105 TREE_OPERAND (t, 0)));
6107 /* Now WINS is set as described above,
6108 ARG0 is the first operand of EXPR,
6109 and ARG1 is the second operand (if it has more than one operand).
6111 First check for cases where an arithmetic operation is applied to a
6112 compound, conditional, or comparison operation. Push the arithmetic
6113 operation inside the compound or conditional to see if any folding
6114 can then be done. Convert comparison to conditional for this purpose.
6115 The also optimizes non-constant cases that used to be done in
6116 expand_expr.
6118 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6119 one of the operands is a comparison and the other is a comparison, a
6120 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6121 code below would make the expression more complex. Change it to a
6122 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6123 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6125 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6126 || code == EQ_EXPR || code == NE_EXPR)
6127 && ((truth_value_p (TREE_CODE (arg0))
6128 && (truth_value_p (TREE_CODE (arg1))
6129 || (TREE_CODE (arg1) == BIT_AND_EXPR
6130 && integer_onep (TREE_OPERAND (arg1, 1)))))
6131 || (truth_value_p (TREE_CODE (arg1))
6132 && (truth_value_p (TREE_CODE (arg0))
6133 || (TREE_CODE (arg0) == BIT_AND_EXPR
6134 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6136 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6137 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6138 : TRUTH_XOR_EXPR,
6139 type, fold_convert (boolean_type_node, arg0),
6140 fold_convert (boolean_type_node, arg1)));
6142 if (code == EQ_EXPR)
6143 tem = invert_truthvalue (tem);
6145 return tem;
6148 if (TREE_CODE_CLASS (code) == '1')
6150 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6151 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6152 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6153 else if (TREE_CODE (arg0) == COND_EXPR)
6155 tree arg01 = TREE_OPERAND (arg0, 1);
6156 tree arg02 = TREE_OPERAND (arg0, 2);
6157 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6158 arg01 = fold (build1 (code, type, arg01));
6159 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6160 arg02 = fold (build1 (code, type, arg02));
6161 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6162 arg01, arg02));
6164 /* If this was a conversion, and all we did was to move into
6165 inside the COND_EXPR, bring it back out. But leave it if
6166 it is a conversion from integer to integer and the
6167 result precision is no wider than a word since such a
6168 conversion is cheap and may be optimized away by combine,
6169 while it couldn't if it were outside the COND_EXPR. Then return
6170 so we don't get into an infinite recursion loop taking the
6171 conversion out and then back in. */
6173 if ((code == NOP_EXPR || code == CONVERT_EXPR
6174 || code == NON_LVALUE_EXPR)
6175 && TREE_CODE (tem) == COND_EXPR
6176 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6177 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6178 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6179 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6180 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6181 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6182 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6183 && (INTEGRAL_TYPE_P
6184 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6185 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6186 tem = build1 (code, type,
6187 build3 (COND_EXPR,
6188 TREE_TYPE (TREE_OPERAND
6189 (TREE_OPERAND (tem, 1), 0)),
6190 TREE_OPERAND (tem, 0),
6191 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6192 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6193 return tem;
6195 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6197 if (TREE_CODE (type) == BOOLEAN_TYPE)
6199 arg0 = copy_node (arg0);
6200 TREE_TYPE (arg0) = type;
6201 return arg0;
6203 else if (TREE_CODE (type) != INTEGER_TYPE)
6204 return fold (build3 (COND_EXPR, type, arg0,
6205 fold (build1 (code, type,
6206 integer_one_node)),
6207 fold (build1 (code, type,
6208 integer_zero_node))));
6211 else if (TREE_CODE_CLASS (code) == '<'
6212 && TREE_CODE (arg0) == COMPOUND_EXPR)
6213 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6214 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6215 else if (TREE_CODE_CLASS (code) == '<'
6216 && TREE_CODE (arg1) == COMPOUND_EXPR)
6217 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6218 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6219 else if (TREE_CODE_CLASS (code) == '2'
6220 || TREE_CODE_CLASS (code) == '<')
6222 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6223 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6224 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6225 arg1)));
6226 if (TREE_CODE (arg1) == COMPOUND_EXPR
6227 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6228 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6229 fold (build2 (code, type,
6230 arg0, TREE_OPERAND (arg1, 1))));
6232 if (TREE_CODE (arg0) == COND_EXPR
6233 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6235 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6236 /*cond_first_p=*/1);
6237 if (tem != NULL_TREE)
6238 return tem;
6241 if (TREE_CODE (arg1) == COND_EXPR
6242 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6244 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6245 /*cond_first_p=*/0);
6246 if (tem != NULL_TREE)
6247 return tem;
6251 switch (code)
6253 case CONST_DECL:
6254 return fold (DECL_INITIAL (t));
6256 case NOP_EXPR:
6257 case FLOAT_EXPR:
6258 case CONVERT_EXPR:
6259 case FIX_TRUNC_EXPR:
6260 case FIX_CEIL_EXPR:
6261 case FIX_FLOOR_EXPR:
6262 case FIX_ROUND_EXPR:
6263 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6264 return TREE_OPERAND (t, 0);
6266 /* Handle cases of two conversions in a row. */
6267 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6268 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6270 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6271 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6272 int inside_int = INTEGRAL_TYPE_P (inside_type);
6273 int inside_ptr = POINTER_TYPE_P (inside_type);
6274 int inside_float = FLOAT_TYPE_P (inside_type);
6275 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6276 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6277 int inter_int = INTEGRAL_TYPE_P (inter_type);
6278 int inter_ptr = POINTER_TYPE_P (inter_type);
6279 int inter_float = FLOAT_TYPE_P (inter_type);
6280 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6281 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6282 int final_int = INTEGRAL_TYPE_P (type);
6283 int final_ptr = POINTER_TYPE_P (type);
6284 int final_float = FLOAT_TYPE_P (type);
6285 unsigned int final_prec = TYPE_PRECISION (type);
6286 int final_unsignedp = TYPE_UNSIGNED (type);
6288 /* In addition to the cases of two conversions in a row
6289 handled below, if we are converting something to its own
6290 type via an object of identical or wider precision, neither
6291 conversion is needed. */
6292 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6293 && ((inter_int && final_int) || (inter_float && final_float))
6294 && inter_prec >= final_prec)
6295 return fold (build1 (code, type,
6296 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6298 /* Likewise, if the intermediate and final types are either both
6299 float or both integer, we don't need the middle conversion if
6300 it is wider than the final type and doesn't change the signedness
6301 (for integers). Avoid this if the final type is a pointer
6302 since then we sometimes need the inner conversion. Likewise if
6303 the outer has a precision not equal to the size of its mode. */
6304 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6305 || (inter_float && inside_float))
6306 && inter_prec >= inside_prec
6307 && (inter_float || inter_unsignedp == inside_unsignedp)
6308 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6309 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6310 && ! final_ptr)
6311 return fold (build1 (code, type,
6312 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6314 /* If we have a sign-extension of a zero-extended value, we can
6315 replace that by a single zero-extension. */
6316 if (inside_int && inter_int && final_int
6317 && inside_prec < inter_prec && inter_prec < final_prec
6318 && inside_unsignedp && !inter_unsignedp)
6319 return fold (build1 (code, type,
6320 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6322 /* Two conversions in a row are not needed unless:
6323 - some conversion is floating-point (overstrict for now), or
6324 - the intermediate type is narrower than both initial and
6325 final, or
6326 - the intermediate type and innermost type differ in signedness,
6327 and the outermost type is wider than the intermediate, or
6328 - the initial type is a pointer type and the precisions of the
6329 intermediate and final types differ, or
6330 - the final type is a pointer type and the precisions of the
6331 initial and intermediate types differ. */
6332 if (! inside_float && ! inter_float && ! final_float
6333 && (inter_prec > inside_prec || inter_prec > final_prec)
6334 && ! (inside_int && inter_int
6335 && inter_unsignedp != inside_unsignedp
6336 && inter_prec < final_prec)
6337 && ((inter_unsignedp && inter_prec > inside_prec)
6338 == (final_unsignedp && final_prec > inter_prec))
6339 && ! (inside_ptr && inter_prec != final_prec)
6340 && ! (final_ptr && inside_prec != inter_prec)
6341 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6342 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6343 && ! final_ptr)
6344 return fold (build1 (code, type,
6345 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6348 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6349 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6350 /* Detect assigning a bitfield. */
6351 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6352 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6354 /* Don't leave an assignment inside a conversion
6355 unless assigning a bitfield. */
6356 tree prev = TREE_OPERAND (t, 0);
6357 tem = copy_node (t);
6358 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6359 /* First do the assignment, then return converted constant. */
6360 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6361 TREE_NO_WARNING (tem) = 1;
6362 TREE_USED (tem) = 1;
6363 return tem;
6366 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6367 constant (if x has signed type, the sign bit cannot be set
6368 in c). This folds extension into the BIT_AND_EXPR. */
6369 if (INTEGRAL_TYPE_P (type)
6370 && TREE_CODE (type) != BOOLEAN_TYPE
6371 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6372 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6374 tree and = TREE_OPERAND (t, 0);
6375 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6376 int change = 0;
6378 if (TYPE_UNSIGNED (TREE_TYPE (and))
6379 || (TYPE_PRECISION (type)
6380 <= TYPE_PRECISION (TREE_TYPE (and))))
6381 change = 1;
6382 else if (TYPE_PRECISION (TREE_TYPE (and1))
6383 <= HOST_BITS_PER_WIDE_INT
6384 && host_integerp (and1, 1))
6386 unsigned HOST_WIDE_INT cst;
6388 cst = tree_low_cst (and1, 1);
6389 cst &= (HOST_WIDE_INT) -1
6390 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6391 change = (cst == 0);
6392 #ifdef LOAD_EXTEND_OP
6393 if (change
6394 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6395 == ZERO_EXTEND))
6397 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6398 and0 = fold_convert (uns, and0);
6399 and1 = fold_convert (uns, and1);
6401 #endif
6403 if (change)
6404 return fold (build2 (BIT_AND_EXPR, type,
6405 fold_convert (type, and0),
6406 fold_convert (type, and1)));
6409 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6410 T2 being pointers to types of the same size. */
6411 if (POINTER_TYPE_P (TREE_TYPE (t))
6412 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6413 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6414 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6416 tree arg00 = TREE_OPERAND (arg0, 0);
6417 tree t0 = TREE_TYPE (t);
6418 tree t1 = TREE_TYPE (arg00);
6419 tree tt0 = TREE_TYPE (t0);
6420 tree tt1 = TREE_TYPE (t1);
6421 tree s0 = TYPE_SIZE (tt0);
6422 tree s1 = TYPE_SIZE (tt1);
6424 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6425 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6426 TREE_OPERAND (arg0, 1));
6429 tem = fold_convert_const (code, type, arg0);
6430 return tem ? tem : t;
6432 case VIEW_CONVERT_EXPR:
6433 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6434 return build1 (VIEW_CONVERT_EXPR, type,
6435 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6436 return t;
6438 case COMPONENT_REF:
6439 if (TREE_CODE (arg0) == CONSTRUCTOR
6440 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6442 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6443 if (m)
6444 return TREE_VALUE (m);
6446 return t;
6448 case RANGE_EXPR:
6449 if (TREE_CONSTANT (t) != wins)
6451 tem = copy_node (t);
6452 TREE_CONSTANT (tem) = wins;
6453 TREE_INVARIANT (tem) = wins;
6454 return tem;
6456 return t;
6458 case NEGATE_EXPR:
6459 if (negate_expr_p (arg0))
6460 return fold_convert (type, negate_expr (arg0));
6461 return t;
6463 case ABS_EXPR:
6464 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6465 return fold_abs_const (arg0, type);
6466 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6467 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6468 /* Convert fabs((double)float) into (double)fabsf(float). */
6469 else if (TREE_CODE (arg0) == NOP_EXPR
6470 && TREE_CODE (type) == REAL_TYPE)
6472 tree targ0 = strip_float_extensions (arg0);
6473 if (targ0 != arg0)
6474 return fold_convert (type, fold (build1 (ABS_EXPR,
6475 TREE_TYPE (targ0),
6476 targ0)));
6478 else if (tree_expr_nonnegative_p (arg0))
6479 return arg0;
6480 return t;
6482 case CONJ_EXPR:
6483 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6484 return fold_convert (type, arg0);
6485 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6486 return build2 (COMPLEX_EXPR, type,
6487 TREE_OPERAND (arg0, 0),
6488 negate_expr (TREE_OPERAND (arg0, 1)));
6489 else if (TREE_CODE (arg0) == COMPLEX_CST)
6490 return build_complex (type, TREE_REALPART (arg0),
6491 negate_expr (TREE_IMAGPART (arg0)));
6492 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6493 return fold (build2 (TREE_CODE (arg0), type,
6494 fold (build1 (CONJ_EXPR, type,
6495 TREE_OPERAND (arg0, 0))),
6496 fold (build1 (CONJ_EXPR, type,
6497 TREE_OPERAND (arg0, 1)))));
6498 else if (TREE_CODE (arg0) == CONJ_EXPR)
6499 return TREE_OPERAND (arg0, 0);
6500 return t;
6502 case BIT_NOT_EXPR:
6503 if (TREE_CODE (arg0) == INTEGER_CST)
6504 return fold_not_const (arg0, type);
6505 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6506 return TREE_OPERAND (arg0, 0);
6507 return t;
6509 case PLUS_EXPR:
6510 /* A + (-B) -> A - B */
6511 if (TREE_CODE (arg1) == NEGATE_EXPR)
6512 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6513 /* (-A) + B -> B - A */
6514 if (TREE_CODE (arg0) == NEGATE_EXPR
6515 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6516 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6517 if (! FLOAT_TYPE_P (type))
6519 if (integer_zerop (arg1))
6520 return non_lvalue (fold_convert (type, arg0));
6522 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6523 with a constant, and the two constants have no bits in common,
6524 we should treat this as a BIT_IOR_EXPR since this may produce more
6525 simplifications. */
6526 if (TREE_CODE (arg0) == BIT_AND_EXPR
6527 && TREE_CODE (arg1) == BIT_AND_EXPR
6528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6529 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6530 && integer_zerop (const_binop (BIT_AND_EXPR,
6531 TREE_OPERAND (arg0, 1),
6532 TREE_OPERAND (arg1, 1), 0)))
6534 code = BIT_IOR_EXPR;
6535 goto bit_ior;
6538 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6539 (plus (plus (mult) (mult)) (foo)) so that we can
6540 take advantage of the factoring cases below. */
6541 if ((TREE_CODE (arg0) == PLUS_EXPR
6542 && TREE_CODE (arg1) == MULT_EXPR)
6543 || (TREE_CODE (arg1) == PLUS_EXPR
6544 && TREE_CODE (arg0) == MULT_EXPR))
6546 tree parg0, parg1, parg, marg;
6548 if (TREE_CODE (arg0) == PLUS_EXPR)
6549 parg = arg0, marg = arg1;
6550 else
6551 parg = arg1, marg = arg0;
6552 parg0 = TREE_OPERAND (parg, 0);
6553 parg1 = TREE_OPERAND (parg, 1);
6554 STRIP_NOPS (parg0);
6555 STRIP_NOPS (parg1);
6557 if (TREE_CODE (parg0) == MULT_EXPR
6558 && TREE_CODE (parg1) != MULT_EXPR)
6559 return fold (build2 (PLUS_EXPR, type,
6560 fold (build2 (PLUS_EXPR, type,
6561 fold_convert (type, parg0),
6562 fold_convert (type, marg))),
6563 fold_convert (type, parg1)));
6564 if (TREE_CODE (parg0) != MULT_EXPR
6565 && TREE_CODE (parg1) == MULT_EXPR)
6566 return fold (build2 (PLUS_EXPR, type,
6567 fold (build2 (PLUS_EXPR, type,
6568 fold_convert (type, parg1),
6569 fold_convert (type, marg))),
6570 fold_convert (type, parg0)));
6573 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6575 tree arg00, arg01, arg10, arg11;
6576 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6578 /* (A * C) + (B * C) -> (A+B) * C.
6579 We are most concerned about the case where C is a constant,
6580 but other combinations show up during loop reduction. Since
6581 it is not difficult, try all four possibilities. */
6583 arg00 = TREE_OPERAND (arg0, 0);
6584 arg01 = TREE_OPERAND (arg0, 1);
6585 arg10 = TREE_OPERAND (arg1, 0);
6586 arg11 = TREE_OPERAND (arg1, 1);
6587 same = NULL_TREE;
6589 if (operand_equal_p (arg01, arg11, 0))
6590 same = arg01, alt0 = arg00, alt1 = arg10;
6591 else if (operand_equal_p (arg00, arg10, 0))
6592 same = arg00, alt0 = arg01, alt1 = arg11;
6593 else if (operand_equal_p (arg00, arg11, 0))
6594 same = arg00, alt0 = arg01, alt1 = arg10;
6595 else if (operand_equal_p (arg01, arg10, 0))
6596 same = arg01, alt0 = arg00, alt1 = arg11;
6598 /* No identical multiplicands; see if we can find a common
6599 power-of-two factor in non-power-of-two multiplies. This
6600 can help in multi-dimensional array access. */
6601 else if (TREE_CODE (arg01) == INTEGER_CST
6602 && TREE_CODE (arg11) == INTEGER_CST
6603 && TREE_INT_CST_HIGH (arg01) == 0
6604 && TREE_INT_CST_HIGH (arg11) == 0)
6606 HOST_WIDE_INT int01, int11, tmp;
6607 int01 = TREE_INT_CST_LOW (arg01);
6608 int11 = TREE_INT_CST_LOW (arg11);
6610 /* Move min of absolute values to int11. */
6611 if ((int01 >= 0 ? int01 : -int01)
6612 < (int11 >= 0 ? int11 : -int11))
6614 tmp = int01, int01 = int11, int11 = tmp;
6615 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6616 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6619 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6621 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6622 build_int_cst (NULL_TREE,
6623 int01 / int11)));
6624 alt1 = arg10;
6625 same = arg11;
6629 if (same)
6630 return fold (build2 (MULT_EXPR, type,
6631 fold (build2 (PLUS_EXPR, type,
6632 alt0, alt1)),
6633 same));
6636 else
6638 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6639 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6640 return non_lvalue (fold_convert (type, arg0));
6642 /* Likewise if the operands are reversed. */
6643 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6644 return non_lvalue (fold_convert (type, arg1));
6646 /* Convert X + -C into X - C. */
6647 if (TREE_CODE (arg1) == REAL_CST
6648 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6650 tem = fold_negate_const (arg1, type);
6651 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6652 return fold (build2 (MINUS_EXPR, type,
6653 fold_convert (type, arg0),
6654 fold_convert (type, tem)));
6657 /* Convert x+x into x*2.0. */
6658 if (operand_equal_p (arg0, arg1, 0)
6659 && SCALAR_FLOAT_TYPE_P (type))
6660 return fold (build2 (MULT_EXPR, type, arg0,
6661 build_real (type, dconst2)));
6663 /* Convert x*c+x into x*(c+1). */
6664 if (flag_unsafe_math_optimizations
6665 && TREE_CODE (arg0) == MULT_EXPR
6666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6667 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6668 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6670 REAL_VALUE_TYPE c;
6672 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6673 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6674 return fold (build2 (MULT_EXPR, type, arg1,
6675 build_real (type, c)));
6678 /* Convert x+x*c into x*(c+1). */
6679 if (flag_unsafe_math_optimizations
6680 && TREE_CODE (arg1) == MULT_EXPR
6681 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6682 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6683 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6685 REAL_VALUE_TYPE c;
6687 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6688 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6689 return fold (build2 (MULT_EXPR, type, arg0,
6690 build_real (type, c)));
6693 /* Convert x*c1+x*c2 into x*(c1+c2). */
6694 if (flag_unsafe_math_optimizations
6695 && TREE_CODE (arg0) == MULT_EXPR
6696 && TREE_CODE (arg1) == MULT_EXPR
6697 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6698 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6699 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6700 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6701 && operand_equal_p (TREE_OPERAND (arg0, 0),
6702 TREE_OPERAND (arg1, 0), 0))
6704 REAL_VALUE_TYPE c1, c2;
6706 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6707 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6708 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6709 return fold (build2 (MULT_EXPR, type,
6710 TREE_OPERAND (arg0, 0),
6711 build_real (type, c1)));
6713 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6714 if (flag_unsafe_math_optimizations
6715 && TREE_CODE (arg1) == PLUS_EXPR
6716 && TREE_CODE (arg0) != MULT_EXPR)
6718 tree tree10 = TREE_OPERAND (arg1, 0);
6719 tree tree11 = TREE_OPERAND (arg1, 1);
6720 if (TREE_CODE (tree11) == MULT_EXPR
6721 && TREE_CODE (tree10) == MULT_EXPR)
6723 tree tree0;
6724 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6725 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6728 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6729 if (flag_unsafe_math_optimizations
6730 && TREE_CODE (arg0) == PLUS_EXPR
6731 && TREE_CODE (arg1) != MULT_EXPR)
6733 tree tree00 = TREE_OPERAND (arg0, 0);
6734 tree tree01 = TREE_OPERAND (arg0, 1);
6735 if (TREE_CODE (tree01) == MULT_EXPR
6736 && TREE_CODE (tree00) == MULT_EXPR)
6738 tree tree0;
6739 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6740 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6745 bit_rotate:
6746 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6747 is a rotate of A by C1 bits. */
6748 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6749 is a rotate of A by B bits. */
6751 enum tree_code code0, code1;
6752 code0 = TREE_CODE (arg0);
6753 code1 = TREE_CODE (arg1);
6754 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6755 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6756 && operand_equal_p (TREE_OPERAND (arg0, 0),
6757 TREE_OPERAND (arg1, 0), 0)
6758 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6760 tree tree01, tree11;
6761 enum tree_code code01, code11;
6763 tree01 = TREE_OPERAND (arg0, 1);
6764 tree11 = TREE_OPERAND (arg1, 1);
6765 STRIP_NOPS (tree01);
6766 STRIP_NOPS (tree11);
6767 code01 = TREE_CODE (tree01);
6768 code11 = TREE_CODE (tree11);
6769 if (code01 == INTEGER_CST
6770 && code11 == INTEGER_CST
6771 && TREE_INT_CST_HIGH (tree01) == 0
6772 && TREE_INT_CST_HIGH (tree11) == 0
6773 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6774 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6775 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6776 code0 == LSHIFT_EXPR ? tree01 : tree11);
6777 else if (code11 == MINUS_EXPR)
6779 tree tree110, tree111;
6780 tree110 = TREE_OPERAND (tree11, 0);
6781 tree111 = TREE_OPERAND (tree11, 1);
6782 STRIP_NOPS (tree110);
6783 STRIP_NOPS (tree111);
6784 if (TREE_CODE (tree110) == INTEGER_CST
6785 && 0 == compare_tree_int (tree110,
6786 TYPE_PRECISION
6787 (TREE_TYPE (TREE_OPERAND
6788 (arg0, 0))))
6789 && operand_equal_p (tree01, tree111, 0))
6790 return build2 ((code0 == LSHIFT_EXPR
6791 ? LROTATE_EXPR
6792 : RROTATE_EXPR),
6793 type, TREE_OPERAND (arg0, 0), tree01);
6795 else if (code01 == MINUS_EXPR)
6797 tree tree010, tree011;
6798 tree010 = TREE_OPERAND (tree01, 0);
6799 tree011 = TREE_OPERAND (tree01, 1);
6800 STRIP_NOPS (tree010);
6801 STRIP_NOPS (tree011);
6802 if (TREE_CODE (tree010) == INTEGER_CST
6803 && 0 == compare_tree_int (tree010,
6804 TYPE_PRECISION
6805 (TREE_TYPE (TREE_OPERAND
6806 (arg0, 0))))
6807 && operand_equal_p (tree11, tree011, 0))
6808 return build2 ((code0 != LSHIFT_EXPR
6809 ? LROTATE_EXPR
6810 : RROTATE_EXPR),
6811 type, TREE_OPERAND (arg0, 0), tree11);
6816 associate:
6817 /* In most languages, can't associate operations on floats through
6818 parentheses. Rather than remember where the parentheses were, we
6819 don't associate floats at all, unless the user has specified
6820 -funsafe-math-optimizations. */
6822 if (! wins
6823 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6825 tree var0, con0, lit0, minus_lit0;
6826 tree var1, con1, lit1, minus_lit1;
6828 /* Split both trees into variables, constants, and literals. Then
6829 associate each group together, the constants with literals,
6830 then the result with variables. This increases the chances of
6831 literals being recombined later and of generating relocatable
6832 expressions for the sum of a constant and literal. */
6833 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6834 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6835 code == MINUS_EXPR);
6837 /* Only do something if we found more than two objects. Otherwise,
6838 nothing has changed and we risk infinite recursion. */
6839 if (2 < ((var0 != 0) + (var1 != 0)
6840 + (con0 != 0) + (con1 != 0)
6841 + (lit0 != 0) + (lit1 != 0)
6842 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6844 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6845 if (code == MINUS_EXPR)
6846 code = PLUS_EXPR;
6848 var0 = associate_trees (var0, var1, code, type);
6849 con0 = associate_trees (con0, con1, code, type);
6850 lit0 = associate_trees (lit0, lit1, code, type);
6851 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6853 /* Preserve the MINUS_EXPR if the negative part of the literal is
6854 greater than the positive part. Otherwise, the multiplicative
6855 folding code (i.e extract_muldiv) may be fooled in case
6856 unsigned constants are subtracted, like in the following
6857 example: ((X*2 + 4) - 8U)/2. */
6858 if (minus_lit0 && lit0)
6860 if (TREE_CODE (lit0) == INTEGER_CST
6861 && TREE_CODE (minus_lit0) == INTEGER_CST
6862 && tree_int_cst_lt (lit0, minus_lit0))
6864 minus_lit0 = associate_trees (minus_lit0, lit0,
6865 MINUS_EXPR, type);
6866 lit0 = 0;
6868 else
6870 lit0 = associate_trees (lit0, minus_lit0,
6871 MINUS_EXPR, type);
6872 minus_lit0 = 0;
6875 if (minus_lit0)
6877 if (con0 == 0)
6878 return fold_convert (type,
6879 associate_trees (var0, minus_lit0,
6880 MINUS_EXPR, type));
6881 else
6883 con0 = associate_trees (con0, minus_lit0,
6884 MINUS_EXPR, type);
6885 return fold_convert (type,
6886 associate_trees (var0, con0,
6887 PLUS_EXPR, type));
6891 con0 = associate_trees (con0, lit0, code, type);
6892 return fold_convert (type, associate_trees (var0, con0,
6893 code, type));
6897 binary:
6898 if (wins)
6899 t1 = const_binop (code, arg0, arg1, 0);
6900 if (t1 != NULL_TREE)
6902 /* The return value should always have
6903 the same type as the original expression. */
6904 if (TREE_TYPE (t1) != type)
6905 t1 = fold_convert (type, t1);
6907 return t1;
6909 return t;
6911 case MINUS_EXPR:
6912 /* A - (-B) -> A + B */
6913 if (TREE_CODE (arg1) == NEGATE_EXPR)
6914 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6915 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6916 if (TREE_CODE (arg0) == NEGATE_EXPR
6917 && (FLOAT_TYPE_P (type)
6918 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6919 && negate_expr_p (arg1)
6920 && reorder_operands_p (arg0, arg1))
6921 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6922 TREE_OPERAND (arg0, 0)));
6924 if (! FLOAT_TYPE_P (type))
6926 if (! wins && integer_zerop (arg0))
6927 return negate_expr (fold_convert (type, arg1));
6928 if (integer_zerop (arg1))
6929 return non_lvalue (fold_convert (type, arg0));
6931 /* Fold A - (A & B) into ~B & A. */
6932 if (!TREE_SIDE_EFFECTS (arg0)
6933 && TREE_CODE (arg1) == BIT_AND_EXPR)
6935 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6936 return fold (build2 (BIT_AND_EXPR, type,
6937 fold (build1 (BIT_NOT_EXPR, type,
6938 TREE_OPERAND (arg1, 0))),
6939 arg0));
6940 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6941 return fold (build2 (BIT_AND_EXPR, type,
6942 fold (build1 (BIT_NOT_EXPR, type,
6943 TREE_OPERAND (arg1, 1))),
6944 arg0));
6947 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6948 any power of 2 minus 1. */
6949 if (TREE_CODE (arg0) == BIT_AND_EXPR
6950 && TREE_CODE (arg1) == BIT_AND_EXPR
6951 && operand_equal_p (TREE_OPERAND (arg0, 0),
6952 TREE_OPERAND (arg1, 0), 0))
6954 tree mask0 = TREE_OPERAND (arg0, 1);
6955 tree mask1 = TREE_OPERAND (arg1, 1);
6956 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6958 if (operand_equal_p (tem, mask1, 0))
6960 tem = fold (build2 (BIT_XOR_EXPR, type,
6961 TREE_OPERAND (arg0, 0), mask1));
6962 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6967 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6968 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6969 return non_lvalue (fold_convert (type, arg0));
6971 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6972 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6973 (-ARG1 + ARG0) reduces to -ARG1. */
6974 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6975 return negate_expr (fold_convert (type, arg1));
6977 /* Fold &x - &x. This can happen from &x.foo - &x.
6978 This is unsafe for certain floats even in non-IEEE formats.
6979 In IEEE, it is unsafe because it does wrong for NaNs.
6980 Also note that operand_equal_p is always false if an operand
6981 is volatile. */
6983 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6984 && operand_equal_p (arg0, arg1, 0))
6985 return fold_convert (type, integer_zero_node);
6987 /* A - B -> A + (-B) if B is easily negatable. */
6988 if (!wins && negate_expr_p (arg1)
6989 && ((FLOAT_TYPE_P (type)
6990 /* Avoid this transformation if B is a positive REAL_CST. */
6991 && (TREE_CODE (arg1) != REAL_CST
6992 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6993 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6994 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6996 if (TREE_CODE (arg0) == MULT_EXPR
6997 && TREE_CODE (arg1) == MULT_EXPR
6998 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
7000 /* (A * C) - (B * C) -> (A-B) * C. */
7001 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7002 TREE_OPERAND (arg1, 1), 0))
7003 return fold (build2 (MULT_EXPR, type,
7004 fold (build2 (MINUS_EXPR, type,
7005 TREE_OPERAND (arg0, 0),
7006 TREE_OPERAND (arg1, 0))),
7007 TREE_OPERAND (arg0, 1)));
7008 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7009 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7010 TREE_OPERAND (arg1, 0), 0))
7011 return fold (build2 (MULT_EXPR, type,
7012 TREE_OPERAND (arg0, 0),
7013 fold (build2 (MINUS_EXPR, type,
7014 TREE_OPERAND (arg0, 1),
7015 TREE_OPERAND (arg1, 1)))));
7018 goto associate;
7020 case MULT_EXPR:
7021 /* (-A) * (-B) -> A * B */
7022 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7023 return fold (build2 (MULT_EXPR, type,
7024 TREE_OPERAND (arg0, 0),
7025 negate_expr (arg1)));
7026 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7027 return fold (build2 (MULT_EXPR, type,
7028 negate_expr (arg0),
7029 TREE_OPERAND (arg1, 0)));
7031 if (! FLOAT_TYPE_P (type))
7033 if (integer_zerop (arg1))
7034 return omit_one_operand (type, arg1, arg0);
7035 if (integer_onep (arg1))
7036 return non_lvalue (fold_convert (type, arg0));
7038 /* (a * (1 << b)) is (a << b) */
7039 if (TREE_CODE (arg1) == LSHIFT_EXPR
7040 && integer_onep (TREE_OPERAND (arg1, 0)))
7041 return fold (build2 (LSHIFT_EXPR, type, arg0,
7042 TREE_OPERAND (arg1, 1)));
7043 if (TREE_CODE (arg0) == LSHIFT_EXPR
7044 && integer_onep (TREE_OPERAND (arg0, 0)))
7045 return fold (build2 (LSHIFT_EXPR, type, arg1,
7046 TREE_OPERAND (arg0, 1)));
7048 if (TREE_CODE (arg1) == INTEGER_CST
7049 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7050 fold_convert (type, arg1),
7051 code, NULL_TREE)))
7052 return fold_convert (type, tem);
7055 else
7057 /* Maybe fold x * 0 to 0. The expressions aren't the same
7058 when x is NaN, since x * 0 is also NaN. Nor are they the
7059 same in modes with signed zeros, since multiplying a
7060 negative value by 0 gives -0, not +0. */
7061 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7062 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7063 && real_zerop (arg1))
7064 return omit_one_operand (type, arg1, arg0);
7065 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7066 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7067 && real_onep (arg1))
7068 return non_lvalue (fold_convert (type, arg0));
7070 /* Transform x * -1.0 into -x. */
7071 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7072 && real_minus_onep (arg1))
7073 return fold_convert (type, negate_expr (arg0));
7075 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7076 if (flag_unsafe_math_optimizations
7077 && TREE_CODE (arg0) == RDIV_EXPR
7078 && TREE_CODE (arg1) == REAL_CST
7079 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7081 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7082 arg1, 0);
7083 if (tem)
7084 return fold (build2 (RDIV_EXPR, type, tem,
7085 TREE_OPERAND (arg0, 1)));
7088 if (flag_unsafe_math_optimizations)
7090 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7091 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7093 /* Optimizations of root(...)*root(...). */
7094 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7096 tree rootfn, arg, arglist;
7097 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7098 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7100 /* Optimize sqrt(x)*sqrt(x) as x. */
7101 if (BUILTIN_SQRT_P (fcode0)
7102 && operand_equal_p (arg00, arg10, 0)
7103 && ! HONOR_SNANS (TYPE_MODE (type)))
7104 return arg00;
7106 /* Optimize root(x)*root(y) as root(x*y). */
7107 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7108 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7109 arglist = build_tree_list (NULL_TREE, arg);
7110 return build_function_call_expr (rootfn, arglist);
7113 /* Optimize expN(x)*expN(y) as expN(x+y). */
7114 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7116 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7117 tree arg = build2 (PLUS_EXPR, type,
7118 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7119 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7120 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7121 return build_function_call_expr (expfn, arglist);
7124 /* Optimizations of pow(...)*pow(...). */
7125 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7126 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7127 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7129 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7130 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7131 1)));
7132 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7133 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7134 1)));
7136 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7137 if (operand_equal_p (arg01, arg11, 0))
7139 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7140 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7141 tree arglist = tree_cons (NULL_TREE, fold (arg),
7142 build_tree_list (NULL_TREE,
7143 arg01));
7144 return build_function_call_expr (powfn, arglist);
7147 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7148 if (operand_equal_p (arg00, arg10, 0))
7150 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7151 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7152 tree arglist = tree_cons (NULL_TREE, arg00,
7153 build_tree_list (NULL_TREE,
7154 arg));
7155 return build_function_call_expr (powfn, arglist);
7159 /* Optimize tan(x)*cos(x) as sin(x). */
7160 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7161 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7162 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7163 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7164 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7165 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7166 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7167 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7169 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7171 if (sinfn != NULL_TREE)
7172 return build_function_call_expr (sinfn,
7173 TREE_OPERAND (arg0, 1));
7176 /* Optimize x*pow(x,c) as pow(x,c+1). */
7177 if (fcode1 == BUILT_IN_POW
7178 || fcode1 == BUILT_IN_POWF
7179 || fcode1 == BUILT_IN_POWL)
7181 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7182 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7183 1)));
7184 if (TREE_CODE (arg11) == REAL_CST
7185 && ! TREE_CONSTANT_OVERFLOW (arg11)
7186 && operand_equal_p (arg0, arg10, 0))
7188 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7189 REAL_VALUE_TYPE c;
7190 tree arg, arglist;
7192 c = TREE_REAL_CST (arg11);
7193 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7194 arg = build_real (type, c);
7195 arglist = build_tree_list (NULL_TREE, arg);
7196 arglist = tree_cons (NULL_TREE, arg0, arglist);
7197 return build_function_call_expr (powfn, arglist);
7201 /* Optimize pow(x,c)*x as pow(x,c+1). */
7202 if (fcode0 == BUILT_IN_POW
7203 || fcode0 == BUILT_IN_POWF
7204 || fcode0 == BUILT_IN_POWL)
7206 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7207 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7208 1)));
7209 if (TREE_CODE (arg01) == REAL_CST
7210 && ! TREE_CONSTANT_OVERFLOW (arg01)
7211 && operand_equal_p (arg1, arg00, 0))
7213 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7214 REAL_VALUE_TYPE c;
7215 tree arg, arglist;
7217 c = TREE_REAL_CST (arg01);
7218 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7219 arg = build_real (type, c);
7220 arglist = build_tree_list (NULL_TREE, arg);
7221 arglist = tree_cons (NULL_TREE, arg1, arglist);
7222 return build_function_call_expr (powfn, arglist);
7226 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7227 if (! optimize_size
7228 && operand_equal_p (arg0, arg1, 0))
7230 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7232 if (powfn)
7234 tree arg = build_real (type, dconst2);
7235 tree arglist = build_tree_list (NULL_TREE, arg);
7236 arglist = tree_cons (NULL_TREE, arg0, arglist);
7237 return build_function_call_expr (powfn, arglist);
7242 goto associate;
7244 case BIT_IOR_EXPR:
7245 bit_ior:
7246 if (integer_all_onesp (arg1))
7247 return omit_one_operand (type, arg1, arg0);
7248 if (integer_zerop (arg1))
7249 return non_lvalue (fold_convert (type, arg0));
7250 if (operand_equal_p (arg0, arg1, 0))
7251 return non_lvalue (fold_convert (type, arg0));
7253 /* ~X | X is -1. */
7254 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7257 t1 = build_int_cst (type, -1);
7258 t1 = force_fit_type (t1, 0, false, false);
7259 return omit_one_operand (type, t1, arg1);
7262 /* X | ~X is -1. */
7263 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7264 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7266 t1 = build_int_cst (type, -1);
7267 t1 = force_fit_type (t1, 0, false, false);
7268 return omit_one_operand (type, t1, arg0);
7271 t1 = distribute_bit_expr (code, type, arg0, arg1);
7272 if (t1 != NULL_TREE)
7273 return t1;
7275 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7277 This results in more efficient code for machines without a NAND
7278 instruction. Combine will canonicalize to the first form
7279 which will allow use of NAND instructions provided by the
7280 backend if they exist. */
7281 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7282 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7284 return fold (build1 (BIT_NOT_EXPR, type,
7285 build2 (BIT_AND_EXPR, type,
7286 TREE_OPERAND (arg0, 0),
7287 TREE_OPERAND (arg1, 0))));
7290 /* See if this can be simplified into a rotate first. If that
7291 is unsuccessful continue in the association code. */
7292 goto bit_rotate;
7294 case BIT_XOR_EXPR:
7295 if (integer_zerop (arg1))
7296 return non_lvalue (fold_convert (type, arg0));
7297 if (integer_all_onesp (arg1))
7298 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7299 if (operand_equal_p (arg0, arg1, 0))
7300 return omit_one_operand (type, integer_zero_node, arg0);
7302 /* ~X ^ X is -1. */
7303 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7304 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7306 t1 = build_int_cst (type, -1);
7307 t1 = force_fit_type (t1, 0, false, false);
7308 return omit_one_operand (type, t1, arg1);
7311 /* X ^ ~X is -1. */
7312 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7313 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7315 t1 = build_int_cst (type, -1);
7316 t1 = force_fit_type (t1, 0, false, false);
7317 return omit_one_operand (type, t1, arg0);
7320 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7321 with a constant, and the two constants have no bits in common,
7322 we should treat this as a BIT_IOR_EXPR since this may produce more
7323 simplifications. */
7324 if (TREE_CODE (arg0) == BIT_AND_EXPR
7325 && TREE_CODE (arg1) == BIT_AND_EXPR
7326 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7327 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7328 && integer_zerop (const_binop (BIT_AND_EXPR,
7329 TREE_OPERAND (arg0, 1),
7330 TREE_OPERAND (arg1, 1), 0)))
7332 code = BIT_IOR_EXPR;
7333 goto bit_ior;
7336 /* See if this can be simplified into a rotate first. If that
7337 is unsuccessful continue in the association code. */
7338 goto bit_rotate;
7340 case BIT_AND_EXPR:
7341 if (integer_all_onesp (arg1))
7342 return non_lvalue (fold_convert (type, arg0));
7343 if (integer_zerop (arg1))
7344 return omit_one_operand (type, arg1, arg0);
7345 if (operand_equal_p (arg0, arg1, 0))
7346 return non_lvalue (fold_convert (type, arg0));
7348 /* ~X & X is always zero. */
7349 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7350 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7351 return omit_one_operand (type, integer_zero_node, arg1);
7353 /* X & ~X is always zero. */
7354 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7355 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7356 return omit_one_operand (type, integer_zero_node, arg0);
7358 t1 = distribute_bit_expr (code, type, arg0, arg1);
7359 if (t1 != NULL_TREE)
7360 return t1;
7361 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7362 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7363 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7365 unsigned int prec
7366 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7368 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7369 && (~TREE_INT_CST_LOW (arg1)
7370 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7371 return fold_convert (type, TREE_OPERAND (arg0, 0));
7374 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7376 This results in more efficient code for machines without a NOR
7377 instruction. Combine will canonicalize to the first form
7378 which will allow use of NOR instructions provided by the
7379 backend if they exist. */
7380 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7381 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7383 return fold (build1 (BIT_NOT_EXPR, type,
7384 build2 (BIT_IOR_EXPR, type,
7385 TREE_OPERAND (arg0, 0),
7386 TREE_OPERAND (arg1, 0))));
7389 goto associate;
7391 case RDIV_EXPR:
7392 /* Don't touch a floating-point divide by zero unless the mode
7393 of the constant can represent infinity. */
7394 if (TREE_CODE (arg1) == REAL_CST
7395 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7396 && real_zerop (arg1))
7397 return t;
7399 /* (-A) / (-B) -> A / B */
7400 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7401 return fold (build2 (RDIV_EXPR, type,
7402 TREE_OPERAND (arg0, 0),
7403 negate_expr (arg1)));
7404 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7405 return fold (build2 (RDIV_EXPR, type,
7406 negate_expr (arg0),
7407 TREE_OPERAND (arg1, 0)));
7409 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7410 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7411 && real_onep (arg1))
7412 return non_lvalue (fold_convert (type, arg0));
7414 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7415 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7416 && real_minus_onep (arg1))
7417 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7419 /* If ARG1 is a constant, we can convert this to a multiply by the
7420 reciprocal. This does not have the same rounding properties,
7421 so only do this if -funsafe-math-optimizations. We can actually
7422 always safely do it if ARG1 is a power of two, but it's hard to
7423 tell if it is or not in a portable manner. */
7424 if (TREE_CODE (arg1) == REAL_CST)
7426 if (flag_unsafe_math_optimizations
7427 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7428 arg1, 0)))
7429 return fold (build2 (MULT_EXPR, type, arg0, tem));
7430 /* Find the reciprocal if optimizing and the result is exact. */
7431 if (optimize)
7433 REAL_VALUE_TYPE r;
7434 r = TREE_REAL_CST (arg1);
7435 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7437 tem = build_real (type, r);
7438 return fold (build2 (MULT_EXPR, type, arg0, tem));
7442 /* Convert A/B/C to A/(B*C). */
7443 if (flag_unsafe_math_optimizations
7444 && TREE_CODE (arg0) == RDIV_EXPR)
7445 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7446 fold (build2 (MULT_EXPR, type,
7447 TREE_OPERAND (arg0, 1), arg1))));
7449 /* Convert A/(B/C) to (A/B)*C. */
7450 if (flag_unsafe_math_optimizations
7451 && TREE_CODE (arg1) == RDIV_EXPR)
7452 return fold (build2 (MULT_EXPR, type,
7453 fold (build2 (RDIV_EXPR, type, arg0,
7454 TREE_OPERAND (arg1, 0))),
7455 TREE_OPERAND (arg1, 1)));
7457 /* Convert C1/(X*C2) into (C1/C2)/X. */
7458 if (flag_unsafe_math_optimizations
7459 && TREE_CODE (arg1) == MULT_EXPR
7460 && TREE_CODE (arg0) == REAL_CST
7461 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7463 tree tem = const_binop (RDIV_EXPR, arg0,
7464 TREE_OPERAND (arg1, 1), 0);
7465 if (tem)
7466 return fold (build2 (RDIV_EXPR, type, tem,
7467 TREE_OPERAND (arg1, 0)));
7470 if (flag_unsafe_math_optimizations)
7472 enum built_in_function fcode = builtin_mathfn_code (arg1);
7473 /* Optimize x/expN(y) into x*expN(-y). */
7474 if (BUILTIN_EXPONENT_P (fcode))
7476 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7477 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7478 tree arglist = build_tree_list (NULL_TREE,
7479 fold_convert (type, arg));
7480 arg1 = build_function_call_expr (expfn, arglist);
7481 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7484 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7485 if (fcode == BUILT_IN_POW
7486 || fcode == BUILT_IN_POWF
7487 || fcode == BUILT_IN_POWL)
7489 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7490 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7491 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7492 tree neg11 = fold_convert (type, negate_expr (arg11));
7493 tree arglist = tree_cons(NULL_TREE, arg10,
7494 build_tree_list (NULL_TREE, neg11));
7495 arg1 = build_function_call_expr (powfn, arglist);
7496 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7500 if (flag_unsafe_math_optimizations)
7502 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7503 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7505 /* Optimize sin(x)/cos(x) as tan(x). */
7506 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7507 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7508 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7509 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7510 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7512 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7514 if (tanfn != NULL_TREE)
7515 return build_function_call_expr (tanfn,
7516 TREE_OPERAND (arg0, 1));
7519 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7520 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7521 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7522 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7523 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7524 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7526 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7528 if (tanfn != NULL_TREE)
7530 tree tmp = TREE_OPERAND (arg0, 1);
7531 tmp = build_function_call_expr (tanfn, tmp);
7532 return fold (build2 (RDIV_EXPR, type,
7533 build_real (type, dconst1), tmp));
7537 /* Optimize pow(x,c)/x as pow(x,c-1). */
7538 if (fcode0 == BUILT_IN_POW
7539 || fcode0 == BUILT_IN_POWF
7540 || fcode0 == BUILT_IN_POWL)
7542 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7543 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7544 if (TREE_CODE (arg01) == REAL_CST
7545 && ! TREE_CONSTANT_OVERFLOW (arg01)
7546 && operand_equal_p (arg1, arg00, 0))
7548 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7549 REAL_VALUE_TYPE c;
7550 tree arg, arglist;
7552 c = TREE_REAL_CST (arg01);
7553 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7554 arg = build_real (type, c);
7555 arglist = build_tree_list (NULL_TREE, arg);
7556 arglist = tree_cons (NULL_TREE, arg1, arglist);
7557 return build_function_call_expr (powfn, arglist);
7561 goto binary;
7563 case TRUNC_DIV_EXPR:
7564 case ROUND_DIV_EXPR:
7565 case FLOOR_DIV_EXPR:
7566 case CEIL_DIV_EXPR:
7567 case EXACT_DIV_EXPR:
7568 if (integer_onep (arg1))
7569 return non_lvalue (fold_convert (type, arg0));
7570 if (integer_zerop (arg1))
7571 return t;
7572 /* X / -1 is -X. */
7573 if (!TYPE_UNSIGNED (type)
7574 && TREE_CODE (arg1) == INTEGER_CST
7575 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7576 && TREE_INT_CST_HIGH (arg1) == -1)
7577 return fold_convert (type, negate_expr (arg0));
7579 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7580 operation, EXACT_DIV_EXPR.
7582 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7583 At one time others generated faster code, it's not clear if they do
7584 after the last round of changes to the DIV code in expmed.c. */
7585 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7586 && multiple_of_p (type, arg0, arg1))
7587 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7589 if (TREE_CODE (arg1) == INTEGER_CST
7590 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7591 code, NULL_TREE)))
7592 return fold_convert (type, tem);
7594 goto binary;
7596 case CEIL_MOD_EXPR:
7597 case FLOOR_MOD_EXPR:
7598 case ROUND_MOD_EXPR:
7599 case TRUNC_MOD_EXPR:
7600 if (integer_onep (arg1))
7601 return omit_one_operand (type, integer_zero_node, arg0);
7602 if (integer_zerop (arg1))
7603 return t;
7605 /* X % -1 is zero. */
7606 if (!TYPE_UNSIGNED (type)
7607 && TREE_CODE (arg1) == INTEGER_CST
7608 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7609 && TREE_INT_CST_HIGH (arg1) == -1)
7610 return omit_one_operand (type, integer_zero_node, arg0);
7612 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7613 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7614 if (code == TRUNC_MOD_EXPR
7615 && TYPE_UNSIGNED (type)
7616 && integer_pow2p (arg1))
7618 unsigned HOST_WIDE_INT high, low;
7619 tree mask;
7620 int l;
7622 l = tree_log2 (arg1);
7623 if (l >= HOST_BITS_PER_WIDE_INT)
7625 high = ((unsigned HOST_WIDE_INT) 1
7626 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7627 low = -1;
7629 else
7631 high = 0;
7632 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7635 mask = build_int_cst_wide (type, low, high);
7636 return fold (build2 (BIT_AND_EXPR, type,
7637 fold_convert (type, arg0), mask));
7640 /* X % -C is the same as X % C. */
7641 if (code == TRUNC_MOD_EXPR
7642 && !TYPE_UNSIGNED (type)
7643 && TREE_CODE (arg1) == INTEGER_CST
7644 && TREE_INT_CST_HIGH (arg1) < 0
7645 && !flag_trapv
7646 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7647 && !sign_bit_p (arg1, arg1))
7648 return fold (build2 (code, type, fold_convert (type, arg0),
7649 fold_convert (type, negate_expr (arg1))));
7651 /* X % -Y is the same as X % Y. */
7652 if (code == TRUNC_MOD_EXPR
7653 && !TYPE_UNSIGNED (type)
7654 && TREE_CODE (arg1) == NEGATE_EXPR
7655 && !flag_trapv)
7656 return fold (build2 (code, type, fold_convert (type, arg0),
7657 fold_convert (type, TREE_OPERAND (arg1, 0))));
7659 if (TREE_CODE (arg1) == INTEGER_CST
7660 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7661 code, NULL_TREE)))
7662 return fold_convert (type, tem);
7664 goto binary;
7666 case LROTATE_EXPR:
7667 case RROTATE_EXPR:
7668 if (integer_all_onesp (arg0))
7669 return omit_one_operand (type, arg0, arg1);
7670 goto shift;
7672 case RSHIFT_EXPR:
7673 /* Optimize -1 >> x for arithmetic right shifts. */
7674 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7675 return omit_one_operand (type, arg0, arg1);
7676 /* ... fall through ... */
7678 case LSHIFT_EXPR:
7679 shift:
7680 if (integer_zerop (arg1))
7681 return non_lvalue (fold_convert (type, arg0));
7682 if (integer_zerop (arg0))
7683 return omit_one_operand (type, arg0, arg1);
7685 /* Since negative shift count is not well-defined,
7686 don't try to compute it in the compiler. */
7687 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7688 return t;
7689 /* Rewrite an LROTATE_EXPR by a constant into an
7690 RROTATE_EXPR by a new constant. */
7691 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7693 tree tem = build_int_cst (NULL_TREE,
7694 GET_MODE_BITSIZE (TYPE_MODE (type)));
7695 tem = fold_convert (TREE_TYPE (arg1), tem);
7696 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7697 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7700 /* If we have a rotate of a bit operation with the rotate count and
7701 the second operand of the bit operation both constant,
7702 permute the two operations. */
7703 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7704 && (TREE_CODE (arg0) == BIT_AND_EXPR
7705 || TREE_CODE (arg0) == BIT_IOR_EXPR
7706 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7708 return fold (build2 (TREE_CODE (arg0), type,
7709 fold (build2 (code, type,
7710 TREE_OPERAND (arg0, 0), arg1)),
7711 fold (build2 (code, type,
7712 TREE_OPERAND (arg0, 1), arg1))));
7714 /* Two consecutive rotates adding up to the width of the mode can
7715 be ignored. */
7716 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7717 && TREE_CODE (arg0) == RROTATE_EXPR
7718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7719 && TREE_INT_CST_HIGH (arg1) == 0
7720 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7721 && ((TREE_INT_CST_LOW (arg1)
7722 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7723 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7724 return TREE_OPERAND (arg0, 0);
7726 goto binary;
7728 case MIN_EXPR:
7729 if (operand_equal_p (arg0, arg1, 0))
7730 return omit_one_operand (type, arg0, arg1);
7731 if (INTEGRAL_TYPE_P (type)
7732 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7733 return omit_one_operand (type, arg1, arg0);
7734 goto associate;
7736 case MAX_EXPR:
7737 if (operand_equal_p (arg0, arg1, 0))
7738 return omit_one_operand (type, arg0, arg1);
7739 if (INTEGRAL_TYPE_P (type)
7740 && TYPE_MAX_VALUE (type)
7741 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7742 return omit_one_operand (type, arg1, arg0);
7743 goto associate;
7745 case TRUTH_NOT_EXPR:
7746 /* The argument to invert_truthvalue must have Boolean type. */
7747 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7748 arg0 = fold_convert (boolean_type_node, arg0);
7750 /* Note that the operand of this must be an int
7751 and its values must be 0 or 1.
7752 ("true" is a fixed value perhaps depending on the language,
7753 but we don't handle values other than 1 correctly yet.) */
7754 tem = invert_truthvalue (arg0);
7755 /* Avoid infinite recursion. */
7756 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7758 tem = fold_single_bit_test (code, arg0, arg1, type);
7759 if (tem)
7760 return tem;
7761 return t;
7763 return fold_convert (type, tem);
7765 case TRUTH_ANDIF_EXPR:
7766 /* Note that the operands of this must be ints
7767 and their values must be 0 or 1.
7768 ("true" is a fixed value perhaps depending on the language.) */
7769 /* If first arg is constant zero, return it. */
7770 if (integer_zerop (arg0))
7771 return fold_convert (type, arg0);
7772 case TRUTH_AND_EXPR:
7773 /* If either arg is constant true, drop it. */
7774 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7775 return non_lvalue (fold_convert (type, arg1));
7776 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7777 /* Preserve sequence points. */
7778 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7779 return non_lvalue (fold_convert (type, arg0));
7780 /* If second arg is constant zero, result is zero, but first arg
7781 must be evaluated. */
7782 if (integer_zerop (arg1))
7783 return omit_one_operand (type, arg1, arg0);
7784 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7785 case will be handled here. */
7786 if (integer_zerop (arg0))
7787 return omit_one_operand (type, arg0, arg1);
7789 /* !X && X is always false. */
7790 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7791 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7792 return omit_one_operand (type, integer_zero_node, arg1);
7793 /* X && !X is always false. */
7794 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7795 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7796 return omit_one_operand (type, integer_zero_node, arg0);
7798 truth_andor:
7799 /* We only do these simplifications if we are optimizing. */
7800 if (!optimize)
7801 return t;
7803 /* Check for things like (A || B) && (A || C). We can convert this
7804 to A || (B && C). Note that either operator can be any of the four
7805 truth and/or operations and the transformation will still be
7806 valid. Also note that we only care about order for the
7807 ANDIF and ORIF operators. If B contains side effects, this
7808 might change the truth-value of A. */
7809 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7810 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7811 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7812 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7813 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7814 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7816 tree a00 = TREE_OPERAND (arg0, 0);
7817 tree a01 = TREE_OPERAND (arg0, 1);
7818 tree a10 = TREE_OPERAND (arg1, 0);
7819 tree a11 = TREE_OPERAND (arg1, 1);
7820 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7821 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7822 && (code == TRUTH_AND_EXPR
7823 || code == TRUTH_OR_EXPR));
7825 if (operand_equal_p (a00, a10, 0))
7826 return fold (build2 (TREE_CODE (arg0), type, a00,
7827 fold (build2 (code, type, a01, a11))));
7828 else if (commutative && operand_equal_p (a00, a11, 0))
7829 return fold (build2 (TREE_CODE (arg0), type, a00,
7830 fold (build2 (code, type, a01, a10))));
7831 else if (commutative && operand_equal_p (a01, a10, 0))
7832 return fold (build2 (TREE_CODE (arg0), type, a01,
7833 fold (build2 (code, type, a00, a11))));
7835 /* This case is tricky because we must either have commutative
7836 operators or else A10 must not have side-effects. */
7838 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7839 && operand_equal_p (a01, a11, 0))
7840 return fold (build2 (TREE_CODE (arg0), type,
7841 fold (build2 (code, type, a00, a10)),
7842 a01));
7845 /* See if we can build a range comparison. */
7846 if (0 != (tem = fold_range_test (t)))
7847 return tem;
7849 /* Check for the possibility of merging component references. If our
7850 lhs is another similar operation, try to merge its rhs with our
7851 rhs. Then try to merge our lhs and rhs. */
7852 if (TREE_CODE (arg0) == code
7853 && 0 != (tem = fold_truthop (code, type,
7854 TREE_OPERAND (arg0, 1), arg1)))
7855 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7857 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7858 return tem;
7860 return t;
7862 case TRUTH_ORIF_EXPR:
7863 /* Note that the operands of this must be ints
7864 and their values must be 0 or true.
7865 ("true" is a fixed value perhaps depending on the language.) */
7866 /* If first arg is constant true, return it. */
7867 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7868 return fold_convert (type, arg0);
7869 case TRUTH_OR_EXPR:
7870 /* If either arg is constant zero, drop it. */
7871 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7872 return non_lvalue (fold_convert (type, arg1));
7873 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7874 /* Preserve sequence points. */
7875 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7876 return non_lvalue (fold_convert (type, arg0));
7877 /* If second arg is constant true, result is true, but we must
7878 evaluate first arg. */
7879 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7880 return omit_one_operand (type, arg1, arg0);
7881 /* Likewise for first arg, but note this only occurs here for
7882 TRUTH_OR_EXPR. */
7883 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7884 return omit_one_operand (type, arg0, arg1);
7886 /* !X || X is always true. */
7887 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7888 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7889 return omit_one_operand (type, integer_one_node, arg1);
7890 /* X || !X is always true. */
7891 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7892 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7893 return omit_one_operand (type, integer_one_node, arg0);
7895 goto truth_andor;
7897 case TRUTH_XOR_EXPR:
7898 /* If the second arg is constant zero, drop it. */
7899 if (integer_zerop (arg1))
7900 return non_lvalue (fold_convert (type, arg0));
7901 /* If the second arg is constant true, this is a logical inversion. */
7902 if (integer_onep (arg1))
7903 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7904 /* Identical arguments cancel to zero. */
7905 if (operand_equal_p (arg0, arg1, 0))
7906 return omit_one_operand (type, integer_zero_node, arg0);
7908 /* !X ^ X is always true. */
7909 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7910 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7911 return omit_one_operand (type, integer_one_node, arg1);
7913 /* X ^ !X is always true. */
7914 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7915 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7916 return omit_one_operand (type, integer_one_node, arg0);
7918 return t;
7920 case EQ_EXPR:
7921 case NE_EXPR:
7922 case LT_EXPR:
7923 case GT_EXPR:
7924 case LE_EXPR:
7925 case GE_EXPR:
7926 /* If one arg is a real or integer constant, put it last. */
7927 if (tree_swap_operands_p (arg0, arg1, true))
7928 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7930 /* If this is an equality comparison of the address of a non-weak
7931 object against zero, then we know the result. */
7932 if ((code == EQ_EXPR || code == NE_EXPR)
7933 && TREE_CODE (arg0) == ADDR_EXPR
7934 && DECL_P (TREE_OPERAND (arg0, 0))
7935 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7936 && integer_zerop (arg1))
7937 return constant_boolean_node (code != EQ_EXPR, type);
7939 /* If this is an equality comparison of the address of two non-weak,
7940 unaliased symbols neither of which are extern (since we do not
7941 have access to attributes for externs), then we know the result. */
7942 if ((code == EQ_EXPR || code == NE_EXPR)
7943 && TREE_CODE (arg0) == ADDR_EXPR
7944 && DECL_P (TREE_OPERAND (arg0, 0))
7945 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7946 && ! lookup_attribute ("alias",
7947 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7948 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7949 && TREE_CODE (arg1) == ADDR_EXPR
7950 && DECL_P (TREE_OPERAND (arg1, 0))
7951 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7952 && ! lookup_attribute ("alias",
7953 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7954 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7955 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7956 ? code == EQ_EXPR : code != EQ_EXPR,
7957 type);
7959 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7961 tree targ0 = strip_float_extensions (arg0);
7962 tree targ1 = strip_float_extensions (arg1);
7963 tree newtype = TREE_TYPE (targ0);
7965 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7966 newtype = TREE_TYPE (targ1);
7968 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7969 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7970 return fold (build2 (code, type, fold_convert (newtype, targ0),
7971 fold_convert (newtype, targ1)));
7973 /* (-a) CMP (-b) -> b CMP a */
7974 if (TREE_CODE (arg0) == NEGATE_EXPR
7975 && TREE_CODE (arg1) == NEGATE_EXPR)
7976 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7977 TREE_OPERAND (arg0, 0)));
7979 if (TREE_CODE (arg1) == REAL_CST)
7981 REAL_VALUE_TYPE cst;
7982 cst = TREE_REAL_CST (arg1);
7984 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7985 if (TREE_CODE (arg0) == NEGATE_EXPR)
7986 return
7987 fold (build2 (swap_tree_comparison (code), type,
7988 TREE_OPERAND (arg0, 0),
7989 build_real (TREE_TYPE (arg1),
7990 REAL_VALUE_NEGATE (cst))));
7992 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7993 /* a CMP (-0) -> a CMP 0 */
7994 if (REAL_VALUE_MINUS_ZERO (cst))
7995 return fold (build2 (code, type, arg0,
7996 build_real (TREE_TYPE (arg1), dconst0)));
7998 /* x != NaN is always true, other ops are always false. */
7999 if (REAL_VALUE_ISNAN (cst)
8000 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8002 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8003 return omit_one_operand (type, tem, arg0);
8006 /* Fold comparisons against infinity. */
8007 if (REAL_VALUE_ISINF (cst))
8009 tem = fold_inf_compare (code, type, arg0, arg1);
8010 if (tem != NULL_TREE)
8011 return tem;
8015 /* If this is a comparison of a real constant with a PLUS_EXPR
8016 or a MINUS_EXPR of a real constant, we can convert it into a
8017 comparison with a revised real constant as long as no overflow
8018 occurs when unsafe_math_optimizations are enabled. */
8019 if (flag_unsafe_math_optimizations
8020 && TREE_CODE (arg1) == REAL_CST
8021 && (TREE_CODE (arg0) == PLUS_EXPR
8022 || TREE_CODE (arg0) == MINUS_EXPR)
8023 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8024 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8025 ? MINUS_EXPR : PLUS_EXPR,
8026 arg1, TREE_OPERAND (arg0, 1), 0))
8027 && ! TREE_CONSTANT_OVERFLOW (tem))
8028 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8030 /* Likewise, we can simplify a comparison of a real constant with
8031 a MINUS_EXPR whose first operand is also a real constant, i.e.
8032 (c1 - x) < c2 becomes x > c1-c2. */
8033 if (flag_unsafe_math_optimizations
8034 && TREE_CODE (arg1) == REAL_CST
8035 && TREE_CODE (arg0) == MINUS_EXPR
8036 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8037 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8038 arg1, 0))
8039 && ! TREE_CONSTANT_OVERFLOW (tem))
8040 return fold (build2 (swap_tree_comparison (code), type,
8041 TREE_OPERAND (arg0, 1), tem));
8043 /* Fold comparisons against built-in math functions. */
8044 if (TREE_CODE (arg1) == REAL_CST
8045 && flag_unsafe_math_optimizations
8046 && ! flag_errno_math)
8048 enum built_in_function fcode = builtin_mathfn_code (arg0);
8050 if (fcode != END_BUILTINS)
8052 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8053 if (tem != NULL_TREE)
8054 return tem;
8059 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8060 if (TREE_CONSTANT (arg1)
8061 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8062 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8063 /* This optimization is invalid for ordered comparisons
8064 if CONST+INCR overflows or if foo+incr might overflow.
8065 This optimization is invalid for floating point due to rounding.
8066 For pointer types we assume overflow doesn't happen. */
8067 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8068 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8069 && (code == EQ_EXPR || code == NE_EXPR))))
8071 tree varop, newconst;
8073 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8075 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8076 arg1, TREE_OPERAND (arg0, 1)));
8077 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8078 TREE_OPERAND (arg0, 0),
8079 TREE_OPERAND (arg0, 1));
8081 else
8083 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8084 arg1, TREE_OPERAND (arg0, 1)));
8085 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8086 TREE_OPERAND (arg0, 0),
8087 TREE_OPERAND (arg0, 1));
8091 /* If VAROP is a reference to a bitfield, we must mask
8092 the constant by the width of the field. */
8093 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8094 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8095 && host_integerp (DECL_SIZE (TREE_OPERAND
8096 (TREE_OPERAND (varop, 0), 1)), 1))
8098 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8099 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8100 tree folded_compare, shift;
8102 /* First check whether the comparison would come out
8103 always the same. If we don't do that we would
8104 change the meaning with the masking. */
8105 folded_compare = fold (build2 (code, type,
8106 TREE_OPERAND (varop, 0), arg1));
8107 if (integer_zerop (folded_compare)
8108 || integer_onep (folded_compare))
8109 return omit_one_operand (type, folded_compare, varop);
8111 shift = build_int_cst (NULL_TREE,
8112 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8113 shift = fold_convert (TREE_TYPE (varop), shift);
8114 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8115 newconst, shift));
8116 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8117 newconst, shift));
8120 return fold (build2 (code, type, varop, newconst));
8123 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8124 This transformation affects the cases which are handled in later
8125 optimizations involving comparisons with non-negative constants. */
8126 if (TREE_CODE (arg1) == INTEGER_CST
8127 && TREE_CODE (arg0) != INTEGER_CST
8128 && tree_int_cst_sgn (arg1) > 0)
8130 switch (code)
8132 case GE_EXPR:
8133 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8134 return fold (build2 (GT_EXPR, type, arg0, arg1));
8136 case LT_EXPR:
8137 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8138 return fold (build2 (LE_EXPR, type, arg0, arg1));
8140 default:
8141 break;
8145 /* Comparisons with the highest or lowest possible integer of
8146 the specified size will have known values.
8148 This is quite similar to fold_relational_hi_lo; however, my
8149 attempts to share the code have been nothing but trouble.
8150 I give up for now. */
8152 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8154 if (TREE_CODE (arg1) == INTEGER_CST
8155 && ! TREE_CONSTANT_OVERFLOW (arg1)
8156 && width <= HOST_BITS_PER_WIDE_INT
8157 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8158 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8160 unsigned HOST_WIDE_INT signed_max;
8161 unsigned HOST_WIDE_INT max, min;
8163 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8165 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8167 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8168 min = 0;
8170 else
8172 max = signed_max;
8173 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8176 if (TREE_INT_CST_HIGH (arg1) == 0
8177 && TREE_INT_CST_LOW (arg1) == max)
8178 switch (code)
8180 case GT_EXPR:
8181 return omit_one_operand (type, integer_zero_node, arg0);
8183 case GE_EXPR:
8184 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8186 case LE_EXPR:
8187 return omit_one_operand (type, integer_one_node, arg0);
8189 case LT_EXPR:
8190 return fold (build2 (NE_EXPR, type, arg0, arg1));
8192 /* The GE_EXPR and LT_EXPR cases above are not normally
8193 reached because of previous transformations. */
8195 default:
8196 break;
8198 else if (TREE_INT_CST_HIGH (arg1) == 0
8199 && TREE_INT_CST_LOW (arg1) == max - 1)
8200 switch (code)
8202 case GT_EXPR:
8203 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8204 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8205 case LE_EXPR:
8206 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8207 return fold (build2 (NE_EXPR, type, arg0, arg1));
8208 default:
8209 break;
8211 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8212 && TREE_INT_CST_LOW (arg1) == min)
8213 switch (code)
8215 case LT_EXPR:
8216 return omit_one_operand (type, integer_zero_node, arg0);
8218 case LE_EXPR:
8219 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8221 case GE_EXPR:
8222 return omit_one_operand (type, integer_one_node, arg0);
8224 case GT_EXPR:
8225 return fold (build2 (NE_EXPR, type, arg0, arg1));
8227 default:
8228 break;
8230 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8231 && TREE_INT_CST_LOW (arg1) == min + 1)
8232 switch (code)
8234 case GE_EXPR:
8235 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8236 return fold (build2 (NE_EXPR, type, arg0, arg1));
8237 case LT_EXPR:
8238 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8239 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8240 default:
8241 break;
8244 else if (!in_gimple_form
8245 && TREE_INT_CST_HIGH (arg1) == 0
8246 && TREE_INT_CST_LOW (arg1) == signed_max
8247 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8248 /* signed_type does not work on pointer types. */
8249 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8251 /* The following case also applies to X < signed_max+1
8252                  and X >= signed_max+1 because of previous transformations.  */
8253 if (code == LE_EXPR || code == GT_EXPR)
8255 tree st0, st1;
8256 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8257 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8258 return fold
8259 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8260 type, fold_convert (st0, arg0),
8261 fold_convert (st1, integer_zero_node)));
8267 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8268 a MINUS_EXPR of a constant, we can convert it into a comparison with
8269 a revised constant as long as no overflow occurs. */
8270 if ((code == EQ_EXPR || code == NE_EXPR)
8271 && TREE_CODE (arg1) == INTEGER_CST
8272 && (TREE_CODE (arg0) == PLUS_EXPR
8273 || TREE_CODE (arg0) == MINUS_EXPR)
8274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8275 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8276 ? MINUS_EXPR : PLUS_EXPR,
8277 arg1, TREE_OPERAND (arg0, 1), 0))
8278 && ! TREE_CONSTANT_OVERFLOW (tem))
8279 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8281 /* Similarly for a NEGATE_EXPR. */
8282 else if ((code == EQ_EXPR || code == NE_EXPR)
8283 && TREE_CODE (arg0) == NEGATE_EXPR
8284 && TREE_CODE (arg1) == INTEGER_CST
8285 && 0 != (tem = negate_expr (arg1))
8286 && TREE_CODE (tem) == INTEGER_CST
8287 && ! TREE_CONSTANT_OVERFLOW (tem))
8288 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8290 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8291 for !=. Don't do this for ordered comparisons due to overflow. */
8292 else if ((code == NE_EXPR || code == EQ_EXPR)
8293 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8294 return fold (build2 (code, type,
8295 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8297 /* If we are widening one operand of an integer comparison,
8298 see if the other operand is similarly being widened. Perhaps we
8299 can do the comparison in the narrower type. */
8300 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8301 && TREE_CODE (arg0) == NOP_EXPR
8302 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8303 && (code == EQ_EXPR || code == NE_EXPR
8304 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8305 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8306 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8307 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8308 || (TREE_CODE (t1) == INTEGER_CST
8309 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8310 return fold (build2 (code, type, tem,
8311 fold_convert (TREE_TYPE (tem), t1)));
8313 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8314 constant, we can simplify it. */
8315 else if (TREE_CODE (arg1) == INTEGER_CST
8316 && (TREE_CODE (arg0) == MIN_EXPR
8317 || TREE_CODE (arg0) == MAX_EXPR)
8318 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8319 return optimize_minmax_comparison (t);
8321 /* If we are comparing an ABS_EXPR with a constant, we can
8322 convert all the cases into explicit comparisons, but they may
8323 well not be faster than doing the ABS and one comparison.
8324 But ABS (X) <= C is a range comparison, which becomes a subtraction
8325 and a comparison, and is probably faster. */
8326 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8327 && TREE_CODE (arg0) == ABS_EXPR
8328 && ! TREE_SIDE_EFFECTS (arg0)
8329 && (0 != (tem = negate_expr (arg1)))
8330 && TREE_CODE (tem) == INTEGER_CST
8331 && ! TREE_CONSTANT_OVERFLOW (tem))
8332 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8333 build2 (GE_EXPR, type,
8334 TREE_OPERAND (arg0, 0), tem),
8335 build2 (LE_EXPR, type,
8336 TREE_OPERAND (arg0, 0), arg1)));
8338 /* If this is an EQ or NE comparison with zero and ARG0 is
8339 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8340 two operations, but the latter can be done in one less insn
8341 on machines that have only two-operand insns or on which a
8342 constant cannot be the first operand. */
8343 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8344 && TREE_CODE (arg0) == BIT_AND_EXPR)
8346 tree arg00 = TREE_OPERAND (arg0, 0);
8347 tree arg01 = TREE_OPERAND (arg0, 1);
8348 if (TREE_CODE (arg00) == LSHIFT_EXPR
8349 && integer_onep (TREE_OPERAND (arg00, 0)))
8350 return
8351 fold (build2 (code, type,
8352 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8353 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8354 arg01, TREE_OPERAND (arg00, 1)),
8355 fold_convert (TREE_TYPE (arg0),
8356 integer_one_node)),
8357 arg1));
8358 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8359 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8360 return
8361 fold (build2 (code, type,
8362 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8363 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8364 arg00, TREE_OPERAND (arg01, 1)),
8365 fold_convert (TREE_TYPE (arg0),
8366 integer_one_node)),
8367 arg1));
8370 /* If this is an NE or EQ comparison of zero against the result of a
8371 signed MOD operation whose second operand is a power of 2, make
8372 the MOD operation unsigned since it is simpler and equivalent. */
8373 if ((code == NE_EXPR || code == EQ_EXPR)
8374 && integer_zerop (arg1)
8375 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8376 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8377 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8378 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8379 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8380 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8382 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8383 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8384 fold_convert (newtype,
8385 TREE_OPERAND (arg0, 0)),
8386 fold_convert (newtype,
8387 TREE_OPERAND (arg0, 1))));
8389 return fold (build2 (code, type, newmod,
8390 fold_convert (newtype, arg1)));
8393 /* If this is an NE comparison of zero with an AND of one, remove the
8394 comparison since the AND will give the correct value. */
8395 if (code == NE_EXPR && integer_zerop (arg1)
8396 && TREE_CODE (arg0) == BIT_AND_EXPR
8397 && integer_onep (TREE_OPERAND (arg0, 1)))
8398 return fold_convert (type, arg0);
8400 /* If we have (A & C) == C where C is a power of 2, convert this into
8401 (A & C) != 0. Similarly for NE_EXPR. */
8402 if ((code == EQ_EXPR || code == NE_EXPR)
8403 && TREE_CODE (arg0) == BIT_AND_EXPR
8404 && integer_pow2p (TREE_OPERAND (arg0, 1))
8405 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8406 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8407 arg0, fold_convert (TREE_TYPE (arg0),
8408 integer_zero_node)));
8410 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8411 2, then fold the expression into shifts and logical operations. */
8412 tem = fold_single_bit_test (code, arg0, arg1, type);
8413 if (tem)
8414 return tem;
8416 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8417 Similarly for NE_EXPR. */
8418 if ((code == EQ_EXPR || code == NE_EXPR)
8419 && TREE_CODE (arg0) == BIT_AND_EXPR
8420 && TREE_CODE (arg1) == INTEGER_CST
8421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8423 tree dandnotc
8424 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8425 arg1, build1 (BIT_NOT_EXPR,
8426 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8427 TREE_OPERAND (arg0, 1))));
8428 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8429 if (integer_nonzerop (dandnotc))
8430 return omit_one_operand (type, rslt, arg0);
8433 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8434 Similarly for NE_EXPR. */
8435 if ((code == EQ_EXPR || code == NE_EXPR)
8436 && TREE_CODE (arg0) == BIT_IOR_EXPR
8437 && TREE_CODE (arg1) == INTEGER_CST
8438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8440 tree candnotd
8441 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8442 TREE_OPERAND (arg0, 1),
8443 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8444 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8445 if (integer_nonzerop (candnotd))
8446 return omit_one_operand (type, rslt, arg0);
8449 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8450 and similarly for >= into !=. */
8451 if ((code == LT_EXPR || code == GE_EXPR)
8452 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8453 && TREE_CODE (arg1) == LSHIFT_EXPR
8454 && integer_onep (TREE_OPERAND (arg1, 0)))
8455 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8456 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8457 TREE_OPERAND (arg1, 1)),
8458 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8460 else if ((code == LT_EXPR || code == GE_EXPR)
8461 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8462 && (TREE_CODE (arg1) == NOP_EXPR
8463 || TREE_CODE (arg1) == CONVERT_EXPR)
8464 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8465 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8466 return
8467 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8468 fold_convert (TREE_TYPE (arg0),
8469 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8470 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8471 1))),
8472 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8474 /* Simplify comparison of something with itself. (For IEEE
8475 floating-point, we can only do some of these simplifications.) */
8476 if (operand_equal_p (arg0, arg1, 0))
8478 switch (code)
8480 case EQ_EXPR:
8481 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8482 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8483 return constant_boolean_node (1, type);
8484 break;
8486 case GE_EXPR:
8487 case LE_EXPR:
8488 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8489 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8490 return constant_boolean_node (1, type);
8491 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8493 case NE_EXPR:
8494 /* For NE, we can only do this simplification if integer
8495 or we don't honor IEEE floating point NaNs. */
8496 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8497 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8498 break;
8499 /* ... fall through ... */
8500 case GT_EXPR:
8501 case LT_EXPR:
8502 return constant_boolean_node (0, type);
8503 default:
8504 abort ();
8508 /* If we are comparing an expression that just has comparisons
8509 of two integer values, arithmetic expressions of those comparisons,
8510 and constants, we can simplify it. There are only three cases
8511 to check: the two values can either be equal, the first can be
8512 greater, or the second can be greater. Fold the expression for
8513 those three values. Since each value must be 0 or 1, we have
8514 eight possibilities, each of which corresponds to the constant 0
8515 or 1 or one of the six possible comparisons.
8517 This handles common cases like (a > b) == 0 but also handles
8518 expressions like ((x > y) - (y > x)) > 0, which supposedly
8519 occur in macroized code. */
8521 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8523 tree cval1 = 0, cval2 = 0;
8524 int save_p = 0;
8526 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8527 /* Don't handle degenerate cases here; they should already
8528 have been handled anyway. */
8529 && cval1 != 0 && cval2 != 0
8530 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8531 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8532 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8533 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8534 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8535 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8536 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8538 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8539 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8541 /* We can't just pass T to eval_subst in case cval1 or cval2
8542 was the same as ARG1. */
8544 tree high_result
8545 = fold (build2 (code, type,
8546 eval_subst (arg0, cval1, maxval,
8547 cval2, minval),
8548 arg1));
8549 tree equal_result
8550 = fold (build2 (code, type,
8551 eval_subst (arg0, cval1, maxval,
8552 cval2, maxval),
8553 arg1));
8554 tree low_result
8555 = fold (build2 (code, type,
8556 eval_subst (arg0, cval1, minval,
8557 cval2, maxval),
8558 arg1));
8560 /* All three of these results should be 0 or 1. Confirm they
8561 are. Then use those values to select the proper code
8562 to use. */
8564 if ((integer_zerop (high_result)
8565 || integer_onep (high_result))
8566 && (integer_zerop (equal_result)
8567 || integer_onep (equal_result))
8568 && (integer_zerop (low_result)
8569 || integer_onep (low_result)))
8571 /* Make a 3-bit mask with the high-order bit being the
8572 value for `>', the next for '=', and the low for '<'. */
8573 switch ((integer_onep (high_result) * 4)
8574 + (integer_onep (equal_result) * 2)
8575 + integer_onep (low_result))
8577 case 0:
8578 /* Always false. */
8579 return omit_one_operand (type, integer_zero_node, arg0);
8580 case 1:
8581 code = LT_EXPR;
8582 break;
8583 case 2:
8584 code = EQ_EXPR;
8585 break;
8586 case 3:
8587 code = LE_EXPR;
8588 break;
8589 case 4:
8590 code = GT_EXPR;
8591 break;
8592 case 5:
8593 code = NE_EXPR;
8594 break;
8595 case 6:
8596 code = GE_EXPR;
8597 break;
8598 case 7:
8599 /* Always true. */
8600 return omit_one_operand (type, integer_one_node, arg0);
8603 tem = build2 (code, type, cval1, cval2);
8604 if (save_p)
8605 return save_expr (tem);
8606 else
8607 return fold (tem);
8612 /* If this is a comparison of a field, we may be able to simplify it. */
8613 if (((TREE_CODE (arg0) == COMPONENT_REF
8614 && lang_hooks.can_use_bit_fields_p ())
8615 || TREE_CODE (arg0) == BIT_FIELD_REF)
8616 && (code == EQ_EXPR || code == NE_EXPR)
8617 /* Handle the constant case even without -O
8618 to make sure the warnings are given. */
8619 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8621 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8622 if (t1)
8623 return t1;
8626 /* If this is a comparison of complex values and either or both sides
8627 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8628 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8629 This may prevent needless evaluations. */
8630 if ((code == EQ_EXPR || code == NE_EXPR)
8631 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8632 && (TREE_CODE (arg0) == COMPLEX_EXPR
8633 || TREE_CODE (arg1) == COMPLEX_EXPR
8634 || TREE_CODE (arg0) == COMPLEX_CST
8635 || TREE_CODE (arg1) == COMPLEX_CST))
8637 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8638 tree real0, imag0, real1, imag1;
8640 arg0 = save_expr (arg0);
8641 arg1 = save_expr (arg1);
8642 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8643 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8644 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8645 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8647 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8648 : TRUTH_ORIF_EXPR),
8649 type,
8650 fold (build2 (code, type, real0, real1)),
8651 fold (build2 (code, type, imag0, imag1))));
8654 /* Optimize comparisons of strlen vs zero to a compare of the
8655 first character of the string vs zero. To wit,
8656 strlen(ptr) == 0 => *ptr == 0
8657 strlen(ptr) != 0 => *ptr != 0
8658 Other cases should reduce to one of these two (or a constant)
8659 due to the return value of strlen being unsigned. */
8660 if ((code == EQ_EXPR || code == NE_EXPR)
8661 && integer_zerop (arg1)
8662 && TREE_CODE (arg0) == CALL_EXPR)
8664 tree fndecl = get_callee_fndecl (arg0);
8665 tree arglist;
8667 if (fndecl
8668 && DECL_BUILT_IN (fndecl)
8669 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8670 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8671 && (arglist = TREE_OPERAND (arg0, 1))
8672 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8673 && ! TREE_CHAIN (arglist))
8674 return fold (build2 (code, type,
8675 build1 (INDIRECT_REF, char_type_node,
8676 TREE_VALUE (arglist)),
8677 fold_convert (char_type_node,
8678 integer_zero_node)));
8681 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8682 into a single range test. */
8683 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8684 && TREE_CODE (arg1) == INTEGER_CST
8685 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8686 && !integer_zerop (TREE_OPERAND (arg0, 1))
8687 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8688 && !TREE_OVERFLOW (arg1))
8690 t1 = fold_div_compare (code, type, arg0, arg1);
8691 if (t1 != NULL_TREE)
8692 return t1;
8695 if ((code == EQ_EXPR || code == NE_EXPR)
8696 && !TREE_SIDE_EFFECTS (arg0)
8697 && integer_zerop (arg1)
8698 && tree_expr_nonzero_p (arg0))
8699 return constant_boolean_node (code==NE_EXPR, type);
8701 t1 = fold_relational_const (code, type, arg0, arg1);
8702 return t1 == NULL_TREE ? t : t1;
8704 case UNORDERED_EXPR:
8705 case ORDERED_EXPR:
8706 case UNLT_EXPR:
8707 case UNLE_EXPR:
8708 case UNGT_EXPR:
8709 case UNGE_EXPR:
8710 case UNEQ_EXPR:
8711 case LTGT_EXPR:
8712 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8714 t1 = fold_relational_const (code, type, arg0, arg1);
8715 if (t1 != NULL_TREE)
8716 return t1;
8719 /* If the first operand is NaN, the result is constant. */
8720 if (TREE_CODE (arg0) == REAL_CST
8721 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8722 && (code != LTGT_EXPR || ! flag_trapping_math))
8724 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8725 ? integer_zero_node
8726 : integer_one_node;
8727 return omit_one_operand (type, t1, arg1);
8730 /* If the second operand is NaN, the result is constant. */
8731 if (TREE_CODE (arg1) == REAL_CST
8732 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8733 && (code != LTGT_EXPR || ! flag_trapping_math))
8735 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8736 ? integer_zero_node
8737 : integer_one_node;
8738 return omit_one_operand (type, t1, arg0);
8741 /* Simplify unordered comparison of something with itself. */
8742 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8743 && operand_equal_p (arg0, arg1, 0))
8744 return constant_boolean_node (1, type);
8746 if (code == LTGT_EXPR
8747 && !flag_trapping_math
8748 && operand_equal_p (arg0, arg1, 0))
8749 return constant_boolean_node (0, type);
8751 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8753 tree targ0 = strip_float_extensions (arg0);
8754 tree targ1 = strip_float_extensions (arg1);
8755 tree newtype = TREE_TYPE (targ0);
8757 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8758 newtype = TREE_TYPE (targ1);
8760 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8761 return fold (build2 (code, type, fold_convert (newtype, targ0),
8762 fold_convert (newtype, targ1)));
8765 return t;
8767 case COND_EXPR:
8768 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8769 so all simple results must be passed through pedantic_non_lvalue. */
8770 if (TREE_CODE (arg0) == INTEGER_CST)
8772 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8773 /* Only optimize constant conditions when the selected branch
8774 has the same type as the COND_EXPR. This avoids optimizing
8775 away "c ? x : throw", where the throw has a void type. */
8776 if (! VOID_TYPE_P (TREE_TYPE (tem))
8777 || VOID_TYPE_P (type))
8778 return pedantic_non_lvalue (tem);
8779 return t;
8781 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8782 return pedantic_omit_one_operand (type, arg1, arg0);
8784 /* If we have A op B ? A : C, we may be able to convert this to a
8785 simpler expression, depending on the operation and the values
8786 of B and C. Signed zeros prevent all of these transformations,
8787 for reasons given above each one.
8789 Also try swapping the arguments and inverting the conditional. */
8790 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8791 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8792 arg1, TREE_OPERAND (arg0, 1))
8793 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8795 tem = fold_cond_expr_with_comparison (type, arg0,
8796 TREE_OPERAND (t, 1),
8797 TREE_OPERAND (t, 2));
8798 if (tem)
8799 return tem;
8802 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8803 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8804 TREE_OPERAND (t, 2),
8805 TREE_OPERAND (arg0, 1))
8806 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8808 tem = invert_truthvalue (arg0);
8809 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8811 tem = fold_cond_expr_with_comparison (type, tem,
8812 TREE_OPERAND (t, 2),
8813 TREE_OPERAND (t, 1));
8814 if (tem)
8815 return tem;
8819 /* If the second operand is simpler than the third, swap them
8820 since that produces better jump optimization results. */
8821 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8822 TREE_OPERAND (t, 2), false))
8824 /* See if this can be inverted. If it can't, possibly because
8825 it was a floating-point inequality comparison, don't do
8826 anything. */
8827 tem = invert_truthvalue (arg0);
8829 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8830 return fold (build3 (code, type, tem,
8831 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8834 /* Convert A ? 1 : 0 to simply A. */
8835 if (integer_onep (TREE_OPERAND (t, 1))
8836 && integer_zerop (TREE_OPERAND (t, 2))
8837 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8838 call to fold will try to move the conversion inside
8839 a COND, which will recurse. In that case, the COND_EXPR
8840 is probably the best choice, so leave it alone. */
8841 && type == TREE_TYPE (arg0))
8842 return pedantic_non_lvalue (arg0);
8844 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8845 over COND_EXPR in cases such as floating point comparisons. */
8846 if (integer_zerop (TREE_OPERAND (t, 1))
8847 && integer_onep (TREE_OPERAND (t, 2))
8848 && truth_value_p (TREE_CODE (arg0)))
8849 return pedantic_non_lvalue (fold_convert (type,
8850 invert_truthvalue (arg0)));
8852 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8853 if (TREE_CODE (arg0) == LT_EXPR
8854 && integer_zerop (TREE_OPERAND (arg0, 1))
8855 && integer_zerop (TREE_OPERAND (t, 2))
8856 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8857 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8858 TREE_TYPE (tem), tem, arg1)));
8860 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8861 already handled above. */
8862 if (TREE_CODE (arg0) == BIT_AND_EXPR
8863 && integer_onep (TREE_OPERAND (arg0, 1))
8864 && integer_zerop (TREE_OPERAND (t, 2))
8865 && integer_pow2p (arg1))
8867 tree tem = TREE_OPERAND (arg0, 0);
8868 STRIP_NOPS (tem);
8869 if (TREE_CODE (tem) == RSHIFT_EXPR
8870 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
8871 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8872 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8873 return fold (build2 (BIT_AND_EXPR, type,
8874 TREE_OPERAND (tem, 0), arg1));
8877 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8878 is probably obsolete because the first operand should be a
8879 truth value (that's why we have the two cases above), but let's
8880 leave it in until we can confirm this for all front-ends. */
8881 if (integer_zerop (TREE_OPERAND (t, 2))
8882 && TREE_CODE (arg0) == NE_EXPR
8883 && integer_zerop (TREE_OPERAND (arg0, 1))
8884 && integer_pow2p (arg1)
8885 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8886 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8887 arg1, OEP_ONLY_CONST))
8888 return pedantic_non_lvalue (fold_convert (type,
8889 TREE_OPERAND (arg0, 0)));
8891 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8892 if (integer_zerop (TREE_OPERAND (t, 2))
8893 && truth_value_p (TREE_CODE (arg0))
8894 && truth_value_p (TREE_CODE (arg1)))
8895 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8897 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8898 if (integer_onep (TREE_OPERAND (t, 2))
8899 && truth_value_p (TREE_CODE (arg0))
8900 && truth_value_p (TREE_CODE (arg1)))
8902 /* Only perform transformation if ARG0 is easily inverted. */
8903 tem = invert_truthvalue (arg0);
8904 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8905 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8908 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8909 if (integer_zerop (arg1)
8910 && truth_value_p (TREE_CODE (arg0))
8911 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8913 /* Only perform transformation if ARG0 is easily inverted. */
8914 tem = invert_truthvalue (arg0);
8915 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8916 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8917 TREE_OPERAND (t, 2)));
8920 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8921 if (integer_onep (arg1)
8922 && truth_value_p (TREE_CODE (arg0))
8923 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8924 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8925 TREE_OPERAND (t, 2)));
8927 return t;
8929 case COMPOUND_EXPR:
8930 /* When pedantic, a compound expression can be neither an lvalue
8931 nor an integer constant expression. */
8932 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8933 return t;
8934 /* Don't let (0, 0) be null pointer constant. */
8935 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8936 : fold_convert (type, arg1);
8937 return pedantic_non_lvalue (tem);
8939 case COMPLEX_EXPR:
8940 if (wins)
8941 return build_complex (type, arg0, arg1);
8942 return t;
8944 case REALPART_EXPR:
8945 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8946 return t;
8947 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8948 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8949 TREE_OPERAND (arg0, 1));
8950 else if (TREE_CODE (arg0) == COMPLEX_CST)
8951 return TREE_REALPART (arg0);
8952 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8953 return fold (build2 (TREE_CODE (arg0), type,
8954 fold (build1 (REALPART_EXPR, type,
8955 TREE_OPERAND (arg0, 0))),
8956 fold (build1 (REALPART_EXPR, type,
8957 TREE_OPERAND (arg0, 1)))));
8958 return t;
8960 case IMAGPART_EXPR:
8961 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8962 return fold_convert (type, integer_zero_node);
8963 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8964 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8965 TREE_OPERAND (arg0, 0));
8966 else if (TREE_CODE (arg0) == COMPLEX_CST)
8967 return TREE_IMAGPART (arg0);
8968 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8969 return fold (build2 (TREE_CODE (arg0), type,
8970 fold (build1 (IMAGPART_EXPR, type,
8971 TREE_OPERAND (arg0, 0))),
8972 fold (build1 (IMAGPART_EXPR, type,
8973 TREE_OPERAND (arg0, 1)))));
8974 return t;
8976 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8977 appropriate. */
8978 case CLEANUP_POINT_EXPR:
8979 if (! has_cleanups (arg0))
8980 return TREE_OPERAND (t, 0);
8983 enum tree_code code0 = TREE_CODE (arg0);
8984 int kind0 = TREE_CODE_CLASS (code0);
8985 tree arg00 = TREE_OPERAND (arg0, 0);
8986 tree arg01;
8988 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8989 return fold (build1 (code0, type,
8990 fold (build1 (CLEANUP_POINT_EXPR,
8991 TREE_TYPE (arg00), arg00))));
8993 if (kind0 == '<' || kind0 == '2'
8994 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8995 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8996 || code0 == TRUTH_XOR_EXPR)
8998 arg01 = TREE_OPERAND (arg0, 1);
9000 if (TREE_CONSTANT (arg00)
9001 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
9002 && ! has_cleanups (arg00)))
9003 return fold (build2 (code0, type, arg00,
9004 fold (build1 (CLEANUP_POINT_EXPR,
9005 TREE_TYPE (arg01), arg01))));
9007 if (TREE_CONSTANT (arg01))
9008 return fold (build2 (code0, type,
9009 fold (build1 (CLEANUP_POINT_EXPR,
9010 TREE_TYPE (arg00), arg00)),
9011 arg01));
9014 return t;
9017 case CALL_EXPR:
9018 /* Check for a built-in function. */
9019 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9020 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9021 == FUNCTION_DECL)
9022 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9024 tree tmp = fold_builtin (t, false);
9025 if (tmp)
9026 return tmp;
9028 return t;
9030 default:
9031 return t;
9032 } /* switch (code) */
9035 #ifdef ENABLE_FOLD_CHECKING
9036 #undef fold
9038 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9039 static void fold_check_failed (tree, tree);
9040 void print_fold_checksum (tree);
9042 /* When --enable-checking=fold, compute a digest of expr before
9043 and after actual fold call to see if fold did not accidentally
9044 change original expr. */
/* Checking variant of fold: compute an MD5 digest of EXPR before and
   after folding and abort if fold modified its argument in place
   (fold must always build new trees, never mutate its input).  */
9046 tree
9047 fold (tree expr)
9049 tree ret;
9050 struct md5_ctx ctx;
/* Digests of EXPR's reachable nodes taken before and after folding.  */
9051 unsigned char checksum_before[16], checksum_after[16];
/* Pointer hash table so shared subtrees are checksummed only once.  */
9052 htab_t ht;
9054 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9055 md5_init_ctx (&ctx);
9056 fold_checksum_tree (expr, &ctx, ht);
9057 md5_finish_ctx (&ctx, checksum_before);
9058 htab_empty (ht);
/* Do the actual folding.  */
9060 ret = fold_1 (expr);
9062 md5_init_ctx (&ctx);
9063 fold_checksum_tree (expr, &ctx, ht);
9064 md5_finish_ctx (&ctx, checksum_after);
9065 htab_delete (ht);
/* Any difference means EXPR itself was modified by the fold above.  */
9067 if (memcmp (checksum_before, checksum_after, 16))
9068 fold_check_failed (expr, ret);
9070 return ret;
/* Debugging aid: print the MD5 checksum of EXPR's tree representation
   to stderr as 32 lowercase hex digits followed by a newline.  */
9073 void
9074 print_fold_checksum (tree expr)
9076 struct md5_ctx ctx;
9077 unsigned char checksum[16], cnt;
9078 htab_t ht;
9080 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9081 md5_init_ctx (&ctx);
9082 fold_checksum_tree (expr, &ctx, ht);
9083 md5_finish_ctx (&ctx, checksum);
9084 htab_delete (ht);
9085 for (cnt = 0; cnt < 16; ++cnt)
9086 fprintf (stderr, "%02x", checksum[cnt]);
9087 putc ('\n', stderr);
/* Called when the before/after checksums in fold differ: the tree
   passed to fold was modified in place.  Aborts the compiler with an
   internal error; the EXPR/RET arguments exist for debugger use.  */
9090 static void
9091 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9093 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of every tree node reachable from EXPR
   into the MD5 context *CTX.  HT is a pointer hash table used to visit
   each shared node at most once.  Fields that fold is legitimately
   allowed to update as a side effect (DECL_ASSEMBLER_NAME,
   TYPE_POINTER_TO, TYPE_REFERENCE_TO) are masked out by checksumming a
   scratch copy of the node with those fields cleared.  */
9096 static void
9097 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9099 void **slot;
9100 enum tree_code code;
/* Scratch buffer large enough for any node we copy; checked below.  */
9101 char buf[sizeof (struct tree_decl)];
9102 int i, len;
/* Compile-environment sanity check that BUF can hold the largest node
   kinds copied into it (expressions, decls, types).  */
9104 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
9105 > sizeof (struct tree_decl)
9106 || sizeof (struct tree_type) > sizeof (struct tree_decl))
9107 abort ();
9108 if (expr == NULL)
9109 return;
/* Visit each node only once, even when subtrees are shared.  */
9110 slot = htab_find_slot (ht, expr, INSERT);
9111 if (*slot != NULL)
9112 return;
9113 *slot = expr;
9114 code = TREE_CODE (expr);
9115 if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9117 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9118 memcpy (buf, expr, tree_size (expr));
9119 expr = (tree) buf;
9120 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9122 else if (TREE_CODE_CLASS (code) == 't'
9123 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9125 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9126 memcpy (buf, expr, tree_size (expr));
9127 expr = (tree) buf;
9128 TYPE_POINTER_TO (expr) = NULL;
9129 TYPE_REFERENCE_TO (expr) = NULL;
/* Hash the raw bytes of the node itself, then recurse into the tree
   pointers it contains.  */
9131 md5_process_bytes (expr, tree_size (expr), ctx);
9132 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9133 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9134 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9135 switch (TREE_CODE_CLASS (code))
/* Constants: hash the constant's payload.  */
9137 case 'c':
9138 switch (code)
9140 case STRING_CST:
9141 md5_process_bytes (TREE_STRING_POINTER (expr),
9142 TREE_STRING_LENGTH (expr), ctx);
9143 break;
9144 case COMPLEX_CST:
9145 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9146 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9147 break;
9148 case VECTOR_CST:
9149 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9150 break;
9151 default:
9152 break;
9154 break;
/* Miscellaneous nodes: lists and vectors of trees.  */
9155 case 'x':
9156 switch (code)
9158 case TREE_LIST:
9159 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9160 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9161 break;
9162 case TREE_VEC:
9163 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9164 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9165 break;
9166 default:
9167 break;
9169 break;
/* Expression classes: hash each operand.  */
9170 case 'e':
9171 case 'r':
9172 case '<':
9173 case '1':
9174 case '2':
9175 case 's':
9176 len = first_rtl_op (code);
9177 for (i = 0; i < len; ++i)
9178 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9179 break;
/* Declarations: hash the tree-valued fields.  */
9180 case 'd':
9181 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9182 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9183 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9184 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9185 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9186 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9187 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9188 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9189 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9190 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9191 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9192 break;
/* Types: hash the type's constituent trees.  */
9193 case 't':
9194 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9195 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9196 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9197 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9198 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9199 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9200 if (INTEGRAL_TYPE_P (expr)
9201 || SCALAR_FLOAT_TYPE_P (expr))
9203 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9204 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9206 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9207 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9208 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9209 break;
9210 default:
9211 break;
9215 #endif
9217 /* Perform constant folding and related simplification of initializer
9218 expression EXPR. This behaves identically to "fold" but ignores
9219 potential run-time traps and exceptions that fold must preserve. */
/* Fold EXPR as an initializer: behaves like fold, but temporarily
   clears the flags that make fold preserve run-time traps and
   floating-point exceptions (they cannot occur when evaluating a
   static initializer).  */
9221 tree
9222 fold_initializer (tree expr)
/* Save the caller's trap/exception-preserving flags.  */
9224 int saved_signaling_nans = flag_signaling_nans;
9225 int saved_trapping_math = flag_trapping_math;
9226 int saved_trapv = flag_trapv;
9227 tree result;
/* Disable trap preservation so fold may simplify more aggressively.  */
9229 flag_signaling_nans = 0;
9230 flag_trapping_math = 0;
9231 flag_trapv = 0;
9233 result = fold (expr);
/* Restore the caller's settings before returning.  */
9235 flag_signaling_nans = saved_signaling_nans;
9236 flag_trapping_math = saved_trapping_math;
9237 flag_trapv = saved_trapv;
9239 return result;
9242 /* Determine if first argument is a multiple of second argument. Return 0 if
9243 it is not, or we cannot easily determined it to be.
9245 An example of the sort of thing we care about (at this point; this routine
9246 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9247 fold cases do now) is discovering that
9249 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9251 is a multiple of
9253 SAVE_EXPR (J * 8)
9255 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9257 This code also handles discovering that
9259 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9261 is a multiple of 8 so we don't have to worry about dealing with a
9262 possible remainder.
9264 Note that we *look* inside a SAVE_EXPR only to determine how it was
9265 calculated; it is not safe for fold to do much of anything else with the
9266 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9267 at run time. For example, the latter example above *cannot* be implemented
9268 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9269 evaluation time of the original SAVE_EXPR is not necessarily the same at
9270 the time the new expression is evaluated. The only optimization of this
9271 sort that would be valid is changing
9273 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9275 divided by 8 to
9277 SAVE_EXPR (I) * SAVE_EXPR (J)
9279 (where the same SAVE_EXPR (J) is used in the original and the
9280 transformed version). */
9282 static int
9283 multiple_of_p (tree type, tree top, tree bottom)
/* Identical operands are trivially multiples of each other.  */
9285 if (operand_equal_p (top, bottom, 0))
9286 return 1;
/* Only integer arithmetic is analyzed.  */
9288 if (TREE_CODE (type) != INTEGER_TYPE)
9289 return 0;
9291 switch (TREE_CODE (top))
/* A product is a multiple of BOTTOM if either factor is.  */
9293 case MULT_EXPR:
9294 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9295 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* A sum or difference is a multiple only if both operands are.  */
9297 case PLUS_EXPR:
9298 case MINUS_EXPR:
9299 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9300 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* X << N is treated as X * (1 << N) when the shift count is a
   constant and 1 << N does not overflow.  */
9302 case LSHIFT_EXPR:
9303 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9305 tree op1, t1;
9307 op1 = TREE_OPERAND (top, 1);
9308 /* const_binop may not detect overflow correctly,
9309 so check for it explicitly here. */
9310 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9311 > TREE_INT_CST_LOW (op1)
9312 && TREE_INT_CST_HIGH (op1) == 0
9313 && 0 != (t1 = fold_convert (type,
9314 const_binop (LSHIFT_EXPR,
9315 size_one_node,
9316 op1, 0)))
9317 && ! TREE_OVERFLOW (t1))
9318 return multiple_of_p (type, t1, bottom);
9320 return 0;
9322 case NOP_EXPR:
9323 /* Can't handle conversions from non-integral or wider integral type. */
9324 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9325 || (TYPE_PRECISION (type)
9326 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9327 return 0;
9329 /* .. fall through ... */
/* Look through SAVE_EXPR (and narrowing-safe NOP_EXPR above) to the
   wrapped expression.  */
9331 case SAVE_EXPR:
9332 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Constant case: decide via TOP % BOTTOM == 0.  Negative constants in
   an unsigned TYPE are rejected since the modulus would be computed
   with unsigned semantics.  */
9334 case INTEGER_CST:
9335 if (TREE_CODE (bottom) != INTEGER_CST
9336 || (TYPE_UNSIGNED (type)
9337 && (tree_int_cst_sgn (top) < 0
9338 || tree_int_cst_sgn (bottom) < 0)))
9339 return 0;
9340 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9341 top, bottom, 0));
/* Anything else: conservatively report "not known to be a multiple".  */
9343 default:
9344 return 0;
9348 /* Return true if `t' is known to be non-negative. */
/* Conservative analysis: returns nonzero only when T can be proven
   non-negative from its structure; 0 means "unknown", not "negative".  */
9351 tree_expr_nonnegative_p (tree t)
9353 switch (TREE_CODE (t))
9355 case ABS_EXPR:
9356 return 1;
9358 case INTEGER_CST:
9359 return tree_int_cst_sgn (t) >= 0;
9361 case REAL_CST:
9362 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9364 case PLUS_EXPR:
/* Float addition preserves non-negativity of both addends.  */
9365 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9366 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9367 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9369 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9370 both unsigned and at least 2 bits shorter than the result. */
9371 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9372 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9373 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9375 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9376 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9377 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9378 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* +1 accounts for the carry out of the widest addend.  */
9380 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9381 TYPE_PRECISION (inner2)) + 1;
9382 return prec < TYPE_PRECISION (TREE_TYPE (t));
9385 break;
9387 case MULT_EXPR:
9388 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9390 /* x * x for floating point x is always non-negative. */
9391 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9392 return 1;
9393 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9394 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9397 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9398 both unsigned and their total bits is shorter than the result. */
9399 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9400 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9401 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9403 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9404 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9405 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9406 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9407 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9408 < TYPE_PRECISION (TREE_TYPE (t));
9410 return 0;
/* Division of non-negatives is non-negative.  */
9412 case TRUNC_DIV_EXPR:
9413 case CEIL_DIV_EXPR:
9414 case FLOOR_DIV_EXPR:
9415 case ROUND_DIV_EXPR:
9416 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9417 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* The sign of a modulus follows its first operand here.  */
9419 case TRUNC_MOD_EXPR:
9420 case CEIL_MOD_EXPR:
9421 case FLOOR_MOD_EXPR:
9422 case ROUND_MOD_EXPR:
9423 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9425 case RDIV_EXPR:
9426 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9427 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* AND can only clear bits, so one non-negative operand (clear sign
   bit) suffices.  */
9429 case BIT_AND_EXPR:
9430 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9431 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9432 case BIT_IOR_EXPR:
9433 case BIT_XOR_EXPR:
9434 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9435 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Conversions: analyze by inner/outer type kind.  */
9437 case NOP_EXPR:
9439 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9440 tree outer_type = TREE_TYPE (t);
9442 if (TREE_CODE (outer_type) == REAL_TYPE)
9444 if (TREE_CODE (inner_type) == REAL_TYPE)
9445 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9446 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9448 if (TYPE_UNSIGNED (inner_type))
9449 return 1;
9450 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9453 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9455 if (TREE_CODE (inner_type) == REAL_TYPE)
9456 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
/* A widening conversion from unsigned cannot produce a negative.  */
9457 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9458 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9459 && TYPE_UNSIGNED (inner_type);
9462 break;
9464 case COND_EXPR:
9465 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9466 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9467 case COMPOUND_EXPR:
9468 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9469 case MIN_EXPR:
9470 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9471 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9472 case MAX_EXPR:
9473 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9474 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9475 case MODIFY_EXPR:
9476 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9477 case BIND_EXPR:
9478 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9479 case SAVE_EXPR:
9480 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9481 case NON_LVALUE_EXPR:
9482 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9483 case FLOAT_EXPR:
9484 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9486 case TARGET_EXPR:
9488 tree temp = TARGET_EXPR_SLOT (t);
9489 t = TARGET_EXPR_INITIAL (t);
9491 /* If the initializer is non-void, then it's a normal expression
9492 that will be assigned to the slot. */
9493 if (!VOID_TYPE_P (t))
9494 return tree_expr_nonnegative_p (t);
9496 /* Otherwise, the initializer sets the slot in some way. One common
9497 way is an assignment statement at the end of the initializer. */
9498 while (1)
9500 if (TREE_CODE (t) == BIND_EXPR)
9501 t = expr_last (BIND_EXPR_BODY (t));
9502 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9503 || TREE_CODE (t) == TRY_CATCH_EXPR)
9504 t = expr_last (TREE_OPERAND (t, 0));
9505 else if (TREE_CODE (t) == STATEMENT_LIST)
9506 t = expr_last (t);
9507 else
9508 break;
9510 if (TREE_CODE (t) == MODIFY_EXPR
9511 && TREE_OPERAND (t, 0) == temp)
9512 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9514 return 0;
/* Calls to known builtins with provable sign behavior.  */
9517 case CALL_EXPR:
9519 tree fndecl = get_callee_fndecl (t);
9520 tree arglist = TREE_OPERAND (t, 1);
9521 if (fndecl
9522 && DECL_BUILT_IN (fndecl)
9523 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9524 switch (DECL_FUNCTION_CODE (fndecl))
9526 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9527 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9528 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9529 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9531 CASE_BUILTIN_F (BUILT_IN_ACOS)
9532 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9533 CASE_BUILTIN_F (BUILT_IN_CABS)
9534 CASE_BUILTIN_F (BUILT_IN_COSH)
9535 CASE_BUILTIN_F (BUILT_IN_ERFC)
9536 CASE_BUILTIN_F (BUILT_IN_EXP)
9537 CASE_BUILTIN_F (BUILT_IN_EXP10)
9538 CASE_BUILTIN_F (BUILT_IN_EXP2)
9539 CASE_BUILTIN_F (BUILT_IN_FABS)
9540 CASE_BUILTIN_F (BUILT_IN_FDIM)
9541 CASE_BUILTIN_F (BUILT_IN_FREXP)
9542 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9543 CASE_BUILTIN_F (BUILT_IN_POW10)
9544 CASE_BUILTIN_I (BUILT_IN_FFS)
9545 CASE_BUILTIN_I (BUILT_IN_PARITY)
9546 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9547 /* Always true. */
9548 return 1;
9550 CASE_BUILTIN_F (BUILT_IN_SQRT)
9551 /* sqrt(-0.0) is -0.0. */
9552 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9553 return 1;
9554 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9556 CASE_BUILTIN_F (BUILT_IN_ASINH)
9557 CASE_BUILTIN_F (BUILT_IN_ATAN)
9558 CASE_BUILTIN_F (BUILT_IN_ATANH)
9559 CASE_BUILTIN_F (BUILT_IN_CBRT)
9560 CASE_BUILTIN_F (BUILT_IN_CEIL)
9561 CASE_BUILTIN_F (BUILT_IN_ERF)
9562 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9563 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9564 CASE_BUILTIN_F (BUILT_IN_FMOD)
9565 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9566 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9567 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9568 CASE_BUILTIN_F (BUILT_IN_LRINT)
9569 CASE_BUILTIN_F (BUILT_IN_LROUND)
9570 CASE_BUILTIN_F (BUILT_IN_MODF)
9571 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9572 CASE_BUILTIN_F (BUILT_IN_POW)
9573 CASE_BUILTIN_F (BUILT_IN_RINT)
9574 CASE_BUILTIN_F (BUILT_IN_ROUND)
9575 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9576 CASE_BUILTIN_F (BUILT_IN_SINH)
9577 CASE_BUILTIN_F (BUILT_IN_TANH)
9578 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9579 /* True if the 1st argument is nonnegative. */
9580 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9582 CASE_BUILTIN_F (BUILT_IN_FMAX)
9583 /* True if the 1st OR 2nd arguments are nonnegative. */
9584 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9585 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9587 CASE_BUILTIN_F (BUILT_IN_FMIN)
9588 /* True if the 1st AND 2nd arguments are nonnegative. */
9589 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9590 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9592 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9593 /* True if the 2nd argument is nonnegative. */
9594 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9596 default:
9597 break;
9598 #undef CASE_BUILTIN_F
9599 #undef CASE_BUILTIN_I
9603 /* ... fall through ... */
9605 default:
9606 if (truth_value_p (TREE_CODE (t)))
9607 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9608 return 1;
9611 /* We don't know sign of `t', so be conservative and return false. */
9612 return 0;
9615 /* Return true when T is an address and is known to be nonzero.
9616 For floating point we further ensure that T is not denormal.
9617 Similar logic is present in nonzero_address in rtlanal.h */
9619 static bool
9620 tree_expr_nonzero_p (tree t)
9622 tree type = TREE_TYPE (t);
9624 /* Doing something useful for floating point would need more work. */
9625 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9626 return false;
9628 switch (TREE_CODE (t))
9630 case ABS_EXPR:
9631 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9632 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9634 case INTEGER_CST:
9635 /* We used to test for !integer_zerop here. This does not work correctly
9636 if TREE_CONSTANT_OVERFLOW (t). */
9637 return (TREE_INT_CST_LOW (t) != 0
9638 || TREE_INT_CST_HIGH (t) != 0);
9640 case PLUS_EXPR:
9641 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9643 /* With the presence of negative values it is hard
9644 to say something. */
9645 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9646 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9647 return false;
9648 /* One of operands must be positive and the other non-negative. */
9649 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9650 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9652 break;
9654 case MULT_EXPR:
9655 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9657 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9658 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9660 break;
9662 case NOP_EXPR:
9664 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9665 tree outer_type = TREE_TYPE (t);
9667 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9668 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9670 break;
9672 case ADDR_EXPR:
9673 /* Weak declarations may link to NULL. */
9674 if (DECL_P (TREE_OPERAND (t, 0)))
9675 return !DECL_WEAK (TREE_OPERAND (t, 0));
9676 /* Constants and all other cases are never weak. */
9677 return true;
9679 case COND_EXPR:
9680 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9681 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9683 case MIN_EXPR:
9684 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9685 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9687 case MAX_EXPR:
9688 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9690 /* When both operands are nonzero, then MAX must be too. */
9691 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9692 return true;
9694 /* MAX where operand 0 is positive is positive. */
9695 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9697 /* MAX where operand 1 is positive is positive. */
9698 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9699 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9700 return true;
9701 break;
9703 case COMPOUND_EXPR:
9704 case MODIFY_EXPR:
9705 case BIND_EXPR:
9706 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9708 case SAVE_EXPR:
9709 case NON_LVALUE_EXPR:
9710 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9712 case BIT_IOR_EXPR:
9713 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9714 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9716 default:
9717 break;
9719 return false;
9722 /* See if we are applying CODE, a relational to the highest or lowest
9723 possible integer of TYPE. If so, then the result is a compile
9724 time constant. */
9726 static tree
9727 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9728 tree *op1_p)
9730 tree op0 = *op0_p;
9731 tree op1 = *op1_p;
9732 enum tree_code code = *code_p;
/* Bit width of OP1's mode; the extreme values below only fit in a
   HOST_WIDE_INT when this is at most HOST_BITS_PER_WIDE_INT.  */
9733 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only handle comparisons of an integral/pointer value against a
   non-overflowed integer constant of host-representable width.  */
9735 if (TREE_CODE (op1) == INTEGER_CST
9736 && ! TREE_CONSTANT_OVERFLOW (op1)
9737 && width <= HOST_BITS_PER_WIDE_INT
9738 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9739 || POINTER_TYPE_P (TREE_TYPE (op1))))
9741 unsigned HOST_WIDE_INT signed_max;
9742 unsigned HOST_WIDE_INT max, min;
/* Largest value of the signed type of this width.  */
9744 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9746 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9748 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9749 min = 0;
9751 else
9753 max = signed_max;
9754 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* OP1 == MAX: nothing is greater than the maximum.  */
9757 if (TREE_INT_CST_HIGH (op1) == 0
9758 && TREE_INT_CST_LOW (op1) == max)
9759 switch (code)
9761 case GT_EXPR:
9762 return omit_one_operand (type, integer_zero_node, op0);
9764 case GE_EXPR:
9765 *code_p = EQ_EXPR;
9766 break;
9767 case LE_EXPR:
9768 return omit_one_operand (type, integer_one_node, op0);
9770 case LT_EXPR:
9771 *code_p = NE_EXPR;
9772 break;
9774 /* The GE_EXPR and LT_EXPR cases above are not normally
9775 reached because of previous transformations. */
9777 default:
9778 break;
/* OP1 == MAX-1: X > MAX-1 is X == MAX; X <= MAX-1 is X != MAX.  */
9780 else if (TREE_INT_CST_HIGH (op1) == 0
9781 && TREE_INT_CST_LOW (op1) == max - 1)
9782 switch (code)
9784 case GT_EXPR:
9785 *code_p = EQ_EXPR;
9786 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9787 break;
9788 case LE_EXPR:
9789 *code_p = NE_EXPR;
9790 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9791 break;
9792 default:
9793 break;
/* OP1 == MIN: nothing is less than the minimum.  */
9795 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9796 && TREE_INT_CST_LOW (op1) == min)
9797 switch (code)
9799 case LT_EXPR:
9800 return omit_one_operand (type, integer_zero_node, op0);
9802 case LE_EXPR:
9803 *code_p = EQ_EXPR;
9804 break;
9806 case GE_EXPR:
9807 return omit_one_operand (type, integer_one_node, op0);
9809 case GT_EXPR:
9810 *code_p = NE_EXPR;
9811 break;
9813 default:
9814 break;
/* OP1 == MIN+1: X >= MIN+1 is X != MIN; X < MIN+1 is X == MIN.  */
9816 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9817 && TREE_INT_CST_LOW (op1) == min + 1)
9818 switch (code)
9820 case GE_EXPR:
9821 *code_p = NE_EXPR;
9822 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9823 break;
9824 case LT_EXPR:
9825 *code_p = EQ_EXPR;
9826 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9827 break;
9828 default:
9829 break;
/* Unsigned OP1 == signed MAX: rewrite the comparison as a sign test
   in the corresponding signed type.  */
9832 else if (TREE_INT_CST_HIGH (op1) == 0
9833 && TREE_INT_CST_LOW (op1) == signed_max
9834 && TYPE_UNSIGNED (TREE_TYPE (op1))
9835 /* signed_type does not work on pointer types. */
9836 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9838 /* The following case also applies to X < signed_max+1
9839 and X >= signed_max+1 because previous transformations. */
9840 if (code == LE_EXPR || code == GT_EXPR)
9842 tree st0, st1, exp, retval;
9843 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9844 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
/* X <= signed_max  becomes  (signed) X >= 0;
   X >  signed_max  becomes  (signed) X <  0.  */
9846 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9847 type,
9848 fold_convert (st0, op0),
9849 fold_convert (st1, integer_zero_node));
9851 retval
9852 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9853 TREE_TYPE (exp),
9854 TREE_OPERAND (exp, 0),
9855 TREE_OPERAND (exp, 1));
9857 /* If we are in gimple form, then returning EXP would create
9858 non-gimple expressions. Clearing it is safe and insures
9859 we do not allow a non-gimple expression to escape. */
9860 if (in_gimple_form)
9861 exp = NULL;
9863 return (retval ? retval : exp);
/* No simplification found; caller proceeds with possibly-updated
   *CODE_P and *OP1_P.  */
9868 return NULL_TREE;
9872 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9873 attempt to fold the expression to a constant without modifying TYPE,
9874 OP0 or OP1.
9876 If the expression could be simplified to a constant, then return
9877 the constant. If the expression would not be simplified to a
9878 constant, then return NULL_TREE.
9880 Note this is primarily designed to be called after gimplification
9881 of the tree structures and when at least one operand is a constant.
9882 As a result of those simplifying assumptions this routine is far
9883 simpler than the generic fold routine. */
9885 tree
9886 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9887 tree op0, tree op1)
9889 int wins = 1;
9890 tree subop0;
9891 tree subop1;
9892 tree tem;
9894 /* If this is a commutative operation, and ARG0 is a constant, move it
9895 to ARG1 to reduce the number of tests below. */
9896 if (commutative_tree_code (code)
9897 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9899 tem = op0;
9900 op0 = op1;
9901 op1 = tem;
9904 /* If either operand is a complex type, extract its real component. */
9905 if (TREE_CODE (op0) == COMPLEX_CST)
9906 subop0 = TREE_REALPART (op0);
9907 else
9908 subop0 = op0;
9910 if (TREE_CODE (op1) == COMPLEX_CST)
9911 subop1 = TREE_REALPART (op1);
9912 else
9913 subop1 = op1;
9915 /* Note if either argument is not a real or integer constant.
9916 With a few exceptions, simplification is limited to cases
9917 where both arguments are constants. */
9918 if ((TREE_CODE (subop0) != INTEGER_CST
9919 && TREE_CODE (subop0) != REAL_CST)
9920 || (TREE_CODE (subop1) != INTEGER_CST
9921 && TREE_CODE (subop1) != REAL_CST))
9922 wins = 0;
9924 switch (code)
9926 case PLUS_EXPR:
9927 /* (plus (address) (const_int)) is a constant. */
9928 if (TREE_CODE (op0) == PLUS_EXPR
9929 && TREE_CODE (op1) == INTEGER_CST
9930 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9931 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9932 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9933 == ADDR_EXPR)))
9934 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9936 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9937 const_binop (PLUS_EXPR, op1,
9938 TREE_OPERAND (op0, 1), 0));
9940 case BIT_XOR_EXPR:
9942 binary:
9943 if (!wins)
9944 return NULL_TREE;
9946 /* Both arguments are constants. Simplify. */
9947 tem = const_binop (code, op0, op1, 0);
9948 if (tem != NULL_TREE)
9950 /* The return value should always have the same type as
9951 the original expression. */
9952 if (TREE_TYPE (tem) != type)
9953 tem = fold_convert (type, tem);
9955 return tem;
9957 return NULL_TREE;
9959 case MINUS_EXPR:
9960 /* Fold &x - &x. This can happen from &x.foo - &x.
9961 This is unsafe for certain floats even in non-IEEE formats.
9962 In IEEE, it is unsafe because it does wrong for NaNs.
9963 Also note that operand_equal_p is always false if an
9964 operand is volatile. */
9965 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9966 return fold_convert (type, integer_zero_node);
9968 goto binary;
9970 case MULT_EXPR:
9971 case BIT_AND_EXPR:
9972 /* Special case multiplication or bitwise AND where one argument
9973 is zero. */
9974 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9975 return omit_one_operand (type, op1, op0);
9976 else
9977 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9978 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9979 && real_zerop (op1))
9980 return omit_one_operand (type, op1, op0);
9982 goto binary;
9984 case BIT_IOR_EXPR:
9985 /* Special case when we know the result will be all ones. */
9986 if (integer_all_onesp (op1))
9987 return omit_one_operand (type, op1, op0);
9989 goto binary;
9991 case TRUNC_DIV_EXPR:
9992 case ROUND_DIV_EXPR:
9993 case FLOOR_DIV_EXPR:
9994 case CEIL_DIV_EXPR:
9995 case EXACT_DIV_EXPR:
9996 case TRUNC_MOD_EXPR:
9997 case ROUND_MOD_EXPR:
9998 case FLOOR_MOD_EXPR:
9999 case CEIL_MOD_EXPR:
10000 case RDIV_EXPR:
10001 /* Division by zero is undefined. */
10002 if (integer_zerop (op1))
10003 return NULL_TREE;
10005 if (TREE_CODE (op1) == REAL_CST
10006 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10007 && real_zerop (op1))
10008 return NULL_TREE;
10010 goto binary;
10012 case MIN_EXPR:
10013 if (INTEGRAL_TYPE_P (type)
10014 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10015 return omit_one_operand (type, op1, op0);
10017 goto binary;
10019 case MAX_EXPR:
10020 if (INTEGRAL_TYPE_P (type)
10021 && TYPE_MAX_VALUE (type)
10022 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10023 return omit_one_operand (type, op1, op0);
10025 goto binary;
10027 case RSHIFT_EXPR:
10028 /* Optimize -1 >> x for arithmetic right shifts. */
10029 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10030 return omit_one_operand (type, op0, op1);
10031 /* ... fall through ... */
10033 case LSHIFT_EXPR:
10034 if (integer_zerop (op0))
10035 return omit_one_operand (type, op0, op1);
10037 /* Since negative shift count is not well-defined, don't
10038 try to compute it in the compiler. */
10039 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10040 return NULL_TREE;
10042 goto binary;
10044 case LROTATE_EXPR:
10045 case RROTATE_EXPR:
10046 /* -1 rotated either direction by any amount is still -1. */
10047 if (integer_all_onesp (op0))
10048 return omit_one_operand (type, op0, op1);
10050 /* 0 rotated either direction by any amount is still zero. */
10051 if (integer_zerop (op0))
10052 return omit_one_operand (type, op0, op1);
10054 goto binary;
10056 case COMPLEX_EXPR:
10057 if (wins)
10058 return build_complex (type, op0, op1);
10059 return NULL_TREE;
10061 case LT_EXPR:
10062 case LE_EXPR:
10063 case GT_EXPR:
10064 case GE_EXPR:
10065 case EQ_EXPR:
10066 case NE_EXPR:
10067 /* If one arg is a real or integer constant, put it last. */
10068 if ((TREE_CODE (op0) == INTEGER_CST
10069 && TREE_CODE (op1) != INTEGER_CST)
10070 || (TREE_CODE (op0) == REAL_CST
10071 && TREE_CODE (op0) != REAL_CST))
10073 tree temp;
10075 temp = op0;
10076 op0 = op1;
10077 op1 = temp;
10078 code = swap_tree_comparison (code);
10081 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10082 This transformation affects the cases which are handled in later
10083 optimizations involving comparisons with non-negative constants. */
10084 if (TREE_CODE (op1) == INTEGER_CST
10085 && TREE_CODE (op0) != INTEGER_CST
10086 && tree_int_cst_sgn (op1) > 0)
10088 switch (code)
10090 case GE_EXPR:
10091 code = GT_EXPR;
10092 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10093 break;
10095 case LT_EXPR:
10096 code = LE_EXPR;
10097 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10098 break;
10100 default:
10101 break;
10105 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10106 if (tem)
10107 return tem;
10109 /* Fall through. */
10111 case ORDERED_EXPR:
10112 case UNORDERED_EXPR:
10113 case UNLT_EXPR:
10114 case UNLE_EXPR:
10115 case UNGT_EXPR:
10116 case UNGE_EXPR:
10117 case UNEQ_EXPR:
10118 case LTGT_EXPR:
10119 if (!wins)
10120 return NULL_TREE;
10122 return fold_relational_const (code, type, op0, op1);
10124 case RANGE_EXPR:
10125 /* This could probably be handled. */
10126 return NULL_TREE;
10128 case TRUTH_AND_EXPR:
10129 /* If second arg is constant zero, result is zero, but first arg
10130 must be evaluated. */
10131 if (integer_zerop (op1))
10132 return omit_one_operand (type, op1, op0);
10133 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10134 case will be handled here. */
10135 if (integer_zerop (op0))
10136 return omit_one_operand (type, op0, op1);
10137 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10138 return constant_boolean_node (true, type);
10139 return NULL_TREE;
10141 case TRUTH_OR_EXPR:
10142 /* If second arg is constant true, result is true, but we must
10143 evaluate first arg. */
10144 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10145 return omit_one_operand (type, op1, op0);
10146 /* Likewise for first arg, but note this only occurs here for
10147 TRUTH_OR_EXPR. */
10148 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10149 return omit_one_operand (type, op0, op1);
10150 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10151 return constant_boolean_node (false, type);
10152 return NULL_TREE;
10154 case TRUTH_XOR_EXPR:
10155 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10157 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10158 return constant_boolean_node (x, type);
10160 return NULL_TREE;
10162 default:
10163 return NULL_TREE;
10167 /* Given the components of a unary expression CODE, TYPE and OP0,
10168 attempt to fold the expression to a constant without modifying
10169 TYPE or OP0.
10171 If the expression could be simplified to a constant, then return
10172 the constant. If the expression would not be simplified to a
10173 constant, then return NULL_TREE.
10175 Note this is primarily designed to be called after gimplification
10176 of the tree structures and when op0 is a constant. As a result
10177 of those simplifying assumptions this routine is far simpler than
10178 the generic fold routine. */
10180 tree
10181 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10182 tree op0)
10184 /* Make sure we have a suitable constant argument. */
10185 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10187 tree subop;
10189 if (TREE_CODE (op0) == COMPLEX_CST)
10190 subop = TREE_REALPART (op0);
10191 else
10192 subop = op0;
10194 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10195 return NULL_TREE;
10198 switch (code)
10200 case NOP_EXPR:
10201 case FLOAT_EXPR:
10202 case CONVERT_EXPR:
10203 case FIX_TRUNC_EXPR:
10204 case FIX_FLOOR_EXPR:
10205 case FIX_CEIL_EXPR:
10206 return fold_convert_const (code, type, op0);
10208 case NEGATE_EXPR:
10209 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10210 return fold_negate_const (op0, type);
10211 else
10212 return NULL_TREE;
10214 case ABS_EXPR:
10215 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10216 return fold_abs_const (op0, type);
10217 else
10218 return NULL_TREE;
10220 case BIT_NOT_EXPR:
10221 if (TREE_CODE (op0) == INTEGER_CST)
10222 return fold_not_const (op0, type);
10223 else
10224 return NULL_TREE;
10226 case REALPART_EXPR:
10227 if (TREE_CODE (op0) == COMPLEX_CST)
10228 return TREE_REALPART (op0);
10229 else
10230 return NULL_TREE;
10232 case IMAGPART_EXPR:
10233 if (TREE_CODE (op0) == COMPLEX_CST)
10234 return TREE_IMAGPART (op0);
10235 else
10236 return NULL_TREE;
10238 case CONJ_EXPR:
10239 if (TREE_CODE (op0) == COMPLEX_CST
10240 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10241 return build_complex (type, TREE_REALPART (op0),
10242 negate_expr (TREE_IMAGPART (op0)));
10243 return NULL_TREE;
10245 default:
10246 return NULL_TREE;
10250 /* If EXP represents referencing an element in a constant string
10251 (either via pointer arithmetic or array indexing), return the
10252 tree representing the value accessed, otherwise return NULL. */
10254 tree
10255 fold_read_from_constant_string (tree exp)
10257 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10259 tree exp1 = TREE_OPERAND (exp, 0);
10260 tree index;
10261 tree string;
10263 if (TREE_CODE (exp) == INDIRECT_REF)
10264 string = string_constant (exp1, &index);
10265 else
10267 tree low_bound = array_ref_low_bound (exp);
10268 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10270 /* Optimize the special-case of a zero lower bound.
10272 We convert the low_bound to sizetype to avoid some problems
10273 with constant folding. (E.g. suppose the lower bound is 1,
10274 and its mode is QI. Without the conversion,l (ARRAY
10275 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10276 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
10277 if (! integer_zerop (low_bound))
10278 index = size_diffop (index, fold_convert (sizetype, low_bound));
10280 string = exp1;
10283 if (string
10284 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10285 && TREE_CODE (string) == STRING_CST
10286 && TREE_CODE (index) == INTEGER_CST
10287 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10288 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10289 == MODE_INT)
10290 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10291 return fold_convert (TREE_TYPE (exp),
10292 build_int_cst (NULL_TREE,
10293 (TREE_STRING_POINTER (string)
10294 [TREE_INT_CST_LOW (index)])));
10296 return NULL;
10299 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10300 an integer constant or real constant.
10302 TYPE is the type of the result. */
10304 static tree
10305 fold_negate_const (tree arg0, tree type)
10307 tree t = NULL_TREE;
10309 if (TREE_CODE (arg0) == INTEGER_CST)
10311 unsigned HOST_WIDE_INT low;
10312 HOST_WIDE_INT high;
10313 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10314 TREE_INT_CST_HIGH (arg0),
10315 &low, &high);
10316 t = build_int_cst_wide (type, low, high);
10317 t = force_fit_type (t, 1,
10318 (overflow | TREE_OVERFLOW (arg0))
10319 && !TYPE_UNSIGNED (type),
10320 TREE_CONSTANT_OVERFLOW (arg0));
10322 else if (TREE_CODE (arg0) == REAL_CST)
10323 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10324 #ifdef ENABLE_CHECKING
10325 else
10326 abort ();
10327 #endif
10329 return t;
10332 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10333 an integer constant or real constant.
10335 TYPE is the type of the result. */
10337 tree
10338 fold_abs_const (tree arg0, tree type)
10340 tree t = NULL_TREE;
10342 if (TREE_CODE (arg0) == INTEGER_CST)
10344 /* If the value is unsigned, then the absolute value is
10345 the same as the ordinary value. */
10346 if (TYPE_UNSIGNED (type))
10347 return arg0;
10348 /* Similarly, if the value is non-negative. */
10349 else if (INT_CST_LT (integer_minus_one_node, arg0))
10350 return arg0;
10351 /* If the value is negative, then the absolute value is
10352 its negation. */
10353 else
10355 unsigned HOST_WIDE_INT low;
10356 HOST_WIDE_INT high;
10357 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10358 TREE_INT_CST_HIGH (arg0),
10359 &low, &high);
10360 t = build_int_cst_wide (type, low, high);
10361 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10362 TREE_CONSTANT_OVERFLOW (arg0));
10363 return t;
10366 else if (TREE_CODE (arg0) == REAL_CST)
10368 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10369 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10370 else
10371 return arg0;
10373 #ifdef ENABLE_CHECKING
10374 else
10375 abort ();
10376 #endif
10378 return t;
10381 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10382 constant. TYPE is the type of the result. */
10384 static tree
10385 fold_not_const (tree arg0, tree type)
10387 tree t = NULL_TREE;
10389 if (TREE_CODE (arg0) == INTEGER_CST)
10391 t = build_int_cst_wide (type,
10392 ~ TREE_INT_CST_LOW (arg0),
10393 ~ TREE_INT_CST_HIGH (arg0));
10394 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10395 TREE_CONSTANT_OVERFLOW (arg0));
10397 #ifdef ENABLE_CHECKING
10398 else
10399 abort ();
10400 #endif
10402 return t;
10405 /* Given CODE, a relational operator, the target type, TYPE and two
10406 constant operands OP0 and OP1, return the result of the
10407 relational operation. If the result is not a compile time
10408 constant, then return NULL_TREE. */
10410 static tree
10411 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10413 int result, invert;
10415 /* From here on, the only cases we handle are when the result is
10416 known to be a constant. */
10418 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10420 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10421 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10423 /* Handle the cases where either operand is a NaN. */
10424 if (real_isnan (c0) || real_isnan (c1))
10426 switch (code)
10428 case EQ_EXPR:
10429 case ORDERED_EXPR:
10430 result = 0;
10431 break;
10433 case NE_EXPR:
10434 case UNORDERED_EXPR:
10435 case UNLT_EXPR:
10436 case UNLE_EXPR:
10437 case UNGT_EXPR:
10438 case UNGE_EXPR:
10439 case UNEQ_EXPR:
10440 result = 1;
10441 break;
10443 case LT_EXPR:
10444 case LE_EXPR:
10445 case GT_EXPR:
10446 case GE_EXPR:
10447 case LTGT_EXPR:
10448 if (flag_trapping_math)
10449 return NULL_TREE;
10450 result = 0;
10451 break;
10453 default:
10454 abort ();
10457 return constant_boolean_node (result, type);
10460 return constant_boolean_node (real_compare (code, c0, c1), type);
10463 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10465 To compute GT, swap the arguments and do LT.
10466 To compute GE, do LT and invert the result.
10467 To compute LE, swap the arguments, do LT and invert the result.
10468 To compute NE, do EQ and invert the result.
10470 Therefore, the code below must handle only EQ and LT. */
10472 if (code == LE_EXPR || code == GT_EXPR)
10474 tree tem = op0;
10475 op0 = op1;
10476 op1 = tem;
10477 code = swap_tree_comparison (code);
10480 /* Note that it is safe to invert for real values here because we
10481 have already handled the one case that it matters. */
10483 invert = 0;
10484 if (code == NE_EXPR || code == GE_EXPR)
10486 invert = 1;
10487 code = invert_tree_comparison (code, false);
10490 /* Compute a result for LT or EQ if args permit;
10491 Otherwise return T. */
10492 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10494 if (code == EQ_EXPR)
10495 result = tree_int_cst_equal (op0, op1);
10496 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10497 result = INT_CST_LT_UNSIGNED (op0, op1);
10498 else
10499 result = INT_CST_LT (op0, op1);
10501 else
10502 return NULL_TREE;
10504 if (invert)
10505 result ^= 1;
10506 return constant_boolean_node (result, type);
10509 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10510 avoid confusing the gimplify process. */
10512 tree
10513 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10515 if (TREE_CODE (t) == INDIRECT_REF)
10517 t = TREE_OPERAND (t, 0);
10518 if (TREE_TYPE (t) != ptrtype)
10519 t = build1 (NOP_EXPR, ptrtype, t);
10521 else
10523 tree base = t;
10525 while (handled_component_p (base)
10526 || TREE_CODE (base) == REALPART_EXPR
10527 || TREE_CODE (base) == IMAGPART_EXPR)
10528 base = TREE_OPERAND (base, 0);
10529 if (DECL_P (base))
10530 TREE_ADDRESSABLE (base) = 1;
10532 t = build1 (ADDR_EXPR, ptrtype, t);
10535 return t;
10538 tree
10539 build_fold_addr_expr (tree t)
10541 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10544 /* Builds an expression for an indirection through T, simplifying some
10545 cases. */
10547 tree
10548 build_fold_indirect_ref (tree t)
10550 tree type = TREE_TYPE (TREE_TYPE (t));
10551 tree sub = t;
10552 tree subtype;
10554 STRIP_NOPS (sub);
10555 if (TREE_CODE (sub) == ADDR_EXPR)
10557 tree op = TREE_OPERAND (sub, 0);
10558 tree optype = TREE_TYPE (op);
10559 /* *&p => p */
10560 if (lang_hooks.types_compatible_p (type, optype))
10561 return op;
10562 /* *(foo *)&fooarray => fooarray[0] */
10563 else if (TREE_CODE (optype) == ARRAY_TYPE
10564 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10565 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10568 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10569 subtype = TREE_TYPE (sub);
10570 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10571 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10573 sub = build_fold_indirect_ref (sub);
10574 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10577 return build1 (INDIRECT_REF, type, t);
10580 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10581 whose result is ignored. The type of the returned tree need not be
10582 the same as the original expression. */
10584 tree
10585 fold_ignored_result (tree t)
10587 if (!TREE_SIDE_EFFECTS (t))
10588 return integer_zero_node;
10590 for (;;)
10591 switch (TREE_CODE_CLASS (TREE_CODE (t)))
10593 case '1':
10594 t = TREE_OPERAND (t, 0);
10595 break;
10597 case '2':
10598 case '<':
10599 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10600 t = TREE_OPERAND (t, 0);
10601 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10602 t = TREE_OPERAND (t, 1);
10603 else
10604 return t;
10605 break;
10607 case 'e':
10608 switch (TREE_CODE (t))
10610 case COMPOUND_EXPR:
10611 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10612 return t;
10613 t = TREE_OPERAND (t, 0);
10614 break;
10616 case COND_EXPR:
10617 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10618 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10619 return t;
10620 t = TREE_OPERAND (t, 0);
10621 break;
10623 default:
10624 return t;
10626 break;
10628 default:
10629 return t;
10633 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
10634 This can only be applied to objects of a sizetype. */
10636 tree
10637 round_up (tree value, int divisor)
10639 tree div = NULL_TREE;
10641 if (divisor <= 0)
10642 abort ();
10643 if (divisor == 1)
10644 return value;
10646 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10647 have to do anything. Only do this when we are not given a const,
10648 because in that case, this check is more expensive than just
10649 doing it. */
10650 if (TREE_CODE (value) != INTEGER_CST)
10652 div = size_int_type (divisor, TREE_TYPE (value));
10654 if (multiple_of_p (TREE_TYPE (value), value, div))
10655 return value;
10658 /* If divisor is a power of two, simplify this to bit manipulation. */
10659 if (divisor == (divisor & -divisor))
10661 tree t;
10663 t = build_int_cst (TREE_TYPE (value), divisor - 1);
10664 value = size_binop (PLUS_EXPR, value, t);
10665 t = build_int_cst (TREE_TYPE (value), -divisor);
10666 value = size_binop (BIT_AND_EXPR, value, t);
10668 else
10670 if (!div)
10671 div = size_int_type (divisor, TREE_TYPE (value));
10672 value = size_binop (CEIL_DIV_EXPR, value, div);
10673 value = size_binop (MULT_EXPR, value, div);
10676 return value;
10679 /* Likewise, but round down. */
10681 tree
10682 round_down (tree value, int divisor)
10684 tree div = NULL_TREE;
10686 if (divisor <= 0)
10687 abort ();
10688 if (divisor == 1)
10689 return value;
10691 div = size_int_type (divisor, TREE_TYPE (value));
10693 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10694 have to do anything. Only do this when we are not given a const,
10695 because in that case, this check is more expensive than just
10696 doing it. */
10697 if (TREE_CODE (value) != INTEGER_CST)
10699 div = size_int_type (divisor, TREE_TYPE (value));
10701 if (multiple_of_p (TREE_TYPE (value), value, div))
10702 return value;
10705 /* If divisor is a power of two, simplify this to bit manipulation. */
10706 if (divisor == (divisor & -divisor))
10708 tree t;
10710 t = build_int_cst (TREE_TYPE (value), -divisor);
10711 value = size_binop (BIT_AND_EXPR, value, t);
10713 else
10715 if (!div)
10716 div = size_int_type (divisor, TREE_TYPE (value));
10717 value = size_binop (FLOOR_DIV_EXPR, value, div);
10718 value = size_binop (MULT_EXPR, value, div);
10721 return value;
10723 #include "gt-fold-const.h"