/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

static tree fold_negate_const (tree, tree);
static tree fold_abs_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
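
/* A worked illustration (editorial addition, not original GCC text):
   LT, EQ and GT each own one bit, so every other code is a union of
   those three.  For example, COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
   == 3 and COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT) == 5.  ANDing two
   encodings therefore intersects the sets of orderings for which the
   comparisons hold, and ORing unions them; e.g. the conjunction of
   "a <= b" and "a >= b" is (3 & 6) == COMPCODE_EQ.  */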
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
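
/* Worked example (editorial addition; assumes a 32-bit HOST_WIDE_INT
   for concreteness): 0x40000000 + 0x40000000 wraps to 0x80000000,
   which is negative as a signed value although both addends are
   positive, so OVERFLOW_SUM_SIGN (0x40000000, 0x40000000, 0x80000000)
   is nonzero.  By contrast OVERFLOW_SUM_SIGN (1, 2, 3) is zero, since
   the signs of the addends and the sum all agree.  */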
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
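
/* Worked example (editorial addition; assumes HOST_BITS_PER_WIDE_INT
   == 32): BASE is then 0x10000, and for x == 0x12345678 we get
   LOWPART (x) == 0x5678 and HIGHPART (x) == 0x1234, so
   x == LOWPART (x) + HIGHPART (x) * BASE as the comment above states.
   Each half-word "digit" fits in 16 bits, which is what lets the
   multiplication loop in mul_double below hold a digit product and a
   carry in a single word.  */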
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
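
/* Usage sketch (editorial addition, not part of GCC; kept out of the
   build): encode and decode are exact inverses for any (LOW, HI) pair.  */
#if 0
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (words, 0x12345678, 1);  /* words[] holds the 4 half-word digits.  */
  decode (words, &lo, &hi);       /* lo == 0x12345678 and hi == 1 again.  */
#endif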
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
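
/* Worked example (editorial addition): adding 1 to a low word of all
   ones carries into the high word.  With l1 == ~(unsigned
   HOST_WIDE_INT) 0, h1 == 0, l2 == 1, h2 == 0, the sum L wraps to 0,
   the (l < l1) carry is 1, so *LV == 0 and *HV == 1.  No signed
   overflow is reported: both high words were nonnegative and the
   resulting high word still is.  */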
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
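
/* Worked example (editorial addition): negating 5 (l1 == 5, h1 == 0)
   takes the else branch and yields *LV == -5 (all ones minus 4) and
   *HV == ~0 == -1, the two's complement doubleword for -5.  The only
   overflowing input is the most negative doubleword (l1 == 0, h1 with
   only the sign bit set), for which -h1 wraps back to h1 and
   (*hv & h1) < 0 reports the overflow.  */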
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
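
/* Worked example (editorial addition): shifting the doubleword value
   2**HOST_BITS_PER_WIDE_INT (l1 == 0, h1 == 1) right logically by
   HOST_BITS_PER_WIDE_INT with prec == 2 * HOST_BITS_PER_WIDE_INT takes
   the middle branch above and moves the high word into the low word:
   *HV == 0 and *LV == 1.  With ARITH nonzero and a negative H1,
   SIGNMASK is all ones and the vacated bits are filled with copies of
   the sign bit instead of zeros.  */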
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }
  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }
  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
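
/* Worked example of the rounding modes (editorial addition): dividing
   -8 by 3 gives a truncating quotient of -2 with remainder -2.
   FLOOR_DIV_EXPR sees a negative quotient with a nonzero remainder and
   adjusts to -3 (remainder 1); CEIL_DIV_EXPR leaves -2 alone since the
   quotient is not positive; ROUND_DIV_EXPR notes 2 * |rem| == 4 > 3 ==
   |den| and also adjusts away from zero, to -3.  In every case the true
   remainder is then recomputed as num - quo * den.  */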
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TREE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 1)),
                                              TREE_OPERAND (t, 0))));
          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 0)),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TREE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
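
/* Worked example (editorial addition): splitting IN = a - 2 with CODE
   == PLUS_EXPR takes the association branch above, since PLUS_EXPR
   also splits MINUS_EXPR trees.  The literal 2 was subtracted, so it
   lands in *MINUS_LITP, *CONP stays null, and the variable part
   returned is a.  associate_trees below can then recombine the pieces
   with the parts split out of other trees.  */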
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);
  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }
  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
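
/* Usage sketch (editorial addition, not part of GCC; kept out of the
   build): folding 1 + 1 at compile time.  */
#if 0
  tree two = int_const_binop (PLUS_EXPR, integer_one_node,
                              integer_one_node, 0);
  /* TWO is an INTEGER_CST with TREE_INT_CST_LOW (two) == 2 and no
     overflow flags set.  */
#endif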
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST tree node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and that type must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
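
/* Worked example (editorial addition): converting the REAL_CST 3.75 to
   a 32-bit signed integer type with FIX_TRUNC_EXPR yields 3, with
   FIX_CEIL_EXPR yields 4, and with FIX_FLOOR_EXPR yields 3.  Under the
   Java-style rules above, a NaN folds to 0 and out-of-range values
   saturate to TYPE_MIN_VALUE or TYPE_MAX_VALUE, with TREE_OVERFLOW set
   on the result in each of those cases.  */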
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

static tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build (COMPLEX_EXPR, type,
                      fold_convert (TREE_TYPE (type), arg),
                      fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, arg));
  abort ();
}
1984 /* Return an expr equal to X but certainly not valid as an lvalue. */
1986 tree
1987 non_lvalue (tree x)
1989 tree result;
1991 /* These things are certainly not lvalues. */
1992 if (TREE_CODE (x) == NON_LVALUE_EXPR
1993 || TREE_CODE (x) == INTEGER_CST
1994 || TREE_CODE (x) == REAL_CST
1995 || TREE_CODE (x) == STRING_CST
1996 || TREE_CODE (x) == ADDR_EXPR)
1997 return x;
1999 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2000 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2001 return result;
2004 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2005 Zero means allow extended lvalues. */
2007 int pedantic_lvalues;
2009 /* When pedantic, return an expr equal to X but certainly not valid as a
2010 pedantic lvalue. Otherwise, return X. */
2012 tree
2013 pedantic_non_lvalue (tree x)
2015 if (pedantic_lvalues)
2016 return non_lvalue (x);
2017 else
2018 return x;
2021 /* Given a tree comparison code, return the code that is the logical inverse
2022 of the given code. It is not safe to do this for floating-point
2023 comparisons, except for NE_EXPR and EQ_EXPR. */
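/* Example of why inverting is unsafe for floating point: if either
   operand is a NaN, both a < b and a >= b are false, so rewriting
   ! (a < b) as a >= b would change the result.  Only EQ_EXPR and
   NE_EXPR are exact logical inverses of each other in the presence
   of NaNs.  */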
2025 static enum tree_code
2026 invert_tree_comparison (enum tree_code code)
2028 switch (code)
2030 case EQ_EXPR:
2031 return NE_EXPR;
2032 case NE_EXPR:
2033 return EQ_EXPR;
2034 case GT_EXPR:
2035 return LE_EXPR;
2036 case GE_EXPR:
2037 return LT_EXPR;
2038 case LT_EXPR:
2039 return GE_EXPR;
2040 case LE_EXPR:
2041 return GT_EXPR;
2042 default:
2043 abort ();
2047 /* Similar, but return the comparison that results if the operands are
2048 swapped. This is safe for floating-point. */
2050 static enum tree_code
2051 swap_tree_comparison (enum tree_code code)
2053 switch (code)
2055 case EQ_EXPR:
2056 case NE_EXPR:
2057 return code;
2058 case GT_EXPR:
2059 return LT_EXPR;
2060 case GE_EXPR:
2061 return LE_EXPR;
2062 case LT_EXPR:
2063 return GT_EXPR;
2064 case LE_EXPR:
2065 return GE_EXPR;
2066 default:
2067 abort ();
2072 /* Convert a comparison tree code from an enum tree_code representation
2073 into a compcode bit-based encoding. This function is the inverse of
2074 compcode_to_comparison. */
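/* Sketch of the encoding, assuming the COMPCODE_* values defined
   earlier in this file (COMPCODE_LT == 1, COMPCODE_EQ == 2,
   COMPCODE_GT == 4):

       comparison_to_compcode (LE_EXPR) == (COMPCODE_LT | COMPCODE_EQ)
       comparison_to_compcode (NE_EXPR) == (COMPCODE_LT | COMPCODE_GT)

   so two comparisons of the same operands can be combined by bitwise
   AND or OR of their compcodes.  */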
2076 static int
2077 comparison_to_compcode (enum tree_code code)
2079 switch (code)
2081 case LT_EXPR:
2082 return COMPCODE_LT;
2083 case EQ_EXPR:
2084 return COMPCODE_EQ;
2085 case LE_EXPR:
2086 return COMPCODE_LE;
2087 case GT_EXPR:
2088 return COMPCODE_GT;
2089 case NE_EXPR:
2090 return COMPCODE_NE;
2091 case GE_EXPR:
2092 return COMPCODE_GE;
2093 default:
2094 abort ();
2098 /* Convert a compcode bit-based encoding of a comparison operator back
2099 to GCC's enum tree_code representation. This function is the
2100 inverse of comparison_to_compcode. */
2102 static enum tree_code
2103 compcode_to_comparison (int code)
2105 switch (code)
2107 case COMPCODE_LT:
2108 return LT_EXPR;
2109 case COMPCODE_EQ:
2110 return EQ_EXPR;
2111 case COMPCODE_LE:
2112 return LE_EXPR;
2113 case COMPCODE_GT:
2114 return GT_EXPR;
2115 case COMPCODE_NE:
2116 return NE_EXPR;
2117 case COMPCODE_GE:
2118 return GE_EXPR;
2119 default:
2120 abort ();
2124 /* Return nonzero if CODE is a tree code that represents a truth value. */
2126 static int
2127 truth_value_p (enum tree_code code)
2129 return (TREE_CODE_CLASS (code) == '<'
2130 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2131 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2132 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2135 /* Return nonzero if two operands (typically of the same tree node)
2136 are necessarily equal. If either argument has side-effects this
2137 function returns zero.
2139 If ONLY_CONST is nonzero, only return nonzero for constants.
2140 This function tests whether the operands are indistinguishable;
2141 it does not test whether they are equal using C's == operation.
2142 The distinction is important for IEEE floating point, because
2143 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2144 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2146 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2147 even though it may hold multiple values during a function.
2148 This is because a GCC tree node guarantees that nothing else is
2149 executed between the evaluation of its "operands" (which may often
2150 be evaluated in arbitrary order). Hence if the operands themselves
2151 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2152 same value in each operand/subexpression. Hence a zero value for
2153 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2154 If comparing arbitrary expression trees, such as from different
2155 statements, ONLY_CONST must usually be nonzero. */
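/* For example, operand_equal_p treats the REAL_CSTs -0.0 and 0.0 as
   unequal, since REAL_VALUES_IDENTICAL distinguishes signed zeros,
   even though -0.0 == 0.0 is true; conversely two bit-identical NaN
   constants compare equal here even though NaN != NaN at run time.  */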
2157 int
2158 operand_equal_p (tree arg0, tree arg1, int only_const)
2160 tree fndecl;
2162 /* If the two types don't have the same signedness, then we can't consider
2163 them equal. We must check this before the STRIP_NOPS calls
2164 because they may change the signedness of the arguments. */
2165 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2166 return 0;
2168 STRIP_NOPS (arg0);
2169 STRIP_NOPS (arg1);
2171 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2172 /* This is needed for conversions and for COMPONENT_REF.
2173 Might as well play it safe and always test this. */
2174 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2175 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2176 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2177 return 0;
2179 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2180 We don't care about side effects in that case because the SAVE_EXPR
2181 takes care of that for us. In all other cases, two expressions are
2182 equal if they have no side effects. If we have two identical
2183 expressions with side effects that should be treated the same due
2184 to the only side effects being identical SAVE_EXPR's, that will
2185 be detected in the recursive calls below. */
2186 if (arg0 == arg1 && ! only_const
2187 && (TREE_CODE (arg0) == SAVE_EXPR
2188 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2189 return 1;
2191 /* Next handle constant cases, those for which we can return 1 even
2192 if ONLY_CONST is set. */
2193 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2194 switch (TREE_CODE (arg0))
2196 case INTEGER_CST:
2197 return (! TREE_CONSTANT_OVERFLOW (arg0)
2198 && ! TREE_CONSTANT_OVERFLOW (arg1)
2199 && tree_int_cst_equal (arg0, arg1));
2201 case REAL_CST:
2202 return (! TREE_CONSTANT_OVERFLOW (arg0)
2203 && ! TREE_CONSTANT_OVERFLOW (arg1)
2204 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2205 TREE_REAL_CST (arg1)));
2207 case VECTOR_CST:
2209 tree v1, v2;
2211 if (TREE_CONSTANT_OVERFLOW (arg0)
2212 || TREE_CONSTANT_OVERFLOW (arg1))
2213 return 0;
2215 v1 = TREE_VECTOR_CST_ELTS (arg0);
2216 v2 = TREE_VECTOR_CST_ELTS (arg1);
2217 while (v1 && v2)
2219 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2220 only_const))
2221 return 0;
2222 v1 = TREE_CHAIN (v1);
2223 v2 = TREE_CHAIN (v2);
2226 return 1;
2229 case COMPLEX_CST:
2230 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2231 only_const)
2232 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2233 only_const));
2235 case STRING_CST:
2236 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2237 && ! memcmp (TREE_STRING_POINTER (arg0),
2238 TREE_STRING_POINTER (arg1),
2239 TREE_STRING_LENGTH (arg0)));
2241 case ADDR_EXPR:
2242 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2243 0);
2244 default:
2245 break;
2248 if (only_const)
2249 return 0;
2251 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2253 case '1':
2254 /* Two conversions are equal only if signedness and modes match. */
2255 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2256 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2257 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2258 return 0;
2260 return operand_equal_p (TREE_OPERAND (arg0, 0),
2261 TREE_OPERAND (arg1, 0), 0);
2263 case '<':
2264 case '2':
2265 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2266 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2267 0))
2268 return 1;
2270 /* For commutative ops, allow the other order. */
2271 return (commutative_tree_code (TREE_CODE (arg0))
2272 && operand_equal_p (TREE_OPERAND (arg0, 0),
2273 TREE_OPERAND (arg1, 1), 0)
2274 && operand_equal_p (TREE_OPERAND (arg0, 1),
2275 TREE_OPERAND (arg1, 0), 0));
2277 case 'r':
2278 /* If either of the pointer (or reference) expressions we are
2279 dereferencing contain a side effect, these cannot be equal. */
2280 if (TREE_SIDE_EFFECTS (arg0)
2281 || TREE_SIDE_EFFECTS (arg1))
2282 return 0;
2284 switch (TREE_CODE (arg0))
2286 case INDIRECT_REF:
2287 return operand_equal_p (TREE_OPERAND (arg0, 0),
2288 TREE_OPERAND (arg1, 0), 0);
2290 case COMPONENT_REF:
2291 case ARRAY_REF:
2292 case ARRAY_RANGE_REF:
2293 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2294 TREE_OPERAND (arg1, 0), 0)
2295 && operand_equal_p (TREE_OPERAND (arg0, 1),
2296 TREE_OPERAND (arg1, 1), 0));
2298 case BIT_FIELD_REF:
2299 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2300 TREE_OPERAND (arg1, 0), 0)
2301 && operand_equal_p (TREE_OPERAND (arg0, 1),
2302 TREE_OPERAND (arg1, 1), 0)
2303 && operand_equal_p (TREE_OPERAND (arg0, 2),
2304 TREE_OPERAND (arg1, 2), 0));
2305 default:
2306 return 0;
2309 case 'e':
2310 switch (TREE_CODE (arg0))
2312 case ADDR_EXPR:
2313 case TRUTH_NOT_EXPR:
2314 return operand_equal_p (TREE_OPERAND (arg0, 0),
2315 TREE_OPERAND (arg1, 0), 0);
2317 case RTL_EXPR:
2318 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2320 case CALL_EXPR:
2321 /* If the CALL_EXPRs call different functions, then they
2322 clearly can not be equal. */
2323 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2324 TREE_OPERAND (arg1, 0), 0))
2325 return 0;
2327 /* Only consider const functions equivalent. */
2328 fndecl = get_callee_fndecl (arg0);
2329 if (fndecl == NULL_TREE
2330 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2331 return 0;
2333 /* Now see if all the arguments are the same. operand_equal_p
2334 does not handle TREE_LIST, so we walk the operands here
2335 feeding them to operand_equal_p. */
2336 arg0 = TREE_OPERAND (arg0, 1);
2337 arg1 = TREE_OPERAND (arg1, 1);
2338 while (arg0 && arg1)
2340 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2341 return 0;
2343 arg0 = TREE_CHAIN (arg0);
2344 arg1 = TREE_CHAIN (arg1);
2347 /* If we get here and both argument lists are exhausted
2348 then the CALL_EXPRs are equal. */
2349 return ! (arg0 || arg1);
2351 default:
2352 return 0;
2355 case 'd':
2356 /* Consider __builtin_sqrt equal to sqrt. */
2357 return TREE_CODE (arg0) == FUNCTION_DECL
2358 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2359 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2360 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2362 default:
2363 return 0;
2367 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2368 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2370 When in doubt, return 0. */
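/* A hypothetical instance: for a char variable c compared against an
   int constant, shorten_compare may rewrite (int) c == 'a' to operate
   on c directly, so ARG0 would be c, ARG1 the original (int) c, and
   OTHER the constant 'a'.  */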
2372 static int
2373 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2375 int unsignedp1, unsignedpo;
2376 tree primarg0, primarg1, primother;
2377 unsigned int correct_width;
2379 if (operand_equal_p (arg0, arg1, 0))
2380 return 1;
2382 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2383 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2384 return 0;
2386 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2387 and see if the inner values are the same. This removes any
2388 signedness comparison, which doesn't matter here. */
2389 primarg0 = arg0, primarg1 = arg1;
2390 STRIP_NOPS (primarg0);
2391 STRIP_NOPS (primarg1);
2392 if (operand_equal_p (primarg0, primarg1, 0))
2393 return 1;
2395 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2396 actual comparison operand, ARG0.
2398 First throw away any conversions to wider types
2399 already present in the operands. */
2401 primarg1 = get_narrower (arg1, &unsignedp1);
2402 primother = get_narrower (other, &unsignedpo);
2404 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2405 if (unsignedp1 == unsignedpo
2406 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2407 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2409 tree type = TREE_TYPE (arg0);
2411 /* Make sure shorter operand is extended the right way
2412 to match the longer operand. */
2413 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2414 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2416 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2417 return 1;
2420 return 0;
2423 /* See if ARG is an expression that is either a comparison or is performing
2424 arithmetic on comparisons. The comparisons must only be comparing
2425 two different values, which will be stored in *CVAL1 and *CVAL2; if
2426 they are nonzero it means that some operands have already been found.
2427 No variables may be used anywhere else in the expression except in the
2428 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2429 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2431 If this is true, return 1. Otherwise, return zero. */
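/* Illustrative case: for ARG of the form (a < b) | (a == b), both
   comparisons mention only the values a and b, so we return 1 with
   *CVAL1 == a and *CVAL2 == b.  Something like (a < b) + c fails,
   since c appears outside a comparison.  */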
2433 static int
2434 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2436 enum tree_code code = TREE_CODE (arg);
2437 char class = TREE_CODE_CLASS (code);
2439 /* We can handle some of the 'e' cases here. */
2440 if (class == 'e' && code == TRUTH_NOT_EXPR)
2441 class = '1';
2442 else if (class == 'e'
2443 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2444 || code == COMPOUND_EXPR))
2445 class = '2';
2447 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2448 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2450 /* If we've already found a CVAL1 or CVAL2, this expression is
2451 too complex to handle. */
2452 if (*cval1 || *cval2)
2453 return 0;
2455 class = '1';
2456 *save_p = 1;
2459 switch (class)
2461 case '1':
2462 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2464 case '2':
2465 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2466 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2467 cval1, cval2, save_p));
2469 case 'c':
2470 return 1;
2472 case 'e':
2473 if (code == COND_EXPR)
2474 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2475 cval1, cval2, save_p)
2476 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2477 cval1, cval2, save_p)
2478 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2479 cval1, cval2, save_p));
2480 return 0;
2482 case '<':
2483 /* First see if we can handle the first operand, then the second. For
2484 the second operand, we know *CVAL1 can't be zero. It must be that
2485 one side of the comparison is each of the values; test for the
2486 case where this isn't true by failing if the two operands
2487 are the same. */
2489 if (operand_equal_p (TREE_OPERAND (arg, 0),
2490 TREE_OPERAND (arg, 1), 0))
2491 return 0;
2493 if (*cval1 == 0)
2494 *cval1 = TREE_OPERAND (arg, 0);
2495 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2496 ;
2497 else if (*cval2 == 0)
2498 *cval2 = TREE_OPERAND (arg, 0);
2499 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2500 ;
2501 else
2502 return 0;
2504 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2505 ;
2506 else if (*cval2 == 0)
2507 *cval2 = TREE_OPERAND (arg, 1);
2508 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2509 ;
2510 else
2511 return 0;
2513 return 1;
2515 default:
2516 return 0;
2520 /* ARG is a tree that is known to contain just arithmetic operations and
2521 comparisons. Evaluate the operations in the tree substituting NEW0 for
2522 any occurrence of OLD0 as an operand of a comparison and likewise for
2523 NEW1 and OLD1. */
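/* For example, with OLD0 == a, NEW0 == x, OLD1 == b and NEW1 == y,
   eval_subst rewrites (a < b) & (b <= a) into (x < y) & (y <= x);
   only operands of comparisons are substituted, per the caller's
   guarantee above.  */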
2525 static tree
2526 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2528 tree type = TREE_TYPE (arg);
2529 enum tree_code code = TREE_CODE (arg);
2530 char class = TREE_CODE_CLASS (code);
2532 /* We can handle some of the 'e' cases here. */
2533 if (class == 'e' && code == TRUTH_NOT_EXPR)
2534 class = '1';
2535 else if (class == 'e'
2536 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2537 class = '2';
2539 switch (class)
2541 case '1':
2542 return fold (build1 (code, type,
2543 eval_subst (TREE_OPERAND (arg, 0),
2544 old0, new0, old1, new1)));
2546 case '2':
2547 return fold (build (code, type,
2548 eval_subst (TREE_OPERAND (arg, 0),
2549 old0, new0, old1, new1),
2550 eval_subst (TREE_OPERAND (arg, 1),
2551 old0, new0, old1, new1)));
2553 case 'e':
2554 switch (code)
2556 case SAVE_EXPR:
2557 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2559 case COMPOUND_EXPR:
2560 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2562 case COND_EXPR:
2563 return fold (build (code, type,
2564 eval_subst (TREE_OPERAND (arg, 0),
2565 old0, new0, old1, new1),
2566 eval_subst (TREE_OPERAND (arg, 1),
2567 old0, new0, old1, new1),
2568 eval_subst (TREE_OPERAND (arg, 2),
2569 old0, new0, old1, new1)));
2570 default:
2571 break;
2573 /* Fall through - ??? */
2575 case '<':
2577 tree arg0 = TREE_OPERAND (arg, 0);
2578 tree arg1 = TREE_OPERAND (arg, 1);
2580 /* We need to check both for exact equality and tree equality. The
2581 former will be true if the operand has a side-effect. In that
2582 case, we know the operand occurred exactly once. */
2584 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2585 arg0 = new0;
2586 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2587 arg0 = new1;
2589 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2590 arg1 = new0;
2591 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2592 arg1 = new1;
2594 return fold (build (code, type, arg0, arg1));
2597 default:
2598 return arg;
2602 /* Return a tree for the case when the result of an expression is RESULT
2603 converted to TYPE and OMITTED was previously an operand of the expression
2604 but is now not needed (e.g., we folded OMITTED * 0).
2606 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2607 the conversion of RESULT to TYPE. */
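/* For example, folding x * 0 can use omit_one_operand (type, zero, x):
   if x has side effects, say x is i++, the result is the COMPOUND_EXPR
   (i++, 0) so the increment still happens; otherwise the result is
   plain 0 wrapped as a non-lvalue.  */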
2609 tree
2610 omit_one_operand (tree type, tree result, tree omitted)
2612 tree t = fold_convert (type, result);
2614 if (TREE_SIDE_EFFECTS (omitted))
2615 return build (COMPOUND_EXPR, type, omitted, t);
2617 return non_lvalue (t);
2620 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2622 static tree
2623 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2625 tree t = fold_convert (type, result);
2627 if (TREE_SIDE_EFFECTS (omitted))
2628 return build (COMPOUND_EXPR, type, omitted, t);
2630 return pedantic_non_lvalue (t);
2633 /* Return a simplified tree node for the truth-negation of ARG. This
2634 never alters ARG itself. We assume that ARG is an operation that
2635 returns a truth value (0 or 1). */
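/* For example, the truth-negation of a && b is built as !a || !b
   (De Morgan), recursively inverting each operand rather than
   wrapping the whole expression in a single TRUTH_NOT_EXPR.  */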
2637 tree
2638 invert_truthvalue (tree arg)
2640 tree type = TREE_TYPE (arg);
2641 enum tree_code code = TREE_CODE (arg);
2643 if (code == ERROR_MARK)
2644 return arg;
2646 /* If this is a comparison, we can simply invert it, except for
2647 floating-point non-equality comparisons, in which case we just
2648 enclose a TRUTH_NOT_EXPR around what we have. */
2650 if (TREE_CODE_CLASS (code) == '<')
2652 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2653 && !flag_unsafe_math_optimizations
2654 && code != NE_EXPR
2655 && code != EQ_EXPR)
2656 return build1 (TRUTH_NOT_EXPR, type, arg);
2657 else if (code == UNORDERED_EXPR
2658 || code == ORDERED_EXPR
2659 || code == UNEQ_EXPR
2660 || code == UNLT_EXPR
2661 || code == UNLE_EXPR
2662 || code == UNGT_EXPR
2663 || code == UNGE_EXPR)
2664 return build1 (TRUTH_NOT_EXPR, type, arg);
2665 else
2666 return build (invert_tree_comparison (code), type,
2667 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2670 switch (code)
2672 case INTEGER_CST:
2673 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2675 case TRUTH_AND_EXPR:
2676 return build (TRUTH_OR_EXPR, type,
2677 invert_truthvalue (TREE_OPERAND (arg, 0)),
2678 invert_truthvalue (TREE_OPERAND (arg, 1)));
2680 case TRUTH_OR_EXPR:
2681 return build (TRUTH_AND_EXPR, type,
2682 invert_truthvalue (TREE_OPERAND (arg, 0)),
2683 invert_truthvalue (TREE_OPERAND (arg, 1)));
2685 case TRUTH_XOR_EXPR:
2686 /* Here we can invert either operand. We invert the first operand
2687 unless the second operand is a TRUTH_NOT_EXPR in which case our
2688 result is the XOR of the first operand with the inside of the
2689 negation of the second operand. */
2691 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2692 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2693 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2694 else
2695 return build (TRUTH_XOR_EXPR, type,
2696 invert_truthvalue (TREE_OPERAND (arg, 0)),
2697 TREE_OPERAND (arg, 1));
2699 case TRUTH_ANDIF_EXPR:
2700 return build (TRUTH_ORIF_EXPR, type,
2701 invert_truthvalue (TREE_OPERAND (arg, 0)),
2702 invert_truthvalue (TREE_OPERAND (arg, 1)));
2704 case TRUTH_ORIF_EXPR:
2705 return build (TRUTH_ANDIF_EXPR, type,
2706 invert_truthvalue (TREE_OPERAND (arg, 0)),
2707 invert_truthvalue (TREE_OPERAND (arg, 1)));
2709 case TRUTH_NOT_EXPR:
2710 return TREE_OPERAND (arg, 0);
2712 case COND_EXPR:
2713 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2714 invert_truthvalue (TREE_OPERAND (arg, 1)),
2715 invert_truthvalue (TREE_OPERAND (arg, 2)));
2717 case COMPOUND_EXPR:
2718 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2719 invert_truthvalue (TREE_OPERAND (arg, 1)));
2721 case NON_LVALUE_EXPR:
2722 return invert_truthvalue (TREE_OPERAND (arg, 0));
2724 case NOP_EXPR:
2725 case CONVERT_EXPR:
2726 case FLOAT_EXPR:
2727 return build1 (TREE_CODE (arg), type,
2728 invert_truthvalue (TREE_OPERAND (arg, 0)));
2730 case BIT_AND_EXPR:
2731 if (!integer_onep (TREE_OPERAND (arg, 1)))
2732 break;
2733 return build (EQ_EXPR, type, arg,
2734 fold_convert (type, integer_zero_node));
2736 case SAVE_EXPR:
2737 return build1 (TRUTH_NOT_EXPR, type, arg);
2739 case CLEANUP_POINT_EXPR:
2740 return build1 (CLEANUP_POINT_EXPR, type,
2741 invert_truthvalue (TREE_OPERAND (arg, 0)));
2743 default:
2744 break;
2746 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2747 abort ();
2748 return build1 (TRUTH_NOT_EXPR, type, arg);
2751 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2752 operands are another bit-wise operation with a common input. If so,
2753 distribute the bit operations to save an operation and possibly two if
2754 constants are involved. For example, convert
2755 (A | B) & (A | C) into A | (B & C)
2756 Further simplification will occur if B and C are constants.
2758 If this optimization cannot be done, 0 will be returned. */
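/* With constants the saving is larger still: (x | 3) & (x | 5)
   becomes x | (3 & 5), and the inner fold reduces that to x | 1,
   replacing three bit operations by one.  */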
2760 static tree
2761 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2763 tree common;
2764 tree left, right;
2766 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2767 || TREE_CODE (arg0) == code
2768 || (TREE_CODE (arg0) != BIT_AND_EXPR
2769 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2770 return 0;
2772 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2774 common = TREE_OPERAND (arg0, 0);
2775 left = TREE_OPERAND (arg0, 1);
2776 right = TREE_OPERAND (arg1, 1);
2778 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2780 common = TREE_OPERAND (arg0, 0);
2781 left = TREE_OPERAND (arg0, 1);
2782 right = TREE_OPERAND (arg1, 0);
2784 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2786 common = TREE_OPERAND (arg0, 1);
2787 left = TREE_OPERAND (arg0, 0);
2788 right = TREE_OPERAND (arg1, 1);
2790 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2792 common = TREE_OPERAND (arg0, 1);
2793 left = TREE_OPERAND (arg0, 0);
2794 right = TREE_OPERAND (arg1, 0);
2796 else
2797 return 0;
2799 return fold (build (TREE_CODE (arg0), type, common,
2800 fold (build (code, type, left, right))));
2803 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2804 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2806 static tree
2807 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2808 int unsignedp)
2810 tree result = build (BIT_FIELD_REF, type, inner,
2811 size_int (bitsize), bitsize_int (bitpos));
2813 TREE_UNSIGNED (result) = unsignedp;
2815 return result;
2818 /* Optimize a bit-field compare.
2820 There are two cases: First is a compare against a constant and the
2821 second is a comparison of two items where the fields are at the same
2822 bit position relative to the start of a chunk (byte, halfword, word)
2823 large enough to contain it. In these cases we can avoid the shift
2824 implicit in bitfield extractions.
2826 For constants, we emit a compare of the shifted constant with the
2827 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2828 compared. For two fields at the same position, we do the ANDs with the
2829 similar mask and compare the result of the ANDs.
2831 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2832 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2833 are the left and right operands of the comparison, respectively.
2835 If the optimization described above can be done, we return the resulting
2836 tree. Otherwise we return zero. */
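/* Sketch of the constant case, for a hypothetical 3-bit bit-field
   s.f at bit offset 2 within its byte: the comparison

       s.f == 5

   becomes, in effect,

       (containing_byte & mask_for_f) == (5 << 2)

   i.e. one AND and one compare, with no shift needed to extract the
   field itself.  */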
2838 static tree
2839 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2840 tree lhs, tree rhs)
2842 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2843 tree type = TREE_TYPE (lhs);
2844 tree signed_type, unsigned_type;
2845 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2846 enum machine_mode lmode, rmode, nmode;
2847 int lunsignedp, runsignedp;
2848 int lvolatilep = 0, rvolatilep = 0;
2849 tree linner, rinner = NULL_TREE;
2850 tree mask;
2851 tree offset;
2853 /* Get all the information about the extractions being done. If the bit size
2854 is the same as the size of the underlying object, we aren't doing an
2855 extraction at all and so can do nothing. We also don't want to
2856 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2857 then will no longer be able to replace it. */
2858 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2859 &lunsignedp, &lvolatilep);
2860 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2861 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2862 return 0;
2864 if (!const_p)
2866 /* If this is not a constant, we can only do something if bit positions,
2867 sizes, and signedness are the same. */
2868 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2869 &runsignedp, &rvolatilep);
2871 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2872 || lunsignedp != runsignedp || offset != 0
2873 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2874 return 0;
2877 /* See if we can find a mode to refer to this field. We should be able to,
2878 but fail if we can't. */
2879 nmode = get_best_mode (lbitsize, lbitpos,
2880 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2881 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2882 TYPE_ALIGN (TREE_TYPE (rinner))),
2883 word_mode, lvolatilep || rvolatilep);
2884 if (nmode == VOIDmode)
2885 return 0;
2887 /* Set signed and unsigned types of the precision of this mode for the
2888 shifts below. */
2889 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
2890 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
2892 /* Compute the bit position and size for the new reference and our offset
2893 within it. If the new reference is the same size as the original, we
2894 won't optimize anything, so return zero. */
2895 nbitsize = GET_MODE_BITSIZE (nmode);
2896 nbitpos = lbitpos & ~ (nbitsize - 1);
2897 lbitpos -= nbitpos;
2898 if (nbitsize == lbitsize)
2899 return 0;
2901 if (BYTES_BIG_ENDIAN)
2902 lbitpos = nbitsize - lbitsize - lbitpos;
2904 /* Make the mask to be used against the extracted field. */
2905 mask = build_int_2 (~0, ~0);
2906 TREE_TYPE (mask) = unsigned_type;
2907 force_fit_type (mask, 0);
2908 mask = fold_convert (unsigned_type, mask);
2909 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2910 mask = const_binop (RSHIFT_EXPR, mask,
2911 size_int (nbitsize - lbitsize - lbitpos), 0);
2913 if (! const_p)
2914 /* If not comparing with constant, just rework the comparison
2915 and return. */
2916 return build (code, compare_type,
2917 build (BIT_AND_EXPR, unsigned_type,
2918 make_bit_field_ref (linner, unsigned_type,
2919 nbitsize, nbitpos, 1),
2920 mask),
2921 build (BIT_AND_EXPR, unsigned_type,
2922 make_bit_field_ref (rinner, unsigned_type,
2923 nbitsize, nbitpos, 1),
2924 mask));
2926 /* Otherwise, we are handling the constant case. See if the constant is too
2927 big for the field. Warn and return a tree for 0 (false) if so. We do
2928 this not only for its own sake, but to avoid having to test for this
2929 error case below. If we didn't, we might generate wrong code.
2931 For unsigned fields, the constant shifted right by the field length should
2932 be all zero. For signed fields, the high-order bits should agree with
2933 the sign bit. */
2935 if (lunsignedp)
2937 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2938 fold_convert (unsigned_type, rhs),
2939 size_int (lbitsize), 0)))
2941 warning ("comparison is always %d due to width of bit-field",
2942 code == NE_EXPR);
2943 return fold_convert (compare_type,
2944 (code == NE_EXPR
2945 ? integer_one_node : integer_zero_node));
2948 else
2950 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2951 size_int (lbitsize - 1), 0);
2952 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2954 warning ("comparison is always %d due to width of bit-field",
2955 code == NE_EXPR);
2956 return fold_convert (compare_type,
2957 (code == NE_EXPR
2958 ? integer_one_node : integer_zero_node));
2962 /* Single-bit compares should always be against zero. */
2963 if (lbitsize == 1 && ! integer_zerop (rhs))
2965 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2966 rhs = fold_convert (type, integer_zero_node);
2969 /* Make a new bitfield reference, shift the constant over the
2970 appropriate number of bits and mask it with the computed mask
2971 (in case this was a signed field). If we changed it, make a new one. */
2972 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2973 if (lvolatilep)
2975 TREE_SIDE_EFFECTS (lhs) = 1;
2976 TREE_THIS_VOLATILE (lhs) = 1;
2979 rhs = fold (const_binop (BIT_AND_EXPR,
2980 const_binop (LSHIFT_EXPR,
2981 fold_convert (unsigned_type, rhs),
2982 size_int (lbitpos), 0),
2983 mask, 0));
2985 return build (code, compare_type,
2986 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2987 rhs);
2990 /* Subroutine for fold_truthop: decode a field reference.
2992 If EXP is a comparison reference, we return the innermost reference.
2994 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2995 set to the starting bit number.
2997 If the innermost field can be completely contained in a mode-sized
2998 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3000 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3001 otherwise it is not changed.
3003 *PUNSIGNEDP is set to the signedness of the field.
3005 *PMASK is set to the mask used. This is either contained in a
3006 BIT_AND_EXPR or derived from the width of the field.
3008 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3010 Return 0 if this is not a component reference or is one that we can't
3011 do anything with. */
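/* For example, for EXP of the form (unsigned char) s.f & 0x7, where
   s.f is a bit-field, the returned tree is the innermost object
   containing the field, *PAND_MASK is 0x7, and *PMASK is 0x7 merged
   with the mask implied by the field's width.  */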
3013 static tree
3014 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3015 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3016 int *punsignedp, int *pvolatilep,
3017 tree *pmask, tree *pand_mask)
3019 tree outer_type = 0;
3020 tree and_mask = 0;
3021 tree mask, inner, offset;
3022 tree unsigned_type;
3023 unsigned int precision;
3025 /* All the optimizations using this function assume integer fields.
3026 There are problems with FP fields since the type_for_size call
3027 below can fail for, e.g., XFmode. */
3028 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3029 return 0;
3031 /* We are interested in the bare arrangement of bits, so strip everything
3032 that doesn't affect the machine mode. However, record the type of the
3033 outermost expression if it may matter below. */
3034 if (TREE_CODE (exp) == NOP_EXPR
3035 || TREE_CODE (exp) == CONVERT_EXPR
3036 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3037 outer_type = TREE_TYPE (exp);
3038 STRIP_NOPS (exp);
3040 if (TREE_CODE (exp) == BIT_AND_EXPR)
3042 and_mask = TREE_OPERAND (exp, 1);
3043 exp = TREE_OPERAND (exp, 0);
3044 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3045 if (TREE_CODE (and_mask) != INTEGER_CST)
3046 return 0;
3049 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3050 punsignedp, pvolatilep);
3051 if ((inner == exp && and_mask == 0)
3052 || *pbitsize < 0 || offset != 0
3053 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3054 return 0;
3056 /* If the number of bits in the reference is the same as the bitsize of
3057 the outer type, then the outer type gives the signedness. Otherwise
3058 (in case of a small bitfield) the signedness is unchanged. */
3059 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3060 *punsignedp = TREE_UNSIGNED (outer_type);
3062 /* Compute the mask to access the bitfield. */
3063 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3064 precision = TYPE_PRECISION (unsigned_type);
3066 mask = build_int_2 (~0, ~0);
3067 TREE_TYPE (mask) = unsigned_type;
3068 force_fit_type (mask, 0);
3069 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3070 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3072 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3073 if (and_mask != 0)
3074 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3075 fold_convert (unsigned_type, and_mask), mask));
3077 *pmask = mask;
3078 *pand_mask = and_mask;
3079 return inner;
3082 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3083 bit positions. */
3085 static int
3086 all_ones_mask_p (tree mask, int size)
3088 tree type = TREE_TYPE (mask);
3089 unsigned int precision = TYPE_PRECISION (type);
3090 tree tmask;
3092 tmask = build_int_2 (~0, ~0);
3093 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3094 force_fit_type (tmask, 0);
3095 return
3096 tree_int_cst_equal (mask,
3097 const_binop (RSHIFT_EXPR,
3098 const_binop (LSHIFT_EXPR, tmask,
3099 size_int (precision - size),
3100 0),
3101 size_int (precision - size), 0));
3104 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3105 represents the sign bit of EXP's type. If EXP represents a sign
3106 or zero extension, also test VAL against the unextended type.
3107 The return value is the (sub)expression whose sign bit is VAL,
3108 or NULL_TREE otherwise. */
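/* For example, if EXP has an 8-bit type, VAL must be 0x80 (viewed
   unsigned) for EXP to be returned; and if EXP is a widening NOP_EXPR
   of an 8-bit operand, VAL is also tested against the 0x80 sign bit
   of that narrower operand.  */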
3110 static tree
3111 sign_bit_p (tree exp, tree val)
3113 unsigned HOST_WIDE_INT mask_lo, lo;
3114 HOST_WIDE_INT mask_hi, hi;
3115 int width;
3116 tree t;
3118 /* Tree EXP must have an integral type. */
3119 t = TREE_TYPE (exp);
3120 if (! INTEGRAL_TYPE_P (t))
3121 return NULL_TREE;
3123 /* Tree VAL must be an integer constant. */
3124 if (TREE_CODE (val) != INTEGER_CST
3125 || TREE_CONSTANT_OVERFLOW (val))
3126 return NULL_TREE;
3128 width = TYPE_PRECISION (t);
3129 if (width > HOST_BITS_PER_WIDE_INT)
3131 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3132 lo = 0;
3134 mask_hi = ((unsigned HOST_WIDE_INT) -1
3135 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3136 mask_lo = -1;
3138 else
3140 hi = 0;
3141 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3143 mask_hi = 0;
3144 mask_lo = ((unsigned HOST_WIDE_INT) -1
3145 >> (HOST_BITS_PER_WIDE_INT - width));
3148 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3149 treat VAL as if it were unsigned. */
3150 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3151 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3152 return exp;
3154 /* Handle extension from a narrower type. */
3155 if (TREE_CODE (exp) == NOP_EXPR
3156 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3157 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3159 return NULL_TREE;
3162 /* Subroutine for fold_truthop: determine if an operand is simple enough
3163 to be evaluated unconditionally. */
3165 static int
3166 simple_operand_p (tree exp)
3168 /* Strip any conversions that don't change the machine mode. */
3169 while ((TREE_CODE (exp) == NOP_EXPR
3170 || TREE_CODE (exp) == CONVERT_EXPR)
3171 && (TYPE_MODE (TREE_TYPE (exp))
3172 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3173 exp = TREE_OPERAND (exp, 0);
3175 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3176 || (DECL_P (exp)
3177 && ! TREE_ADDRESSABLE (exp)
3178 && ! TREE_THIS_VOLATILE (exp)
3179 && ! DECL_NONLOCAL (exp)
3180 /* Don't regard global variables as simple. They may be
3181 allocated in ways unknown to the compiler (shared memory,
3182 #pragma weak, etc). */
3183 && ! TREE_PUBLIC (exp)
3184 && ! DECL_EXTERNAL (exp)
3185 /* Loading a static variable is unduly expensive, but global
3186 registers aren't expensive. */
3187 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3190 /* The following functions are subroutines to fold_range_test and allow it to
3191 try to change a logical combination of comparisons into a range test.
3193 For example, both
3194 X == 2 || X == 3 || X == 4 || X == 5
3195 and
3196 X >= 2 && X <= 5
3197 are converted to
3198 (unsigned) (X - 2) <= 3
3200 We describe each set of comparisons as being either inside or outside
3201 a range, using a variable named like IN_P, and then describe the
3202 range with a lower and upper bound. If one of the bounds is omitted,
3203 it represents either the highest or lowest value of the type.
3205 In the comments below, we represent a range by two numbers in brackets
3206 preceded by a "+" to designate being inside that range, or a "-" to
3207 designate being outside that range, so the condition can be inverted by
3208 flipping the prefix. An omitted bound is represented by a "-". For
3209 example, "- [-, 10]" means being outside the range starting at the lowest
3210 possible value and ending at 10, in other words, being greater than 10.
3211 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3212 always false.
3214 We set up things so that the missing bounds are handled in a consistent
3215 manner so neither a missing bound nor "true" and "false" need to be
3216 handled using a special case. */
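/* In this notation, x > 10 is "- [-, 10]", x >= 2 && x <= 5 is
   "+ [2, 5]", and its inverse x < 2 || x > 5 is "- [2, 5]".  */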
3218 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3219 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3220 and UPPER1_P are nonzero if the respective argument is an upper bound
3221 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3222 must be specified for a comparison. ARG1 will be converted to ARG0's
3223 type if both are specified. */
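/* For instance, range_binop (LE_EXPR, integer_type_node, 0, 0, c, 1)
   compares a missing lower bound (conceptually minus infinity) with
   an upper bound C, and so yields 1 whatever C's value is.  */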
3225 static tree
3226 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3227 tree arg1, int upper1_p)
3229 tree tem;
3230 int result;
3231 int sgn0, sgn1;
3233 /* If neither arg represents infinity, do the normal operation.
3234 Else, if not a comparison, return infinity. Else handle the special
3235 comparison rules. Note that most of the cases below won't occur, but
3236 are handled for consistency. */
3238 if (arg0 != 0 && arg1 != 0)
3240 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3241 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3242 STRIP_NOPS (tem);
3243 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3246 if (TREE_CODE_CLASS (code) != '<')
3247 return 0;
3249 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3250 for neither. In real maths, we cannot assume open ended ranges are
3251 the same. But, this is computer arithmetic, where numbers are finite.
3252 We can therefore make the transformation of any unbounded range with
3253 the value Z, Z being greater than any representable number. This permits
3254 us to treat unbounded ranges as equal. */
3255 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3256 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3257 switch (code)
3259 case EQ_EXPR:
3260 result = sgn0 == sgn1;
3261 break;
3262 case NE_EXPR:
3263 result = sgn0 != sgn1;
3264 break;
3265 case LT_EXPR:
3266 result = sgn0 < sgn1;
3267 break;
3268 case LE_EXPR:
3269 result = sgn0 <= sgn1;
3270 break;
3271 case GT_EXPR:
3272 result = sgn0 > sgn1;
3273 break;
3274 case GE_EXPR:
3275 result = sgn0 >= sgn1;
3276 break;
3277 default:
3278 abort ();
3281 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3284 /* Given EXP, a logical expression, set the range it is testing into
3285 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3286 actually being tested. *PLOW and *PHIGH will be made of the same type
3287 as the returned expression. If EXP is not a comparison, we will most
3288 likely not be returning a useful value and range. */
3290 static tree
3291 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3293 enum tree_code code;
3294 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3295 tree orig_type = NULL_TREE;
3296 int in_p, n_in_p;
3297 tree low, high, n_low, n_high;
3299 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3300 and see if we can refine the range. Some of the cases below may not
3301 happen, but it doesn't seem worth worrying about this. We "continue"
3302 the outer loop when we've changed something; otherwise we "break"
3303 the switch, which will "break" the while. */
3305 in_p = 0;
3306 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3308 while (1)
3310 code = TREE_CODE (exp);
3312 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3314 if (first_rtl_op (code) > 0)
3315 arg0 = TREE_OPERAND (exp, 0);
3316 if (TREE_CODE_CLASS (code) == '<'
3317 || TREE_CODE_CLASS (code) == '1'
3318 || TREE_CODE_CLASS (code) == '2')
3319 type = TREE_TYPE (arg0);
3320 if (TREE_CODE_CLASS (code) == '2'
3321 || TREE_CODE_CLASS (code) == '<'
3322 || (TREE_CODE_CLASS (code) == 'e'
3323 && TREE_CODE_LENGTH (code) > 1))
3324 arg1 = TREE_OPERAND (exp, 1);
3327 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3328 lose a cast by accident. */
3329 if (type != NULL_TREE && orig_type == NULL_TREE)
3330 orig_type = type;
3332 switch (code)
3334 case TRUTH_NOT_EXPR:
3335 in_p = ! in_p, exp = arg0;
3336 continue;
3338 case EQ_EXPR: case NE_EXPR:
3339 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3340 /* We can only do something if the range is testing for zero
3341 and if the second operand is an integer constant. Note that
3342 saying something is "in" the range we make is done by
3343 complementing IN_P since it will set in the initial case of
3344 being not equal to zero; "out" is leaving it alone. */
3345 if (low == 0 || high == 0
3346 || ! integer_zerop (low) || ! integer_zerop (high)
3347 || TREE_CODE (arg1) != INTEGER_CST)
3348 break;
3350 switch (code)
3352 case NE_EXPR: /* - [c, c] */
3353 low = high = arg1;
3354 break;
3355 case EQ_EXPR: /* + [c, c] */
3356 in_p = ! in_p, low = high = arg1;
3357 break;
3358 case GT_EXPR: /* - [-, c] */
3359 low = 0, high = arg1;
3360 break;
3361 case GE_EXPR: /* + [c, -] */
3362 in_p = ! in_p, low = arg1, high = 0;
3363 break;
3364 case LT_EXPR: /* - [c, -] */
3365 low = arg1, high = 0;
3366 break;
3367 case LE_EXPR: /* + [-, c] */
3368 in_p = ! in_p, low = 0, high = arg1;
3369 break;
3370 default:
3371 abort ();
3374 exp = arg0;
3376 /* If this is an unsigned comparison, we also know that EXP is
3377 greater than or equal to zero. We base the range tests we make
3378 on that fact, so we record it here so we can parse existing
3379 range tests. */
3380 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3382 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3383 1, fold_convert (type, integer_zero_node),
3384 NULL_TREE))
3385 break;
3387 in_p = n_in_p, low = n_low, high = n_high;
3389 /* If the high bound is missing, but we have a nonzero low
3390 bound, reverse the range so it goes from zero to the low bound
3391 minus 1. */
3392 if (high == 0 && low && ! integer_zerop (low))
3394 in_p = ! in_p;
3395 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3396 integer_one_node, 0);
3397 low = fold_convert (type, integer_zero_node);
3400 continue;
3402 case NEGATE_EXPR:
3403 /* (-x) IN [a,b] -> x in [-b, -a] */
3404 n_low = range_binop (MINUS_EXPR, type,
3405 fold_convert (type, integer_zero_node),
3406 0, high, 1);
3407 n_high = range_binop (MINUS_EXPR, type,
3408 fold_convert (type, integer_zero_node),
3409 0, low, 0);
3410 low = n_low, high = n_high;
3411 exp = arg0;
3412 continue;
3414 case BIT_NOT_EXPR:
3415 /* ~ X -> -X - 1 */
3416 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3417 fold_convert (type, integer_one_node));
3418 continue;
3420 case PLUS_EXPR: case MINUS_EXPR:
3421 if (TREE_CODE (arg1) != INTEGER_CST)
3422 break;
3424 /* If EXP is signed, any overflow in the computation is undefined,
3425 so we don't worry about it so long as our computations on
3426 the bounds don't overflow. For unsigned, overflow is defined
3427 and this is exactly the right thing. */
3428 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3429 type, low, 0, arg1, 0);
3430 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3431 type, high, 1, arg1, 0);
3432 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3433 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3434 break;
3436 /* Check for an unsigned range which has wrapped around the maximum
3437 value thus making n_high < n_low, and normalize it. */
3438 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3440 low = range_binop (PLUS_EXPR, type, n_high, 0,
3441 integer_one_node, 0);
3442 high = range_binop (MINUS_EXPR, type, n_low, 0,
3443 integer_one_node, 0);
3445 /* If the range is of the form +/- [ x+1, x ], we won't
3446 be able to normalize it. But then, it represents the
3447 whole range or the empty set, so make it
3448 +/- [ -, - ]. */
3449 if (tree_int_cst_equal (n_low, low)
3450 && tree_int_cst_equal (n_high, high))
3451 low = high = 0;
3452 else
3453 in_p = ! in_p;
3455 else
3456 low = n_low, high = n_high;
3458 exp = arg0;
3459 continue;
3461 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3462 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3463 break;
3465 if (! INTEGRAL_TYPE_P (type)
3466 || (low != 0 && ! int_fits_type_p (low, type))
3467 || (high != 0 && ! int_fits_type_p (high, type)))
3468 break;
3470 n_low = low, n_high = high;
3472 if (n_low != 0)
3473 n_low = fold_convert (type, n_low);
3475 if (n_high != 0)
3476 n_high = fold_convert (type, n_high);
3478 /* If we're converting from an unsigned to a signed type,
3479 we will be doing the comparison as unsigned. The tests above
3480 have already verified that LOW and HIGH are both positive.
3482 So we have to make sure that the original unsigned value will
3483 be interpreted as positive. */
3484 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3486 tree equiv_type = lang_hooks.types.type_for_mode
3487 (TYPE_MODE (type), 1);
3488 tree high_positive;
3490 /* A range without an upper bound is, naturally, unbounded.
3491 Since convert would have cropped a very large value, use
3492 the max value for the destination type. */
3493 high_positive
3494 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3495 : TYPE_MAX_VALUE (type);
3497 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3498 high_positive = fold (build (RSHIFT_EXPR, type,
3499 fold_convert (type,
3500 high_positive),
3501 fold_convert (type,
3502 integer_one_node)));
3504 /* If the low bound is specified, "and" the range with the
3505 range for which the original unsigned value will be
3506 positive. */
3507 if (low != 0)
3509 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3510 1, n_low, n_high, 1,
3511 fold_convert (type, integer_zero_node),
3512 high_positive))
3513 break;
3515 in_p = (n_in_p == in_p);
3517 else
3519 /* Otherwise, "or" the range with the range of the input
3520 that will be interpreted as negative. */
3521 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3522 0, n_low, n_high, 1,
3523 fold_convert (type, integer_zero_node),
3524 high_positive))
3525 break;
3527 in_p = (in_p != n_in_p);
3531 exp = arg0;
3532 low = n_low, high = n_high;
3533 continue;
3535 default:
3536 break;
3539 break;
3542 /* If EXP is a constant, we can evaluate whether this is true or false. */
3543 if (TREE_CODE (exp) == INTEGER_CST)
3545 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3546 exp, 0, low, 0))
3547 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3548 exp, 1, high, 1)));
3549 low = high = 0;
3550 exp = 0;
3553 *pin_p = in_p, *plow = low, *phigh = high;
3554 return exp;
3557 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3558 type, TYPE, return an expression to test if EXP is in (or out of, depending
3559 on IN_P) the range. */
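/* The key trick appears at the end of the function below: the
   two-sided test LOW <= EXP && EXP <= HIGH is reduced to the single
   unsigned comparison

       (unsigned) (EXP - LOW) <= (unsigned) (HIGH - LOW)

   because after subtracting LOW, exactly the in-range values avoid
   wrapping around zero.  */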
3561 static tree
3562 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3564 tree etype = TREE_TYPE (exp);
3565 tree value;
3567 if (! in_p
3568 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3569 return invert_truthvalue (value);
3571 if (low == 0 && high == 0)
3572 return fold_convert (type, integer_one_node);
3574 if (low == 0)
3575 return fold (build (LE_EXPR, type, exp, high));
3577 if (high == 0)
3578 return fold (build (GE_EXPR, type, exp, low));
3580 if (operand_equal_p (low, high, 0))
3581 return fold (build (EQ_EXPR, type, exp, low));
3583 if (integer_zerop (low))
3585 if (! TREE_UNSIGNED (etype))
3587 etype = lang_hooks.types.unsigned_type (etype);
3588 high = fold_convert (etype, high);
3589 exp = fold_convert (etype, exp);
3591 return build_range_check (type, exp, 1, 0, high);
3594 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3595 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3597 unsigned HOST_WIDE_INT lo;
3598 HOST_WIDE_INT hi;
3599 int prec;
3601 prec = TYPE_PRECISION (etype);
3602 if (prec <= HOST_BITS_PER_WIDE_INT)
3604 hi = 0;
3605 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3607 else
3609 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3610 lo = (unsigned HOST_WIDE_INT) -1;
3613 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3615 if (TREE_UNSIGNED (etype))
3617 etype = lang_hooks.types.signed_type (etype);
3618 exp = fold_convert (etype, exp);
3620 return fold (build (GT_EXPR, type, exp,
3621 fold_convert (etype, integer_zero_node)));
3625 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3626 && ! TREE_OVERFLOW (value))
3627 return build_range_check (type,
3628 fold (build (MINUS_EXPR, etype, exp, low)),
3629 1, fold_convert (etype, integer_zero_node),
3630 value);
3632 return 0;
3635 /* Given two ranges, see if we can merge them into one. Return 1 if we
3636 can, 0 if we can't. Set the output range into the specified parameters. */
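/* Example: merging "+ [2, 5]" with "+ [4, 9]" (both ranges included,
   i.e. a logical AND of the two membership tests) yields "+ [4, 5]",
   their intersection, while merging "+ [2, 5]" with "+ [8, 9]" yields
   the always-false range "- [-, -]" since the two do not overlap.  */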
3638 static int
3639 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3640 tree high0, int in1_p, tree low1, tree high1)
3642 int no_overlap;
3643 int subset;
3644 int temp;
3645 tree tem;
3646 int in_p;
3647 tree low, high;
3648 int lowequal = ((low0 == 0 && low1 == 0)
3649 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3650 low0, 0, low1, 0)));
3651 int highequal = ((high0 == 0 && high1 == 0)
3652 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3653 high0, 1, high1, 1)));
3655 /* Make range 0 be the range that starts first, or ends last if they
3656 start at the same value. Swap them if it isn't. */
3657 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3658 low0, 0, low1, 0))
3659 || (lowequal
3660 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3661 high1, 1, high0, 1))))
3663 temp = in0_p, in0_p = in1_p, in1_p = temp;
3664 tem = low0, low0 = low1, low1 = tem;
3665 tem = high0, high0 = high1, high1 = tem;
3668 /* Now flag two cases, whether the ranges are disjoint or whether the
3669 second range is totally subsumed in the first. Note that the tests
3670 below are simplified by the ones above. */
3671 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3672 high0, 1, low1, 0));
3673 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3674 high1, 1, high0, 1));
3676 /* We now have four cases, depending on whether we are including or
3677 excluding the two ranges. */
3678 if (in0_p && in1_p)
3680 /* If they don't overlap, the result is false. If the second range
3681 is a subset it is the result. Otherwise, the range is from the start
3682 of the second to the end of the first. */
3683 if (no_overlap)
3684 in_p = 0, low = high = 0;
3685 else if (subset)
3686 in_p = 1, low = low1, high = high1;
3687 else
3688 in_p = 1, low = low1, high = high0;
3691 else if (in0_p && ! in1_p)
3693 /* If they don't overlap, the result is the first range. If they are
3694 equal, the result is false. If the second range is a subset of the
3695 first, and the ranges begin at the same place, we go from just after
3696 the end of the first range to the end of the second. If the second
3697 range is not a subset of the first, or if it is a subset and both
3698 ranges end at the same place, the range starts at the start of the
3699 first range and ends just before the second range.
3700 Otherwise, we can't describe this as a single range. */
3701 if (no_overlap)
3702 in_p = 1, low = low0, high = high0;
3703 else if (lowequal && highequal)
3704 in_p = 0, low = high = 0;
3705 else if (subset && lowequal)
3707 in_p = 1, high = high0;
3708 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3709 integer_one_node, 0);
3711 else if (! subset || highequal)
3713 in_p = 1, low = low0;
3714 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3715 integer_one_node, 0);
3717 else
3718 return 0;
3721 else if (! in0_p && in1_p)
3723 /* If they don't overlap, the result is the second range. If the second
3724 is a subset of the first, the result is false. Otherwise,
3725 the range starts just after the first range and ends at the
3726 end of the second. */
3727 if (no_overlap)
3728 in_p = 1, low = low1, high = high1;
3729 else if (subset || highequal)
3730 in_p = 0, low = high = 0;
3731 else
3733 in_p = 1, high = high1;
3734 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3735 integer_one_node, 0);
3739 else
3741 /* The case where we are excluding both ranges. Here the complex case
3742 is if they don't overlap. In that case, the only time we have a
3743 range is if they are adjacent. If the second is a subset of the
3744 first, the result is the first. Otherwise, the range to exclude
3745 starts at the beginning of the first range and ends at the end of the
3746 second. */
3747 if (no_overlap)
3749 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3750 range_binop (PLUS_EXPR, NULL_TREE,
3751 high0, 1,
3752 integer_one_node, 1),
3753 1, low1, 0)))
3754 in_p = 0, low = low0, high = high1;
3755 else
3756 return 0;
3758 else if (subset)
3759 in_p = 0, low = low0, high = high0;
3760 else
3761 in_p = 0, low = low0, high = high1;
3764 *pin_p = in_p, *plow = low, *phigh = high;
3765 return 1;
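
/* Worked example (illustrative, not part of this file): merging the
   "in" ranges of `x >= 2 && x <= 10' and `x >= 5', i.e. +[2, 10]
   and +[5, -], yields the single range +[5, 10]:

     int before (int x) { return (x >= 2 && x <= 10) && x >= 5; }
     int after  (int x) { return x >= 5 && x <= 10; }

   Both functions agree for every int x, which is what *PIN_P,
   *PLOW and *PHIGH encode on success.  */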
3768 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3769 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3770 #endif
3772 /* EXP is some logical combination of boolean tests. See if we can
3773 merge it into some range test. Return the new tree if so. */
3775 static tree
3776 fold_range_test (tree exp)
3778 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3779 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3780 int in0_p, in1_p, in_p;
3781 tree low0, low1, low, high0, high1, high;
3782 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3783 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3784 tree tem;
3786 /* If this is an OR operation, invert both sides; we will invert
3787 again at the end. */
3788 if (or_op)
3789 in0_p = ! in0_p, in1_p = ! in1_p;
3791 /* If both expressions are the same, if we can merge the ranges, and we
3792 can build the range test, return it or its inversion. If one of the
3793 ranges is always true or always false, consider it to be the same
3794 expression as the other. */
3795 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3796 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3797 in1_p, low1, high1)
3798 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3799 lhs != 0 ? lhs
3800 : rhs != 0 ? rhs : integer_zero_node,
3801 in_p, low, high))))
3802 return or_op ? invert_truthvalue (tem) : tem;
3804 /* On machines where the branch cost is expensive, if this is a
3805 short-circuited branch and the underlying object on both sides
3806 is the same, make a non-short-circuit operation. */
3807 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3808 && lhs != 0 && rhs != 0
3809 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3810 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3811 && operand_equal_p (lhs, rhs, 0))
3813 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3814 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3815 which cases we can't do this. */
3816 if (simple_operand_p (lhs))
3817 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3818 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3819 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3820 TREE_OPERAND (exp, 1));
3822 else if (lang_hooks.decls.global_bindings_p () == 0
3823 && ! CONTAINS_PLACEHOLDER_P (lhs))
3825 tree common = save_expr (lhs);
3827 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3828 or_op ? ! in0_p : in0_p,
3829 low0, high0))
3830 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3831 or_op ? ! in1_p : in1_p,
3832 low1, high1))))
3833 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3834 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3835 TREE_TYPE (exp), lhs, rhs);
3839 return 0;
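
/* Illustrative sketch (assuming the usual unsigned-subtraction form
   that build_range_check emits for a bounded range): a chain of
   tests on one object can collapse into a single range check,

     int before (int c) { return c >= '0' && c <= '9'; }
     int after  (int c) { return (unsigned) (c - '0') <= 9; }

   and both return the same value for every int c.  */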
3842 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3843 bit value. Arrange things so the extra bits will be set to zero if and
3844 only if C is sign-extended to its full width. If MASK is nonzero,
3845 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3847 static tree
3848 unextend (tree c, int p, int unsignedp, tree mask)
3850 tree type = TREE_TYPE (c);
3851 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3852 tree temp;
3854 if (p == modesize || unsignedp)
3855 return c;
3857 /* We work by getting just the sign bit into the low-order bit, then
3858 into the high-order bit, then sign-extend. We then XOR that value
3859 with C. */
3860 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3861 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3863 /* We must use a signed type in order to get an arithmetic right shift.
3864 However, we must also avoid introducing accidental overflows, so that
3865 a subsequent call to integer_zerop will work. Hence we must
3866 do the type conversion here. At this point, the constant is either
3867 zero or one, and the conversion to a signed type can never overflow.
3868 We could get an overflow if this conversion is done anywhere else. */
3869 if (TREE_UNSIGNED (type))
3870 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
3872 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3873 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3874 if (mask != 0)
3875 temp = const_binop (BIT_AND_EXPR, temp,
3876 fold_convert (TREE_TYPE (c), mask), 0);
3877 /* If necessary, convert the type back to match the type of C. */
3878 if (TREE_UNSIGNED (type))
3879 temp = fold_convert (type, temp);
3881 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
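
/* Worked example (illustrative): take P = 4 in an 8-bit mode with
   C = 0b00001010, whose bit 3 (the sign bit of a 4-bit value) is
   set.  The shifts above isolate that bit and smear it leftward,
   giving temp = 0b11110000, and

     C ^ temp == 0b11111010

   which is exactly C sign-extended from 4 bits to 8 -- so the
   extra bits end up nonzero iff sign extension would set them.  */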
3884 /* Find ways of folding logical expressions of LHS and RHS:
3885 Try to merge two comparisons to the same innermost item.
3886 Look for range tests like "ch >= '0' && ch <= '9'".
3887 Look for combinations of simple terms on machines with expensive branches
3888 and evaluate the RHS unconditionally.
3890 For example, if we have p->a == 2 && p->b == 4 and we can make an
3891 object large enough to span both A and B, we can do this with a comparison
3892 against the object ANDed with a mask.
3894 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3895 operations to do this with one comparison.
3897 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3898 function and the one above.
3900 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3901 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3903 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
3904 two operands.
3906 We return the simplified tree or 0 if no optimization is possible. */
3908 static tree
3909 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3911 /* If this is the "or" of two comparisons, we can do something if
3912 the comparisons are NE_EXPR. If this is the "and", we can do something
3913 if the comparisons are EQ_EXPR. I.e.,
3914 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3916 WANTED_CODE is this operation code. For single bit fields, we can
3917 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3918 comparison for one-bit fields. */
3920 enum tree_code wanted_code;
3921 enum tree_code lcode, rcode;
3922 tree ll_arg, lr_arg, rl_arg, rr_arg;
3923 tree ll_inner, lr_inner, rl_inner, rr_inner;
3924 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3925 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3926 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3927 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3928 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3929 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3930 enum machine_mode lnmode, rnmode;
3931 tree ll_mask, lr_mask, rl_mask, rr_mask;
3932 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3933 tree l_const, r_const;
3934 tree lntype, rntype, result;
3935 int first_bit, end_bit;
3936 int volatilep;
3938 /* Start by getting the comparison codes. Fail if anything is volatile.
3939 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3940 it were surrounded with a NE_EXPR. */
3942 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3943 return 0;
3945 lcode = TREE_CODE (lhs);
3946 rcode = TREE_CODE (rhs);
3948 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3949 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3951 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3952 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3954 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3955 return 0;
3957 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3958 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3960 ll_arg = TREE_OPERAND (lhs, 0);
3961 lr_arg = TREE_OPERAND (lhs, 1);
3962 rl_arg = TREE_OPERAND (rhs, 0);
3963 rr_arg = TREE_OPERAND (rhs, 1);
3965 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3966 if (simple_operand_p (ll_arg)
3967 && simple_operand_p (lr_arg)
3968 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3970 int compcode;
3972 if (operand_equal_p (ll_arg, rl_arg, 0)
3973 && operand_equal_p (lr_arg, rr_arg, 0))
3975 int lcompcode, rcompcode;
3977 lcompcode = comparison_to_compcode (lcode);
3978 rcompcode = comparison_to_compcode (rcode);
3979 compcode = (code == TRUTH_AND_EXPR)
3980 ? lcompcode & rcompcode
3981 : lcompcode | rcompcode;
3983 else if (operand_equal_p (ll_arg, rr_arg, 0)
3984 && operand_equal_p (lr_arg, rl_arg, 0))
3986 int lcompcode, rcompcode;
3988 rcode = swap_tree_comparison (rcode);
3989 lcompcode = comparison_to_compcode (lcode);
3990 rcompcode = comparison_to_compcode (rcode);
3991 compcode = (code == TRUTH_AND_EXPR)
3992 ? lcompcode & rcompcode
3993 : lcompcode | rcompcode;
3995 else
3996 compcode = -1;
3998 if (compcode == COMPCODE_TRUE)
3999 return fold_convert (truth_type, integer_one_node);
4000 else if (compcode == COMPCODE_FALSE)
4001 return fold_convert (truth_type, integer_zero_node);
4002 else if (compcode != -1)
4003 return build (compcode_to_comparison (compcode),
4004 truth_type, ll_arg, lr_arg);
4007 /* If the RHS can be evaluated unconditionally and its operands are
4008 simple, it wins to evaluate the RHS unconditionally on machines
4009 with expensive branches. In this case, this isn't a comparison
4010 that can be merged. Avoid doing this if the RHS is a floating-point
4011 comparison since those can trap. */
4013 if (BRANCH_COST >= 2
4014 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4015 && simple_operand_p (rl_arg)
4016 && simple_operand_p (rr_arg))
4018 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4019 if (code == TRUTH_OR_EXPR
4020 && lcode == NE_EXPR && integer_zerop (lr_arg)
4021 && rcode == NE_EXPR && integer_zerop (rr_arg)
4022 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4023 return build (NE_EXPR, truth_type,
4024 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4025 ll_arg, rl_arg),
4026 integer_zero_node);
4028 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4029 if (code == TRUTH_AND_EXPR
4030 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4031 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4032 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4033 return build (EQ_EXPR, truth_type,
4034 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4035 ll_arg, rl_arg),
4036 integer_zero_node);
4038 return build (code, truth_type, lhs, rhs);
4041 /* See if the comparisons can be merged. Then get all the parameters for
4042 each side. */
4044 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4045 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4046 return 0;
4048 volatilep = 0;
4049 ll_inner = decode_field_reference (ll_arg,
4050 &ll_bitsize, &ll_bitpos, &ll_mode,
4051 &ll_unsignedp, &volatilep, &ll_mask,
4052 &ll_and_mask);
4053 lr_inner = decode_field_reference (lr_arg,
4054 &lr_bitsize, &lr_bitpos, &lr_mode,
4055 &lr_unsignedp, &volatilep, &lr_mask,
4056 &lr_and_mask);
4057 rl_inner = decode_field_reference (rl_arg,
4058 &rl_bitsize, &rl_bitpos, &rl_mode,
4059 &rl_unsignedp, &volatilep, &rl_mask,
4060 &rl_and_mask);
4061 rr_inner = decode_field_reference (rr_arg,
4062 &rr_bitsize, &rr_bitpos, &rr_mode,
4063 &rr_unsignedp, &volatilep, &rr_mask,
4064 &rr_and_mask);
4066 /* The inner operation on the lhs of each
4067 comparison must be the same if we are to be able to do anything.
4068 Then see if we have constants. If not, the same must be true for
4069 the rhs's. */
4070 if (volatilep || ll_inner == 0 || rl_inner == 0
4071 || ! operand_equal_p (ll_inner, rl_inner, 0))
4072 return 0;
4074 if (TREE_CODE (lr_arg) == INTEGER_CST
4075 && TREE_CODE (rr_arg) == INTEGER_CST)
4076 l_const = lr_arg, r_const = rr_arg;
4077 else if (lr_inner == 0 || rr_inner == 0
4078 || ! operand_equal_p (lr_inner, rr_inner, 0))
4079 return 0;
4080 else
4081 l_const = r_const = 0;
4083 /* If either comparison code is not correct for our logical operation,
4084 fail. However, we can convert a one-bit comparison against zero into
4085 the opposite comparison against that bit being set in the field. */
4087 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4088 if (lcode != wanted_code)
4090 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4092 /* Make the left operand unsigned, since we are only interested
4093 in the value of one bit. Otherwise we are doing the wrong
4094 thing below. */
4095 ll_unsignedp = 1;
4096 l_const = ll_mask;
4098 else
4099 return 0;
4102 /* This is analogous to the code for l_const above. */
4103 if (rcode != wanted_code)
4105 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4107 rl_unsignedp = 1;
4108 r_const = rl_mask;
4110 else
4111 return 0;
4114 /* After this point all optimizations will generate bit-field
4115 references, which we might not want. */
4116 if (! lang_hooks.can_use_bit_fields_p ())
4117 return 0;
4119 /* See if we can find a mode that contains both fields being compared on
4120 the left. If we can't, fail. Otherwise, update all constants and masks
4121 to be relative to a field of that size. */
4122 first_bit = MIN (ll_bitpos, rl_bitpos);
4123 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4124 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4125 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4126 volatilep);
4127 if (lnmode == VOIDmode)
4128 return 0;
4130 lnbitsize = GET_MODE_BITSIZE (lnmode);
4131 lnbitpos = first_bit & ~ (lnbitsize - 1);
4132 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4133 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4135 if (BYTES_BIG_ENDIAN)
4137 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4138 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4141 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4142 size_int (xll_bitpos), 0);
4143 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4144 size_int (xrl_bitpos), 0);
4146 if (l_const)
4148 l_const = fold_convert (lntype, l_const);
4149 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4150 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4151 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4152 fold (build1 (BIT_NOT_EXPR,
4153 lntype, ll_mask)),
4154 0)))
4156 warning ("comparison is always %d", wanted_code == NE_EXPR);
4158 return fold_convert (truth_type,
4159 wanted_code == NE_EXPR
4160 ? integer_one_node : integer_zero_node);
4163 if (r_const)
4165 r_const = fold_convert (lntype, r_const);
4166 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4167 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4168 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4169 fold (build1 (BIT_NOT_EXPR,
4170 lntype, rl_mask)),
4171 0)))
4173 warning ("comparison is always %d", wanted_code == NE_EXPR);
4175 return fold_convert (truth_type,
4176 wanted_code == NE_EXPR
4177 ? integer_one_node : integer_zero_node);
4181 /* If the right sides are not constant, do the same for them. Also,
4182 disallow this optimization if a size or signedness mismatch occurs
4183 between the left and right sides. */
4184 if (l_const == 0)
4186 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4187 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4188 /* Make sure the two fields on the right
4189 correspond to the left without being swapped. */
4190 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4191 return 0;
4193 first_bit = MIN (lr_bitpos, rr_bitpos);
4194 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4195 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4196 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4197 volatilep);
4198 if (rnmode == VOIDmode)
4199 return 0;
4201 rnbitsize = GET_MODE_BITSIZE (rnmode);
4202 rnbitpos = first_bit & ~ (rnbitsize - 1);
4203 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4204 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4206 if (BYTES_BIG_ENDIAN)
4208 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4209 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4212 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4213 size_int (xlr_bitpos), 0);
4214 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4215 size_int (xrr_bitpos), 0);
4217 /* Make a mask that corresponds to both fields being compared.
4218 Do this for both items being compared. If the operands are the
4219 same size and the bits being compared are in the same position
4220 then we can do this by masking both and comparing the masked
4221 results. */
4222 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4223 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4224 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4226 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4227 ll_unsignedp || rl_unsignedp);
4228 if (! all_ones_mask_p (ll_mask, lnbitsize))
4229 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4231 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4232 lr_unsignedp || rr_unsignedp);
4233 if (! all_ones_mask_p (lr_mask, rnbitsize))
4234 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4236 return build (wanted_code, truth_type, lhs, rhs);
4239 /* There is still another way we can do something: If both pairs of
4240 fields being compared are adjacent, we may be able to make a wider
4241 field containing them both.
4243 Note that we still must mask the lhs/rhs expressions. Furthermore,
4244 the mask must be shifted to account for the shift done by
4245 make_bit_field_ref. */
4246 if ((ll_bitsize + ll_bitpos == rl_bitpos
4247 && lr_bitsize + lr_bitpos == rr_bitpos)
4248 || (ll_bitpos == rl_bitpos + rl_bitsize
4249 && lr_bitpos == rr_bitpos + rr_bitsize))
4251 tree type;
4253 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4254 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4255 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4256 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4258 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4259 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4260 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4261 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4263 /* Convert to the smaller type before masking out unwanted bits. */
4264 type = lntype;
4265 if (lntype != rntype)
4267 if (lnbitsize > rnbitsize)
4269 lhs = fold_convert (rntype, lhs);
4270 ll_mask = fold_convert (rntype, ll_mask);
4271 type = rntype;
4273 else if (lnbitsize < rnbitsize)
4275 rhs = fold_convert (lntype, rhs);
4276 lr_mask = fold_convert (lntype, lr_mask);
4277 type = lntype;
4281 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4282 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4284 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4285 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4287 return build (wanted_code, truth_type, lhs, rhs);
4290 return 0;
4293 /* Handle the case of comparisons with constants. If there is something in
4294 common between the masks, those bits of the constants must be the same.
4295 If not, the condition is always false. Test for this to avoid generating
4296 incorrect code below. */
4297 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4298 if (! integer_zerop (result)
4299 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4300 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4302 if (wanted_code == NE_EXPR)
4304 warning ("`or' of unmatched not-equal tests is always 1");
4305 return fold_convert (truth_type, integer_one_node);
4307 else
4309 warning ("`and' of mutually exclusive equal-tests is always 0");
4310 return fold_convert (truth_type, integer_zero_node);
4314 /* Construct the expression we will return. First get the component
4315 reference we will make. Unless the mask is all ones the width of
4316 that field, perform the mask operation. Then compare with the
4317 merged constant. */
4318 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4319 ll_unsignedp || rl_unsignedp);
4321 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4322 if (! all_ones_mask_p (ll_mask, lnbitsize))
4323 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4325 return build (wanted_code, truth_type, result,
4326 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
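
/* Illustrative sketch (hypothetical layout, not from this file):
   given

     struct s { unsigned a : 4; unsigned b : 4; };

   fold_truthop can turn `p->a == 2 && p->b == 4' into one load,
   mask and compare of the byte containing both fields, conceptually

     (*(unsigned char *) p & 0xff) == 0x42

   assuming B lands in the high nibble; the actual container type,
   mask and merged constant depend on the target ABI and the
   BYTES_BIG_ENDIAN handling above.  */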
4329 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4330 constant. */
4332 static tree
4333 optimize_minmax_comparison (tree t)
4335 tree type = TREE_TYPE (t);
4336 tree arg0 = TREE_OPERAND (t, 0);
4337 enum tree_code op_code;
4338 tree comp_const = TREE_OPERAND (t, 1);
4339 tree minmax_const;
4340 int consts_equal, consts_lt;
4341 tree inner;
4343 STRIP_SIGN_NOPS (arg0);
4345 op_code = TREE_CODE (arg0);
4346 minmax_const = TREE_OPERAND (arg0, 1);
4347 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4348 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4349 inner = TREE_OPERAND (arg0, 0);
4351 /* If something does not permit us to optimize, return the original tree. */
4352 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4353 || TREE_CODE (comp_const) != INTEGER_CST
4354 || TREE_CONSTANT_OVERFLOW (comp_const)
4355 || TREE_CODE (minmax_const) != INTEGER_CST
4356 || TREE_CONSTANT_OVERFLOW (minmax_const))
4357 return t;
4359 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4360 and GT_EXPR, doing the rest with recursive calls using logical
4361 simplifications. */
4362 switch (TREE_CODE (t))
4364 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4365 return
4366 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4368 case GE_EXPR:
4369 return
4370 fold (build (TRUTH_ORIF_EXPR, type,
4371 optimize_minmax_comparison
4372 (build (EQ_EXPR, type, arg0, comp_const)),
4373 optimize_minmax_comparison
4374 (build (GT_EXPR, type, arg0, comp_const))));
4376 case EQ_EXPR:
4377 if (op_code == MAX_EXPR && consts_equal)
4378 /* MAX (X, 0) == 0 -> X <= 0 */
4379 return fold (build (LE_EXPR, type, inner, comp_const));
4381 else if (op_code == MAX_EXPR && consts_lt)
4382 /* MAX (X, 0) == 5 -> X == 5 */
4383 return fold (build (EQ_EXPR, type, inner, comp_const));
4385 else if (op_code == MAX_EXPR)
4386 /* MAX (X, 0) == -1 -> false */
4387 return omit_one_operand (type, integer_zero_node, inner);
4389 else if (consts_equal)
4390 /* MIN (X, 0) == 0 -> X >= 0 */
4391 return fold (build (GE_EXPR, type, inner, comp_const));
4393 else if (consts_lt)
4394 /* MIN (X, 0) == 5 -> false */
4395 return omit_one_operand (type, integer_zero_node, inner);
4397 else
4398 /* MIN (X, 0) == -1 -> X == -1 */
4399 return fold (build (EQ_EXPR, type, inner, comp_const));
4401 case GT_EXPR:
4402 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4403 /* MAX (X, 0) > 0 -> X > 0
4404 MAX (X, 0) > 5 -> X > 5 */
4405 return fold (build (GT_EXPR, type, inner, comp_const));
4407 else if (op_code == MAX_EXPR)
4408 /* MAX (X, 0) > -1 -> true */
4409 return omit_one_operand (type, integer_one_node, inner);
4411 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4412 /* MIN (X, 0) > 0 -> false
4413 MIN (X, 0) > 5 -> false */
4414 return omit_one_operand (type, integer_zero_node, inner);
4416 else
4417 /* MIN (X, 0) > -1 -> X > -1 */
4418 return fold (build (GT_EXPR, type, inner, comp_const));
4420 default:
4421 return t;
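
/* Spot check (illustrative) for the GT_EXPR case above, writing
   MAX (X, 0) as a conditional:

     int before (int x) { return (x > 0 ? x : 0) > 5; }
     int after  (int x) { return x > 5; }

   These agree for all x: when x <= 0 the maximum is 0, which is
   not greater than 5, and otherwise the maximum is x itself.  */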
4425 /* T is an integer expression that is being multiplied by, divided by, or
4426 reduced modulo a constant C (CODE says which operation and what kind of
4427 division or modulus). See if we can eliminate that operation by folding it with
4428 other operations already in T. WIDE_TYPE, if non-null, is a type that
4429 should be used for the computation if wider than our type.
4431 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4432 (X * 2) + (Y * 4). We must, however, be assured that either the original
4433 expression would not overflow or that overflow is undefined for the type
4434 in the language in question.
4436 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4437 the machine has a multiply-accumulate insn or that this is part of an
4438 addressing calculation.
4440 If we return a non-null expression, it is an equivalent form of the
4441 original computation, but need not be in the original type. */
4443 static tree
4444 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4446 /* To avoid exponential search depth, refuse to allow recursion past
4447 three levels. Beyond that (1) it's highly unlikely that we'll find
4448 something interesting and (2) we've probably processed it before
4449 when we built the inner expression. */
4451 static int depth;
4452 tree ret;
4454 if (depth > 3)
4455 return NULL;
4457 depth++;
4458 ret = extract_muldiv_1 (t, c, code, wide_type);
4459 depth--;
4461 return ret;
4464 static tree
4465 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4467 tree type = TREE_TYPE (t);
4468 enum tree_code tcode = TREE_CODE (t);
4469 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4470 > GET_MODE_SIZE (TYPE_MODE (type)))
4471 ? wide_type : type);
4472 tree t1, t2;
4473 int same_p = tcode == code;
4474 tree op0 = NULL_TREE, op1 = NULL_TREE;
4476 /* Don't deal with constants of zero here; they confuse the code below. */
4477 if (integer_zerop (c))
4478 return NULL_TREE;
4480 if (TREE_CODE_CLASS (tcode) == '1')
4481 op0 = TREE_OPERAND (t, 0);
4483 if (TREE_CODE_CLASS (tcode) == '2')
4484 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4486 /* Note that we need not handle conditional operations here since fold
4487 already handles those cases. So just do arithmetic here. */
4488 switch (tcode)
4490 case INTEGER_CST:
4491 /* For a constant, we can always simplify if we are a multiply
4492 or (for divide and modulus) if it is a multiple of our constant. */
4493 if (code == MULT_EXPR
4494 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4495 return const_binop (code, fold_convert (ctype, t),
4496 fold_convert (ctype, c), 0);
4497 break;
4499 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4500 /* If op0 is an expression ... */
4501 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4502 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4503 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4504 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4505 /* ... and is unsigned, and its type is smaller than ctype,
4506 then we cannot pass through as widening. */
4507 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4508 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4509 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4510 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4511 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4512 /* ... or its type is larger than ctype,
4513 then we cannot pass through this truncation. */
4514 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4515 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4516 /* ... or signedness changes for division or modulus,
4517 then we cannot pass through this conversion. */
4518 || (code != MULT_EXPR
4519 && (TREE_UNSIGNED (ctype)
4520 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4521 break;
4523 /* Pass the constant down and see if we can make a simplification. If
4524 we can, replace this expression with the inner simplification for
4525 possible later conversion to our or some other type. */
4526 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4527 && TREE_CODE (t2) == INTEGER_CST
4528 && ! TREE_CONSTANT_OVERFLOW (t2)
4529 && (0 != (t1 = extract_muldiv (op0, t2, code,
4530 code == MULT_EXPR
4531 ? ctype : NULL_TREE))))
4532 return t1;
4533 break;
4535 case NEGATE_EXPR: case ABS_EXPR:
4536 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4537 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4538 break;
4540 case MIN_EXPR: case MAX_EXPR:
4541 /* If widening the type changes the signedness, then we can't perform
4542 this optimization as that changes the result. */
4543 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4544 break;
4546 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4547 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4548 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4550 if (tree_int_cst_sgn (c) < 0)
4551 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4553 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4554 fold_convert (ctype, t2)));
4556 break;
4558 case LSHIFT_EXPR: case RSHIFT_EXPR:
4559 /* If the second operand is constant, this is a multiplication
4560 or floor division by a power of two, so we can treat it that
4561 way unless the multiplier or divisor overflows. */
4562 if (TREE_CODE (op1) == INTEGER_CST
4563 /* const_binop may not detect overflow correctly,
4564 so check for it explicitly here. */
4565 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4566 && TREE_INT_CST_HIGH (op1) == 0
4567 && 0 != (t1 = fold_convert (ctype,
4568 const_binop (LSHIFT_EXPR,
4569 size_one_node,
4570 op1, 0)))
4571 && ! TREE_OVERFLOW (t1))
4572 return extract_muldiv (build (tcode == LSHIFT_EXPR
4573 ? MULT_EXPR : FLOOR_DIV_EXPR,
4574 ctype, fold_convert (ctype, op0), t1),
4575 c, code, wide_type);
4576 break;
4578 case PLUS_EXPR: case MINUS_EXPR:
4579 /* See if we can eliminate the operation on both sides. If we can, we
4580 can return a new PLUS or MINUS. If we can't, the only remaining
4581 cases where we can do anything are if the second operand is a
4582 constant. */
4583 t1 = extract_muldiv (op0, c, code, wide_type);
4584 t2 = extract_muldiv (op1, c, code, wide_type);
4585 if (t1 != 0 && t2 != 0
4586 && (code == MULT_EXPR
4587 /* If not multiplication, we can only do this if both operands
4588 are divisible by c. */
4589 || (multiple_of_p (ctype, op0, c)
4590 && multiple_of_p (ctype, op1, c))))
4591 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4592 fold_convert (ctype, t2)));
4594 /* If this was a subtraction, negate OP1 and set it to be an addition.
4595 This simplifies the logic below. */
4596 if (tcode == MINUS_EXPR)
4597 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4599 if (TREE_CODE (op1) != INTEGER_CST)
4600 break;
4602 /* If either OP1 or C are negative, this optimization is not safe for
4603 some of the division and remainder types while for others we need
4604 to change the code. */
4605 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4607 if (code == CEIL_DIV_EXPR)
4608 code = FLOOR_DIV_EXPR;
4609 else if (code == FLOOR_DIV_EXPR)
4610 code = CEIL_DIV_EXPR;
4611 else if (code != MULT_EXPR
4612 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4613 break;
4616 /* If it's a multiply or a division/modulus operation of a multiple
4617 of our constant, do the operation and verify it doesn't overflow. */
4618 if (code == MULT_EXPR
4619 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4621 op1 = const_binop (code, fold_convert (ctype, op1),
4622 fold_convert (ctype, c), 0);
4623 /* We allow the constant to overflow with wrapping semantics. */
4624 if (op1 == 0
4625 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4626 break;
4628 else
4629 break;
4631 /* If we have an unsigned type that is not a sizetype, we cannot widen
4632 the operation since it will change the result if the original
4633 computation overflowed. */
4634 if (TREE_UNSIGNED (ctype)
4635 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4636 && ctype != type)
4637 break;
4639 /* If we were able to eliminate our operation from the first side,
4640 apply our operation to the second side and reform the PLUS. */
4641 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4642 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4644 /* The last case is if we are a multiply. In that case, we can
4645 apply the distributive law to commute the multiply and addition
4646 if the multiplication of the constants doesn't overflow. */
4647 if (code == MULT_EXPR)
4648 return fold (build (tcode, ctype,
4649 fold (build (code, ctype,
4650 fold_convert (ctype, op0),
4651 fold_convert (ctype, c))),
4652 op1));
4654 break;
4656 case MULT_EXPR:
4657 /* We have a special case here if we are doing something like
4658 (C * 8) % 4 since we know that's zero. */
4659 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4660 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4661 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4662 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4663 return omit_one_operand (type, integer_zero_node, op0);
4665 /* ... fall through ... */
4667 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4668 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4669 /* If we can extract our operation from the LHS, do so and return a
4670 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4671 do something only if the second operand is a constant. */
4672 if (same_p
4673 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4674 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4675 fold_convert (ctype, op1)));
4676 else if (tcode == MULT_EXPR && code == MULT_EXPR
4677 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4678 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4679 fold_convert (ctype, t1)));
4680 else if (TREE_CODE (op1) != INTEGER_CST)
4681 return 0;
4683 /* If these are the same operation types, we can associate them
4684 assuming no overflow. */
4685 if (tcode == code
4686 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4687 fold_convert (ctype, c), 0))
4688 && ! TREE_OVERFLOW (t1))
4689 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4691 /* If these operations "cancel" each other, we have the main
4692 optimizations of this pass, which occur when either constant is a
4693 multiple of the other, in which case we replace this with either an
4694 operation of CODE or TCODE.
4696 If we have an unsigned type that is not a sizetype, we cannot do
4697 this since it will change the result if the original computation
4698 overflowed. */
4699 if ((! TREE_UNSIGNED (ctype)
4700 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4701 && ! flag_wrapv
4702 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4703 || (tcode == MULT_EXPR
4704 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4705 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4707 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4708 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4709 fold_convert (ctype,
4710 const_binop (TRUNC_DIV_EXPR,
4711 op1, c, 0))));
4712 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4713 return fold (build (code, ctype, fold_convert (ctype, op0),
4714 fold_convert (ctype,
4715 const_binop (TRUNC_DIV_EXPR,
4716 c, op1, 0))));
4718 break;
4720 default:
4721 break;
4724 return 0;
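
/* Worked example (illustrative): the division from the header
   comment of extract_muldiv,

     long before (long x, long y) { return (x * 8 + y * 16) / 4; }
     long after  (long x, long y) { return x * 2 + y * 4; }

   which agree whenever the original expression does not overflow --
   precisely the precondition the checks above enforce.  */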
4727 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4728 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4729 that we may sometimes modify the tree. */
4731 static tree
4732 strip_compound_expr (tree t, tree s)
4734 enum tree_code code = TREE_CODE (t);
4736 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4737 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4738 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4739 return TREE_OPERAND (t, 1);
4741 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4742 don't bother handling any other types. */
4743 else if (code == COND_EXPR)
4745 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4746 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4747 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4749 else if (TREE_CODE_CLASS (code) == '1')
4750 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4751 else if (TREE_CODE_CLASS (code) == '<'
4752 || TREE_CODE_CLASS (code) == '2')
4754 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4755 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4758 return t;
4761 /* Return a node which has the indicated constant VALUE (either 0 or
4762 1), and is of the indicated TYPE. */
4764 static tree
4765 constant_boolean_node (int value, tree type)
4767 if (type == integer_type_node)
4768 return value ? integer_one_node : integer_zero_node;
4769 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4770 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4771 : integer_zero_node);
4772 else
4774 tree t = build_int_2 (value, 0);
4776 TREE_TYPE (t) = type;
4777 return t;
4781 /* Utility function for the following routine, to see how complex a nesting of
4782 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4783 we don't care (to avoid spending too much time on complex expressions). */
4785 static int
4786 count_cond (tree expr, int lim)
4788 int ctrue, cfalse;
4790 if (TREE_CODE (expr) != COND_EXPR)
4791 return 0;
4792 else if (lim <= 0)
4793 return 0;
4795 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4796 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4797 return MIN (lim, 1 + ctrue + cfalse);
4800 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4801 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4802 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4803 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4804 COND is the first argument to CODE; otherwise (as in the example
4805 given here), it is the second argument. TYPE is the type of the
4806 original expression. */
4808 static tree
4809 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4810 tree cond, tree arg, int cond_first_p)
4812 tree test, true_value, false_value;
4813 tree lhs = NULL_TREE;
4814 tree rhs = NULL_TREE;
4815 /* In the end, we'll produce a COND_EXPR. Both arms of the
4816 conditional expression will be binary operations. The left-hand
4817 side of the expression to be executed if the condition is true
4818 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4819 of the expression to be executed if the condition is true will be
4820 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4821 but apply to the expression to be executed if the conditional is
4822 false. */
4823 tree *true_lhs;
4824 tree *true_rhs;
4825 tree *false_lhs;
4826 tree *false_rhs;
4827 /* These are the codes to use for the left-hand side and right-hand
4828 side of the COND_EXPR. Normally, they are the same as CODE. */
4829 enum tree_code lhs_code = code;
4830 enum tree_code rhs_code = code;
4831 /* And these are the types of the expressions. */
4832 tree lhs_type = type;
4833 tree rhs_type = type;
4834 int save = 0;
4836 if (cond_first_p)
4838 true_rhs = false_rhs = &arg;
4839 true_lhs = &true_value;
4840 false_lhs = &false_value;
4842 else
4844 true_lhs = false_lhs = &arg;
4845 true_rhs = &true_value;
4846 false_rhs = &false_value;
4849 if (TREE_CODE (cond) == COND_EXPR)
4851 test = TREE_OPERAND (cond, 0);
4852 true_value = TREE_OPERAND (cond, 1);
4853 false_value = TREE_OPERAND (cond, 2);
4854 /* If this operand is a throw or other void-typed expression, it does not make
4855 sense to try to perform a logical or arithmetic operation
4856 involving it. Instead of building `a + throw 3' for example,
4857 we simply build `a, throw 3'. */
4858 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4860 if (! cond_first_p)
4862 lhs_code = COMPOUND_EXPR;
4863 lhs_type = void_type_node;
4865 else
4866 lhs = true_value;
4868 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4870 if (! cond_first_p)
4872 rhs_code = COMPOUND_EXPR;
4873 rhs_type = void_type_node;
4875 else
4876 rhs = false_value;
4879 else
4881 tree testtype = TREE_TYPE (cond);
4882 test = cond;
4883 true_value = fold_convert (testtype, integer_one_node);
4884 false_value = fold_convert (testtype, integer_zero_node);
4887 /* If ARG is complex we want to make sure we only evaluate it once. Though
4888 this is only required if it is volatile, it might be more efficient even
4889 if it is not. However, if we succeed in folding one part to a constant,
4890 we do not need to make this SAVE_EXPR. Since we do this optimization
4891 primarily to see if we do end up with a constant, and since this SAVE_EXPR
4892 interferes with later optimizations, suppressing it when we can is
4893 important.
4895 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4896 do so. Don't try to see if the result is a constant if an arm is a
4897 COND_EXPR since we get exponential behavior in that case. */
4899 if (saved_expr_p (arg))
4900 save = 1;
4901 else if (lhs == 0 && rhs == 0
4902 && !TREE_CONSTANT (arg)
4903 && lang_hooks.decls.global_bindings_p () == 0
4904 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4905 || TREE_SIDE_EFFECTS (arg)))
4907 if (TREE_CODE (true_value) != COND_EXPR)
4908 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4910 if (TREE_CODE (false_value) != COND_EXPR)
4911 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4913 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4914 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4916 arg = save_expr (arg);
4917 lhs = rhs = 0;
4918 save = saved_expr_p (arg);
4922 if (lhs == 0)
4923 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4924 if (rhs == 0)
4925 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4927 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4929 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4930 ahead of the COND_EXPR we made. Otherwise we would have it only
4931 evaluated in one branch, with the other branch using the result
4932 but missing the evaluation code. Beware that the save_expr call
4933 above might not return a SAVE_EXPR, so testing the TREE_CODE
4934 of ARG is not enough to decide here. */
4935 if (save)
4936 return build (COMPOUND_EXPR, type,
4937 fold_convert (void_type_node, arg),
4938 strip_compound_expr (test, arg));
4939 else
4940 return fold_convert (type, test);
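
/* Worked example (illustrative): distributing `+' over the arms of
   a conditional as described in the header comment:

     int before (int a, int b, int x, int y) { return a + (b ? x : y); }
     int after  (int a, int b, int x, int y) { return b ? a + x : a + y; }

   These agree as long as A is evaluated once either way; when A is
   complex, the SAVE_EXPR logic above guarantees exactly that.  */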
4944 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4946 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4947 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4948 ADDEND is the same as X.
4950 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4951 and finite. The problematic cases are when X is zero, and its mode
4952 has signed zeros. In the case of rounding towards -infinity,
4953 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4954 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4956 static bool
4957 fold_real_zero_addition_p (tree type, tree addend, int negate)
4959 if (!real_zerop (addend))
4960 return false;
4962 /* Don't allow the fold with -fsignaling-nans. */
4963 if (HONOR_SNANS (TYPE_MODE (type)))
4964 return false;
4966 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4967 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4968 return true;
4970 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4971 if (TREE_CODE (addend) == REAL_CST
4972 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4973 negate = !negate;
4975 /* The mode has signed zeros, and we have to honor their sign.
4976 In this situation, there is only one case we can return true for.
4977 X - 0 is the same as X unless rounding towards -infinity is
4978 supported. */
4979 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
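
/* Spot check (illustrative): why the sign of zero matters.  Under
   IEEE 754 round-to-nearest,

     -0.0 + 0.0 == +0.0   (so X + 0 can change the sign of X == -0.0)
     -0.0 - 0.0 == -0.0   (so X - 0 preserves X)

   which is why only the subtraction case can return true when
   signed zeros are honored, and even then only if rounding toward
   -infinity need not be considered.  */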
4982 /* Subroutine of fold() that checks comparisons of built-in math
4983 functions against real constants.
4985 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4986 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4987 is the type of the result and ARG0 and ARG1 are the operands of the
4988 comparison. ARG1 must be a TREE_REAL_CST.
4990 The function returns the constant folded tree if a simplification
4991 can be made, and NULL_TREE otherwise. */
4993 static tree
4994 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4995 tree type, tree arg0, tree arg1)
4997 REAL_VALUE_TYPE c;
4999 if (BUILTIN_SQRT_P (fcode))
5001 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5002 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5004 c = TREE_REAL_CST (arg1);
5005 if (REAL_VALUE_NEGATIVE (c))
5007 /* sqrt(x) < y is always false, if y is negative. */
5008 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5009 return omit_one_operand (type,
5010 fold_convert (type, integer_zero_node),
5011 arg);
5013 /* sqrt(x) > y is always true, if y is negative and we
5014 don't care about NaNs, i.e. negative values of x. */
5015 if (code == NE_EXPR || !HONOR_NANS (mode))
5016 return omit_one_operand (type,
5017 fold_convert (type, integer_one_node),
5018 arg);
5020 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5021 return fold (build (GE_EXPR, type, arg,
5022 build_real (TREE_TYPE (arg), dconst0)));
5024 else if (code == GT_EXPR || code == GE_EXPR)
5026 REAL_VALUE_TYPE c2;
5028 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5029 real_convert (&c2, mode, &c2);
5031 if (REAL_VALUE_ISINF (c2))
5033 /* sqrt(x) > y is x == +Inf, when y is very large. */
5034 if (HONOR_INFINITIES (mode))
5035 return fold (build (EQ_EXPR, type, arg,
5036 build_real (TREE_TYPE (arg), c2)));
5038 /* sqrt(x) > y is always false, when y is very large
5039 and we don't care about infinities. */
5040 return omit_one_operand (type,
5041 fold_convert (type, integer_zero_node),
5042 arg);
5045 /* sqrt(x) > c is the same as x > c*c. */
5046 return fold (build (code, type, arg,
5047 build_real (TREE_TYPE (arg), c2)));
5049 else if (code == LT_EXPR || code == LE_EXPR)
5051 REAL_VALUE_TYPE c2;
5053 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5054 real_convert (&c2, mode, &c2);
5056 if (REAL_VALUE_ISINF (c2))
5058 /* sqrt(x) < y is always true, when y is a very large
5059 value and we don't care about NaNs or Infinities. */
5060 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5061 return omit_one_operand (type,
5062 fold_convert (type, integer_one_node),
5063 arg);
5065 /* sqrt(x) < y is x != +Inf when y is very large and we
5066 don't care about NaNs. */
5067 if (! HONOR_NANS (mode))
5068 return fold (build (NE_EXPR, type, arg,
5069 build_real (TREE_TYPE (arg), c2)));
5071 /* sqrt(x) < y is x >= 0 when y is very large and we
5072 don't care about Infinities. */
5073 if (! HONOR_INFINITIES (mode))
5074 return fold (build (GE_EXPR, type, arg,
5075 build_real (TREE_TYPE (arg), dconst0)));
5077 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5078 if (lang_hooks.decls.global_bindings_p () != 0
5079 || CONTAINS_PLACEHOLDER_P (arg))
5080 return NULL_TREE;
5082 arg = save_expr (arg);
5083 return fold (build (TRUTH_ANDIF_EXPR, type,
5084 fold (build (GE_EXPR, type, arg,
5085 build_real (TREE_TYPE (arg),
5086 dconst0))),
5087 fold (build (NE_EXPR, type, arg,
5088 build_real (TREE_TYPE (arg),
5089 c2)))));
5092 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5093 if (! HONOR_NANS (mode))
5094 return fold (build (code, type, arg,
5095 build_real (TREE_TYPE (arg), c2)));
5097 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5098 if (lang_hooks.decls.global_bindings_p () == 0
5099 && ! CONTAINS_PLACEHOLDER_P (arg))
5101 arg = save_expr (arg);
5102 return fold (build (TRUTH_ANDIF_EXPR, type,
5103 fold (build (GE_EXPR, type, arg,
5104 build_real (TREE_TYPE (arg),
5105 dconst0))),
5106 fold (build (code, type, arg,
5107 build_real (TREE_TYPE (arg),
5108 c2)))));
5113 return NULL_TREE;
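
/* Illustrative sketch (source-level view of the sqrt folds above,
   with c = 3.0 so c*c = 9.0): when NaNs must be honored,

     #include <math.h>
     int before (double x) { return sqrt (x) < 3.0; }
     int after  (double x) { return x >= 0.0 && x < 9.0; }

   agree for every double, since sqrt of a negative value or of NaN
   is NaN and any comparison with NaN is false; when NaNs need not
   be honored the fold is simply x < 9.0.  */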
5116 /* Subroutine of fold() that optimizes comparisons against Infinities,
5117 either +Inf or -Inf.
5119 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5120 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5121 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5123 The function returns the constant folded tree if a simplification
5124 can be made, and NULL_TREE otherwise. */
5126 static tree
5127 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5129 enum machine_mode mode;
5130 REAL_VALUE_TYPE max;
5131 tree temp;
5132 bool neg;
5134 mode = TYPE_MODE (TREE_TYPE (arg0));
5136 /* For negative infinity swap the sense of the comparison. */
5137 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5138 if (neg)
5139 code = swap_tree_comparison (code);
5141 switch (code)
5143 case GT_EXPR:
5144 /* x > +Inf is always false, if we ignore sNaNs. */
5145 if (HONOR_SNANS (mode))
5146 return NULL_TREE;
5147 return omit_one_operand (type,
5148 fold_convert (type, integer_zero_node),
5149 arg0);
5151 case LE_EXPR:
5152 /* x <= +Inf is always true, if we don't care about NaNs. */
5153 if (! HONOR_NANS (mode))
5154 return omit_one_operand (type,
5155 fold_convert (type, integer_one_node),
5156 arg0);
5158 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5159 if (lang_hooks.decls.global_bindings_p () == 0
5160 && ! CONTAINS_PLACEHOLDER_P (arg0))
5162 arg0 = save_expr (arg0);
5163 return fold (build (EQ_EXPR, type, arg0, arg0));
5165 break;
5167 case EQ_EXPR:
5168 case GE_EXPR:
5169 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5170 real_maxval (&max, neg, mode);
5171 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5172 arg0, build_real (TREE_TYPE (arg0), max)));
5174 case LT_EXPR:
5175 /* x < +Inf is always equal to x <= DBL_MAX. */
5176 real_maxval (&max, neg, mode);
5177 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5178 arg0, build_real (TREE_TYPE (arg0), max)));
5180 case NE_EXPR:
5181 /* x != +Inf is always equal to !(x > DBL_MAX). */
5182 real_maxval (&max, neg, mode);
5183 if (! HONOR_NANS (mode))
5184 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5185 arg0, build_real (TREE_TYPE (arg0), max)));
5186 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5187 arg0, build_real (TREE_TYPE (arg0), max)));
5188 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5190 default:
5191 break;
5194 return NULL_TREE;
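
/* Spot check (illustrative) for the LT_EXPR case: with IEEE
   doubles, x < +Inf fails only for x == +Inf or x NaN, and
   x <= DBL_MAX fails for exactly the same inputs, so

     #include <float.h>
     #include <math.h>
     int before (double x) { return x < HUGE_VAL; }
     int after  (double x) { return x <= DBL_MAX; }

   agree for every double -- including NaN -- which is why that
   case needs no HONOR_NANS check above.  */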
5197 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5198 equality/inequality test, then return a simplified form of
5199 the test using shifts and logical operations. Otherwise return
5200 NULL. TYPE is the desired result type. */
5202 tree
5203 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5204 tree result_type)
5206 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5207 operand 0. */
5208 if (code == TRUTH_NOT_EXPR)
5210 code = TREE_CODE (arg0);
5211 if (code != NE_EXPR && code != EQ_EXPR)
5212 return NULL_TREE;
5214 /* Extract the arguments of the EQ/NE. */
5215 arg1 = TREE_OPERAND (arg0, 1);
5216 arg0 = TREE_OPERAND (arg0, 0);
5218 /* This requires us to invert the code. */
5219 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5222 /* If this is testing a single bit, we can optimize the test. */
5223 if ((code == NE_EXPR || code == EQ_EXPR)
5224 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5225 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5227 tree inner = TREE_OPERAND (arg0, 0);
5228 tree type = TREE_TYPE (arg0);
5229 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5230 enum machine_mode operand_mode = TYPE_MODE (type);
5231 int ops_unsigned;
5232 tree signed_type, unsigned_type, intermediate_type;
5233 tree arg00;
5235 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5236 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5237 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5238 if (arg00 != NULL_TREE)
5240 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5241 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5242 fold_convert (stype, arg00),
5243 fold_convert (stype, integer_zero_node)));
5246 /* At this point, we know that arg0 is not testing the sign bit. */
5247 if (TYPE_PRECISION (type) - 1 == bitnum)
5248 abort ();
5250 /* Otherwise we have (A & C) != 0 where C is a single bit,
5251 convert that into ((A >> C2) & 1), where C2 = log2(C).
5252 Similarly for (A & C) == 0. */
5254 /* If INNER is a right shift of a constant and it plus BITNUM does
5255 not overflow, adjust BITNUM and INNER. */
5256 if (TREE_CODE (inner) == RSHIFT_EXPR
5257 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5258 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5259 && bitnum < TYPE_PRECISION (type)
5260 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5261 bitnum - TYPE_PRECISION (type)))
5263 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5264 inner = TREE_OPERAND (inner, 0);
5267 /* If we are going to be able to omit the AND below, we must do our
5268 operations as unsigned. If we must use the AND, we have a choice.
5269 Normally unsigned is faster, but for some machines signed is. */
5270 #ifdef LOAD_EXTEND_OP
5271 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5272 #else
5273 ops_unsigned = 1;
5274 #endif
5276 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5277 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5278 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5279 inner = fold_convert (intermediate_type, inner);
5281 if (bitnum != 0)
5282 inner = build (RSHIFT_EXPR, intermediate_type,
5283 inner, size_int (bitnum));
5285 if (code == EQ_EXPR)
5286 inner = build (BIT_XOR_EXPR, intermediate_type,
5287 inner, integer_one_node);
5289 /* Put the AND last so it can combine with more things. */
5290 inner = build (BIT_AND_EXPR, intermediate_type,
5291 inner, integer_one_node);
5293 /* Make sure to return the proper type. */
5294 inner = fold_convert (result_type, inner);
5296 return inner;
5298 return NULL_TREE;
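
/* Worked example (illustrative): testing one non-sign bit with
   C = 8, so C2 = log2 (C) = 3:

     int before (unsigned x) { return (x & 8) != 0; }
     int after  (unsigned x) { return (x >> 3) & 1; }

   and for the sign-bit special case on a 32-bit int target,
   (x & 0x80000000) != 0 becomes x < 0 once x is viewed as signed.  */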
5301 /* Check whether we are allowed to reorder operands arg0 and arg1,
5302 such that the evaluation of arg1 occurs before arg0. */
5304 static bool
5305 reorder_operands_p (tree arg0, tree arg1)
5307 if (! flag_evaluation_order)
5308 return true;
5309 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5310 return true;
5311 return ! TREE_SIDE_EFFECTS (arg0)
5312 && ! TREE_SIDE_EFFECTS (arg1);
5315 /* Test whether it is preferable to swap two operands, ARG0 and
5316 ARG1, for example because ARG0 is an integer constant and ARG1
5317 isn't. If REORDER is true, only recommend swapping if we can
5318 evaluate the operands in reverse order. */
5320 static bool
5321 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5323 STRIP_SIGN_NOPS (arg0);
5324 STRIP_SIGN_NOPS (arg1);
5326 if (TREE_CODE (arg1) == INTEGER_CST)
5327 return 0;
5328 if (TREE_CODE (arg0) == INTEGER_CST)
5329 return 1;
5331 if (TREE_CODE (arg1) == REAL_CST)
5332 return 0;
5333 if (TREE_CODE (arg0) == REAL_CST)
5334 return 1;
5336 if (TREE_CODE (arg1) == COMPLEX_CST)
5337 return 0;
5338 if (TREE_CODE (arg0) == COMPLEX_CST)
5339 return 1;
5341 if (TREE_CONSTANT (arg1))
5342 return 0;
5343 if (TREE_CONSTANT (arg0))
5344 return 1;
5346 if (optimize_size)
5347 return 0;
5349 if (reorder && flag_evaluation_order
5350 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5351 return 0;
5353 if (DECL_P (arg1))
5354 return 0;
5355 if (DECL_P (arg0))
5356 return 1;
5358 return 0;
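
/* Example (illustrative): with X a VAR_DECL and FIVE an
   INTEGER_CST, tree_swap_operands_p (five, x, true) returns 1,
   since ARG1 is not a constant but ARG0 is.  Together with the
   commutativity check in fold below, this canonicalizes constants
   to the second operand, so `5 + x' is rebuilt as `x + 5' and the
   folding cases only need to test one shape.  */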
5361 /* Perform constant folding and related simplification of EXPR.
5362 The related simplifications include x*1 => x, x*0 => 0, etc.,
5363 and application of the associative law.
5364 NOP_EXPR conversions may be removed freely (as long as we
5365 are careful not to change the C type of the overall expression).
5366 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5367 but we can constant-fold them if they have constant operands. */
5369 #ifdef ENABLE_FOLD_CHECKING
5370 # define fold(x) fold_1 (x)
5371 static tree fold_1 (tree);
5372 static
5373 #endif
5374 tree
5375 fold (tree expr)
5377 const tree t = expr;
5378 const tree type = TREE_TYPE (expr);
5379 tree t1 = NULL_TREE;
5380 tree tem;
5381 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5382 enum tree_code code = TREE_CODE (t);
5383 int kind = TREE_CODE_CLASS (code);
5384 /* WINS will be nonzero when the switch is done
5385 if all operands are constant. */
5386 int wins = 1;
5388 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5389 Likewise for a SAVE_EXPR that's already been evaluated. */
5390 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5391 return t;
5393 /* Return right away if a constant. */
5394 if (kind == 'c')
5395 return t;
5397 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5399 tree subop;
5401 /* Special case for conversion ops that can have fixed point args. */
5402 arg0 = TREE_OPERAND (t, 0);
5404 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5405 if (arg0 != 0)
5406 STRIP_SIGN_NOPS (arg0);
5408 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5409 subop = TREE_REALPART (arg0);
5410 else
5411 subop = arg0;
5413 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5414 && TREE_CODE (subop) != REAL_CST)
5415 /* Note that TREE_CONSTANT isn't enough:
5416 static var addresses are constant but we can't
5417 do arithmetic on them. */
5418 wins = 0;
5420 else if (IS_EXPR_CODE_CLASS (kind))
5422 int len = first_rtl_op (code);
5423 int i;
5424 for (i = 0; i < len; i++)
5426 tree op = TREE_OPERAND (t, i);
5427 tree subop;
5429 if (op == 0)
5430 continue; /* Valid for CALL_EXPR, at least. */
5432 /* Strip any conversions that don't change the mode. This is
5433 safe for every expression, except for a comparison expression
5434 because its signedness is derived from its operands. So, in
5435 the latter case, only strip conversions that don't change the
5436 signedness.
5438 Note that this is done as an internal manipulation within the
5439 constant folder, in order to find the simplest representation
5440 of the arguments so that their form can be studied. In any
5441 case, the appropriate type conversions should be put back in
5442 the tree that will get out of the constant folder. */
5443 if (kind == '<')
5444 STRIP_SIGN_NOPS (op);
5445 else
5446 STRIP_NOPS (op);
5448 if (TREE_CODE (op) == COMPLEX_CST)
5449 subop = TREE_REALPART (op);
5450 else
5451 subop = op;
5453 if (TREE_CODE (subop) != INTEGER_CST
5454 && TREE_CODE (subop) != REAL_CST)
5455 /* Note that TREE_CONSTANT isn't enough:
5456 static var addresses are constant but we can't
5457 do arithmetic on them. */
5458 wins = 0;
5460 if (i == 0)
5461 arg0 = op;
5462 else if (i == 1)
5463 arg1 = op;
5467 /* If this is a commutative operation, and ARG0 is a constant, move it
5468 to ARG1 to reduce the number of tests below. */
5469 if (commutative_tree_code (code)
5470 && tree_swap_operands_p (arg0, arg1, true))
5471 return fold (build (code, type, TREE_OPERAND (t, 1),
5472 TREE_OPERAND (t, 0)));
5474 /* Now WINS is set as described above,
5475 ARG0 is the first operand of EXPR,
5476 and ARG1 is the second operand (if EXPR has more than one operand).
5478 First check for cases where an arithmetic operation is applied to a
5479 compound, conditional, or comparison operation. Push the arithmetic
5480 operation inside the compound or conditional to see if any folding
5481 can then be done. Convert comparison to conditional for this purpose.
5482 This also optimizes non-constant cases that used to be done in
5483 expand_expr.
5485 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
5486 where one of the operands is a comparison and the other is a comparison, a
5487 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5488 code below would make the expression more complex. Change it to a
5489 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5490 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5492 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5493 || code == EQ_EXPR || code == NE_EXPR)
5494 && ((truth_value_p (TREE_CODE (arg0))
5495 && (truth_value_p (TREE_CODE (arg1))
5496 || (TREE_CODE (arg1) == BIT_AND_EXPR
5497 && integer_onep (TREE_OPERAND (arg1, 1)))))
5498 || (truth_value_p (TREE_CODE (arg1))
5499 && (truth_value_p (TREE_CODE (arg0))
5500 || (TREE_CODE (arg0) == BIT_AND_EXPR
5501 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5503 tem = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5504 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5505 : TRUTH_XOR_EXPR,
5506 type, arg0, arg1));
5508 if (code == EQ_EXPR)
5509 tem = invert_truthvalue (tem);
5511 return tem;
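/* Sketch of the effect (hypothetical int operands a, b, c, d):
   (a < b) & (c < d)   becomes the TRUTH_AND_EXPR (a < b) && (c < d);
   (a < b) == (c < d)  becomes the inverted TRUTH_XOR_EXPR
   !((a < b) ^ (c < d)), keeping the tree in truth-value form instead
   of growing a more complex bitwise expression.  */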
5514 if (TREE_CODE_CLASS (code) == '1')
5516 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5517 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5518 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5519 else if (TREE_CODE (arg0) == COND_EXPR)
5521 tree arg01 = TREE_OPERAND (arg0, 1);
5522 tree arg02 = TREE_OPERAND (arg0, 2);
5523 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5524 arg01 = fold (build1 (code, type, arg01));
5525 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5526 arg02 = fold (build1 (code, type, arg02));
5527 tem = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5528 arg01, arg02));
5530 /* If this was a conversion, and all we did was to move it
5531 inside the COND_EXPR, bring it back out. But leave it if
5532 it is a conversion from integer to integer and the
5533 result precision is no wider than a word since such a
5534 conversion is cheap and may be optimized away by combine,
5535 while it couldn't if it were outside the COND_EXPR. Then return
5536 so we don't get into an infinite recursion loop taking the
5537 conversion out and then back in. */
5539 if ((code == NOP_EXPR || code == CONVERT_EXPR
5540 || code == NON_LVALUE_EXPR)
5541 && TREE_CODE (tem) == COND_EXPR
5542 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5543 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5544 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5545 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5546 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5547 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5548 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5549 && (INTEGRAL_TYPE_P
5550 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5551 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5552 tem = build1 (code, type,
5553 build (COND_EXPR,
5554 TREE_TYPE (TREE_OPERAND
5555 (TREE_OPERAND (tem, 1), 0)),
5556 TREE_OPERAND (tem, 0),
5557 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5558 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5559 return tem;
5561 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5562 return fold (build (COND_EXPR, type, arg0,
5563 fold (build1 (code, type, integer_one_node)),
5564 fold (build1 (code, type, integer_zero_node))));
5566 else if (TREE_CODE_CLASS (code) == '<'
5567 && TREE_CODE (arg0) == COMPOUND_EXPR)
5568 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5569 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5570 else if (TREE_CODE_CLASS (code) == '<'
5571 && TREE_CODE (arg1) == COMPOUND_EXPR)
5572 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5573 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5574 else if (TREE_CODE_CLASS (code) == '2'
5575 || TREE_CODE_CLASS (code) == '<')
5577 if (TREE_CODE (arg1) == COMPOUND_EXPR
5578 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5579 && ! TREE_SIDE_EFFECTS (arg0))
5580 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5581 fold (build (code, type,
5582 arg0, TREE_OPERAND (arg1, 1))));
5583 else if ((TREE_CODE (arg1) == COND_EXPR
5584 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5585 && TREE_CODE_CLASS (code) != '<'))
5586 && (TREE_CODE (arg0) != COND_EXPR
5587 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5588 && (! TREE_SIDE_EFFECTS (arg0)
5589 || (lang_hooks.decls.global_bindings_p () == 0
5590 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5591 return
5592 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5593 /*cond_first_p=*/0);
5594 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5595 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5596 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5597 else if ((TREE_CODE (arg0) == COND_EXPR
5598 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5599 && TREE_CODE_CLASS (code) != '<'))
5600 && (TREE_CODE (arg1) != COND_EXPR
5601 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5602 && (! TREE_SIDE_EFFECTS (arg1)
5603 || (lang_hooks.decls.global_bindings_p () == 0
5604 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5605 return
5606 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5607 /*cond_first_p=*/1);
5610 switch (code)
5612 case CONST_DECL:
5613 return fold (DECL_INITIAL (t));
5615 case NOP_EXPR:
5616 case FLOAT_EXPR:
5617 case CONVERT_EXPR:
5618 case FIX_TRUNC_EXPR:
5619 case FIX_CEIL_EXPR:
5620 case FIX_FLOOR_EXPR:
5621 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5622 return TREE_OPERAND (t, 0);
5624 /* Handle cases of two conversions in a row. */
5625 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5626 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5628 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5629 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5630 int inside_int = INTEGRAL_TYPE_P (inside_type);
5631 int inside_ptr = POINTER_TYPE_P (inside_type);
5632 int inside_float = FLOAT_TYPE_P (inside_type);
5633 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5634 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5635 int inter_int = INTEGRAL_TYPE_P (inter_type);
5636 int inter_ptr = POINTER_TYPE_P (inter_type);
5637 int inter_float = FLOAT_TYPE_P (inter_type);
5638 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5639 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5640 int final_int = INTEGRAL_TYPE_P (type);
5641 int final_ptr = POINTER_TYPE_P (type);
5642 int final_float = FLOAT_TYPE_P (type);
5643 unsigned int final_prec = TYPE_PRECISION (type);
5644 int final_unsignedp = TREE_UNSIGNED (type);
5646 /* In addition to the cases of two conversions in a row
5647 handled below, if we are converting something to its own
5648 type via an object of identical or wider precision, neither
5649 conversion is needed. */
5650 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5651 && ((inter_int && final_int) || (inter_float && final_float))
5652 && inter_prec >= final_prec)
5653 return fold (build1 (code, type,
5654 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5656 /* Likewise, if the intermediate and final types are either both
5657 float or both integer, we don't need the middle conversion if
5658 it is at least as wide as the initial type and doesn't change the
5659 signedness (for integers). Avoid this if the final type is a pointer
5660 since then we sometimes need the inner conversion. Likewise if
5661 the outer has a precision not equal to the size of its mode. */
5662 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5663 || (inter_float && inside_float))
5664 && inter_prec >= inside_prec
5665 && (inter_float || inter_unsignedp == inside_unsignedp)
5666 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5667 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5668 && ! final_ptr)
5669 return fold (build1 (code, type,
5670 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5672 /* If we have a sign-extension of a zero-extended value, we can
5673 replace that by a single zero-extension. */
5674 if (inside_int && inter_int && final_int
5675 && inside_prec < inter_prec && inter_prec < final_prec
5676 && inside_unsignedp && !inter_unsignedp)
5677 return fold (build1 (code, type,
5678 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5680 /* Two conversions in a row are not needed unless:
5681 - some conversion is floating-point (overstrict for now), or
5682 - the intermediate type is narrower than both initial and
5683 final, or
5684 - the intermediate type and innermost type differ in signedness,
5685 and the outermost type is wider than the intermediate, or
5686 - the initial type is a pointer type and the precisions of the
5687 intermediate and final types differ, or
5688 - the final type is a pointer type and the precisions of the
5689 initial and intermediate types differ. */
5690 if (! inside_float && ! inter_float && ! final_float
5691 && (inter_prec > inside_prec || inter_prec > final_prec)
5692 && ! (inside_int && inter_int
5693 && inter_unsignedp != inside_unsignedp
5694 && inter_prec < final_prec)
5695 && ((inter_unsignedp && inter_prec > inside_prec)
5696 == (final_unsignedp && final_prec > inter_prec))
5697 && ! (inside_ptr && inter_prec != final_prec)
5698 && ! (final_ptr && inside_prec != inter_prec)
5699 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5700 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5701 && ! final_ptr)
5702 return fold (build1 (code, type,
5703 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
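/* Concrete illustration (hypothetical types; assumes 32-bit int and
   64-bit long): for an int i, (int) (long) i folds to plain i --
   converting a value to its own type via the wider long loses
   nothing.  By contrast (int) (char) i keeps both conversions: the
   intermediate type is narrower than both the initial and the final
   type, so information is genuinely lost in the middle.  */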
5706 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5707 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5708 /* Detect assigning a bitfield. */
5709 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5710 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5712 /* Don't leave an assignment inside a conversion
5713 unless assigning a bitfield. */
5714 tree prev = TREE_OPERAND (t, 0);
5715 tem = copy_node (t);
5716 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5717 /* First do the assignment, then return converted constant. */
5718 tem = build (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5719 TREE_NO_UNUSED_WARNING (tem) = 1;
5720 TREE_USED (tem) = 1;
5721 return tem;
5724 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5725 constant (if x has signed type, the sign bit cannot be set
5726 in c). This folds extension into the BIT_AND_EXPR. */
5727 if (INTEGRAL_TYPE_P (type)
5728 && TREE_CODE (type) != BOOLEAN_TYPE
5729 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5730 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5732 tree and = TREE_OPERAND (t, 0);
5733 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5734 int change = 0;
5736 if (TREE_UNSIGNED (TREE_TYPE (and))
5737 || (TYPE_PRECISION (type)
5738 <= TYPE_PRECISION (TREE_TYPE (and))))
5739 change = 1;
5740 else if (TYPE_PRECISION (TREE_TYPE (and1))
5741 <= HOST_BITS_PER_WIDE_INT
5742 && host_integerp (and1, 1))
5744 unsigned HOST_WIDE_INT cst;
5746 cst = tree_low_cst (and1, 1);
5747 cst &= (HOST_WIDE_INT) -1
5748 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5749 change = (cst == 0);
5750 #ifdef LOAD_EXTEND_OP
5751 if (change
5752 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5753 == ZERO_EXTEND))
5755 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5756 and0 = fold_convert (uns, and0);
5757 and1 = fold_convert (uns, and1);
5759 #endif
5761 if (change)
5762 return fold (build (BIT_AND_EXPR, type,
5763 fold_convert (type, and0),
5764 fold_convert (type, and1)));
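/* For example (editor's sketch, hypothetical operands; assumes
   32-bit int): (long long) (x & 0x7f) with a signed int x folds to
   ((long long) x) & 0x7f, because the mask leaves the sign bit of x
   clear.  With the mask 0x80000000 the sign-bit test above fails and
   the conversion stays outside the BIT_AND_EXPR.  */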
5767 tem = fold_convert_const (code, type, arg0);
5768 return tem ? tem : t;
5770 case VIEW_CONVERT_EXPR:
5771 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5772 return build1 (VIEW_CONVERT_EXPR, type,
5773 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5774 return t;
5776 case COMPONENT_REF:
5777 if (TREE_CODE (arg0) == CONSTRUCTOR
5778 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5780 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5781 if (m)
5782 return TREE_VALUE (m);
5784 return t;
5786 case RANGE_EXPR:
5787 if (TREE_CONSTANT (t) != wins)
5789 tem = copy_node (t);
5790 TREE_CONSTANT (tem) = wins;
5791 return tem;
5793 return t;
5795 case NEGATE_EXPR:
5796 if (negate_expr_p (arg0))
5797 return fold_convert (type, negate_expr (arg0));
5798 return t;
5800 case ABS_EXPR:
5801 if (wins
5802 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
5803 return fold_abs_const (arg0, type);
5804 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5805 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5806 /* Convert fabs((double)float) into (double)fabsf(float). */
5807 else if (TREE_CODE (arg0) == NOP_EXPR
5808 && TREE_CODE (type) == REAL_TYPE)
5810 tree targ0 = strip_float_extensions (arg0);
5811 if (targ0 != arg0)
5812 return fold_convert (type, fold (build1 (ABS_EXPR,
5813 TREE_TYPE (targ0),
5814 targ0)));
5816 else if (tree_expr_nonnegative_p (arg0))
5817 return arg0;
5818 return t;
5820 case CONJ_EXPR:
5821 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5822 return fold_convert (type, arg0);
5823 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5824 return build (COMPLEX_EXPR, type,
5825 TREE_OPERAND (arg0, 0),
5826 negate_expr (TREE_OPERAND (arg0, 1)));
5827 else if (TREE_CODE (arg0) == COMPLEX_CST)
5828 return build_complex (type, TREE_REALPART (arg0),
5829 negate_expr (TREE_IMAGPART (arg0)));
5830 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5831 return fold (build (TREE_CODE (arg0), type,
5832 fold (build1 (CONJ_EXPR, type,
5833 TREE_OPERAND (arg0, 0))),
5834 fold (build1 (CONJ_EXPR,
5835 type, TREE_OPERAND (arg0, 1)))));
5836 else if (TREE_CODE (arg0) == CONJ_EXPR)
5837 return TREE_OPERAND (arg0, 0);
5838 return t;
5840 case BIT_NOT_EXPR:
5841 if (wins)
5843 tem = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5844 ~ TREE_INT_CST_HIGH (arg0));
5845 TREE_TYPE (tem) = type;
5846 force_fit_type (tem, 0);
5847 TREE_OVERFLOW (tem) = TREE_OVERFLOW (arg0);
5848 TREE_CONSTANT_OVERFLOW (tem) = TREE_CONSTANT_OVERFLOW (arg0);
5849 return tem;
5851 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5852 return TREE_OPERAND (arg0, 0);
5853 return t;
5855 case PLUS_EXPR:
5856 /* A + (-B) -> A - B */
5857 if (TREE_CODE (arg1) == NEGATE_EXPR)
5858 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5859 /* (-A) + B -> B - A */
5860 if (TREE_CODE (arg0) == NEGATE_EXPR)
5861 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5862 else if (! FLOAT_TYPE_P (type))
5864 if (integer_zerop (arg1))
5865 return non_lvalue (fold_convert (type, arg0));
5867 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5868 with a constant, and the two constants have no bits in common,
5869 we should treat this as a BIT_IOR_EXPR since this may produce more
5870 simplifications. */
5871 if (TREE_CODE (arg0) == BIT_AND_EXPR
5872 && TREE_CODE (arg1) == BIT_AND_EXPR
5873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5874 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5875 && integer_zerop (const_binop (BIT_AND_EXPR,
5876 TREE_OPERAND (arg0, 1),
5877 TREE_OPERAND (arg1, 1), 0)))
5879 code = BIT_IOR_EXPR;
5880 goto bit_ior;
5883 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5884 (plus (plus (mult) (mult)) (foo)) so that we can
5885 take advantage of the factoring cases below. */
5886 if ((TREE_CODE (arg0) == PLUS_EXPR
5887 && TREE_CODE (arg1) == MULT_EXPR)
5888 || (TREE_CODE (arg1) == PLUS_EXPR
5889 && TREE_CODE (arg0) == MULT_EXPR))
5891 tree parg0, parg1, parg, marg;
5893 if (TREE_CODE (arg0) == PLUS_EXPR)
5894 parg = arg0, marg = arg1;
5895 else
5896 parg = arg1, marg = arg0;
5897 parg0 = TREE_OPERAND (parg, 0);
5898 parg1 = TREE_OPERAND (parg, 1);
5899 STRIP_NOPS (parg0);
5900 STRIP_NOPS (parg1);
5902 if (TREE_CODE (parg0) == MULT_EXPR
5903 && TREE_CODE (parg1) != MULT_EXPR)
5904 return fold (build (PLUS_EXPR, type,
5905 fold (build (PLUS_EXPR, type,
5906 fold_convert (type, parg0),
5907 fold_convert (type, marg))),
5908 fold_convert (type, parg1)));
5909 if (TREE_CODE (parg0) != MULT_EXPR
5910 && TREE_CODE (parg1) == MULT_EXPR)
5911 return fold (build (PLUS_EXPR, type,
5912 fold (build (PLUS_EXPR, type,
5913 fold_convert (type, parg1),
5914 fold_convert (type, marg))),
5915 fold_convert (type, parg0)));
5918 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5920 tree arg00, arg01, arg10, arg11;
5921 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5923 /* (A * C) + (B * C) -> (A+B) * C.
5924 We are most concerned about the case where C is a constant,
5925 but other combinations show up during loop reduction. Since
5926 it is not difficult, try all four possibilities. */
5928 arg00 = TREE_OPERAND (arg0, 0);
5929 arg01 = TREE_OPERAND (arg0, 1);
5930 arg10 = TREE_OPERAND (arg1, 0);
5931 arg11 = TREE_OPERAND (arg1, 1);
5932 same = NULL_TREE;
5934 if (operand_equal_p (arg01, arg11, 0))
5935 same = arg01, alt0 = arg00, alt1 = arg10;
5936 else if (operand_equal_p (arg00, arg10, 0))
5937 same = arg00, alt0 = arg01, alt1 = arg11;
5938 else if (operand_equal_p (arg00, arg11, 0))
5939 same = arg00, alt0 = arg01, alt1 = arg10;
5940 else if (operand_equal_p (arg01, arg10, 0))
5941 same = arg01, alt0 = arg00, alt1 = arg11;
5943 /* No identical multiplicands; see if we can find a common
5944 power-of-two factor in non-power-of-two multiplies. This
5945 can help in multi-dimensional array access. */
5946 else if (TREE_CODE (arg01) == INTEGER_CST
5947 && TREE_CODE (arg11) == INTEGER_CST
5948 && TREE_INT_CST_HIGH (arg01) == 0
5949 && TREE_INT_CST_HIGH (arg11) == 0)
5951 HOST_WIDE_INT int01, int11, tmp;
5952 int01 = TREE_INT_CST_LOW (arg01);
5953 int11 = TREE_INT_CST_LOW (arg11);
5955 /* Move min of absolute values to int11. */
5956 if ((int01 >= 0 ? int01 : -int01)
5957 < (int11 >= 0 ? int11 : -int11))
5959 tmp = int01, int01 = int11, int11 = tmp;
5960 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5961 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5964 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5966 alt0 = fold (build (MULT_EXPR, type, arg00,
5967 build_int_2 (int01 / int11, 0)));
5968 alt1 = arg10;
5969 same = arg11;
5973 if (same)
5974 return fold (build (MULT_EXPR, type,
5975 fold (build (PLUS_EXPR, type, alt0, alt1)),
5976 same));
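/* Worked example (hypothetical operands): "i * 12 + j * 4" has no
   identical multiplicand, but 4 is a power of two that divides 12,
   so the code above rewrites it as "(i * 3 + j) * 4" -- exactly the
   shape produced by multi-dimensional array indexing.  */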
5979 else
5981 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5982 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5983 return non_lvalue (fold_convert (type, arg0));
5985 /* Likewise if the operands are reversed. */
5986 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5987 return non_lvalue (fold_convert (type, arg1));
5989 /* Convert x+x into x*2.0. */
5990 if (operand_equal_p (arg0, arg1, 0)
5991 && SCALAR_FLOAT_TYPE_P (type))
5992 return fold (build (MULT_EXPR, type, arg0,
5993 build_real (type, dconst2)));
5995 /* Convert x*c+x into x*(c+1). */
5996 if (flag_unsafe_math_optimizations
5997 && TREE_CODE (arg0) == MULT_EXPR
5998 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5999 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6000 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6002 REAL_VALUE_TYPE c;
6004 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6005 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6006 return fold (build (MULT_EXPR, type, arg1,
6007 build_real (type, c)));
6010 /* Convert x+x*c into x*(c+1). */
6011 if (flag_unsafe_math_optimizations
6012 && TREE_CODE (arg1) == MULT_EXPR
6013 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6014 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6015 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6017 REAL_VALUE_TYPE c;
6019 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6020 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6021 return fold (build (MULT_EXPR, type, arg0,
6022 build_real (type, c)));
6025 /* Convert x*c1+x*c2 into x*(c1+c2). */
6026 if (flag_unsafe_math_optimizations
6027 && TREE_CODE (arg0) == MULT_EXPR
6028 && TREE_CODE (arg1) == MULT_EXPR
6029 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6030 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6031 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6032 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6033 && operand_equal_p (TREE_OPERAND (arg0, 0),
6034 TREE_OPERAND (arg1, 0), 0))
6036 REAL_VALUE_TYPE c1, c2;
6038 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6039 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6040 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6041 return fold (build (MULT_EXPR, type,
6042 TREE_OPERAND (arg0, 0),
6043 build_real (type, c1)));
6047 bit_rotate:
6048 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
6049 is a rotate of A by C1 bits. */
6050 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
6051 is a rotate of A by B bits. */
6053 enum tree_code code0, code1;
6054 code0 = TREE_CODE (arg0);
6055 code1 = TREE_CODE (arg1);
6056 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6057 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6058 && operand_equal_p (TREE_OPERAND (arg0, 0),
6059 TREE_OPERAND (arg1, 0), 0)
6060 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6062 tree tree01, tree11;
6063 enum tree_code code01, code11;
6065 tree01 = TREE_OPERAND (arg0, 1);
6066 tree11 = TREE_OPERAND (arg1, 1);
6067 STRIP_NOPS (tree01);
6068 STRIP_NOPS (tree11);
6069 code01 = TREE_CODE (tree01);
6070 code11 = TREE_CODE (tree11);
6071 if (code01 == INTEGER_CST
6072 && code11 == INTEGER_CST
6073 && TREE_INT_CST_HIGH (tree01) == 0
6074 && TREE_INT_CST_HIGH (tree11) == 0
6075 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6076 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6077 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6078 code0 == LSHIFT_EXPR ? tree01 : tree11);
6079 else if (code11 == MINUS_EXPR)
6081 tree tree110, tree111;
6082 tree110 = TREE_OPERAND (tree11, 0);
6083 tree111 = TREE_OPERAND (tree11, 1);
6084 STRIP_NOPS (tree110);
6085 STRIP_NOPS (tree111);
6086 if (TREE_CODE (tree110) == INTEGER_CST
6087 && 0 == compare_tree_int (tree110,
6088 TYPE_PRECISION
6089 (TREE_TYPE (TREE_OPERAND
6090 (arg0, 0))))
6091 && operand_equal_p (tree01, tree111, 0))
6092 return build ((code0 == LSHIFT_EXPR
6093 ? LROTATE_EXPR
6094 : RROTATE_EXPR),
6095 type, TREE_OPERAND (arg0, 0), tree01);
6097 else if (code01 == MINUS_EXPR)
6099 tree tree010, tree011;
6100 tree010 = TREE_OPERAND (tree01, 0);
6101 tree011 = TREE_OPERAND (tree01, 1);
6102 STRIP_NOPS (tree010);
6103 STRIP_NOPS (tree011);
6104 if (TREE_CODE (tree010) == INTEGER_CST
6105 && 0 == compare_tree_int (tree010,
6106 TYPE_PRECISION
6107 (TREE_TYPE (TREE_OPERAND
6108 (arg0, 0))))
6109 && operand_equal_p (tree11, tree011, 0))
6110 return build ((code0 != LSHIFT_EXPR
6111 ? LROTATE_EXPR
6112 : RROTATE_EXPR),
6113 type, TREE_OPERAND (arg0, 0), tree11);
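/* Illustration (editor's sketch; assumes a 32-bit unsigned int x):
   (x << 3) + (x >> 29)        folds to a left-rotate of x by 3, and
   (x << n) + (x >> (32 - n))  folds to a left-rotate of x by n,
   in both cases because the shift counts sum to the precision of
   x's type.  */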
6118 associate:
6119 /* In most languages, we can't reassociate operations on floats through
6120 parentheses. Rather than remember where the parentheses were, we
6121 don't associate floats at all, unless the user has specified
6122 -funsafe-math-optimizations. */
6124 if (! wins
6125 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6127 tree var0, con0, lit0, minus_lit0;
6128 tree var1, con1, lit1, minus_lit1;
6130 /* Split both trees into variables, constants, and literals. Then
6131 associate each group together, the constants with literals,
6132 then the result with variables. This increases the chances of
6133 literals being recombined later and of generating relocatable
6134 expressions for the sum of a constant and literal. */
6135 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6136 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6137 code == MINUS_EXPR);
6139 /* Only do something if we found more than two objects. Otherwise,
6140 nothing has changed and we risk infinite recursion. */
6141 if (2 < ((var0 != 0) + (var1 != 0)
6142 + (con0 != 0) + (con1 != 0)
6143 + (lit0 != 0) + (lit1 != 0)
6144 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6146 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6147 if (code == MINUS_EXPR)
6148 code = PLUS_EXPR;
6150 var0 = associate_trees (var0, var1, code, type);
6151 con0 = associate_trees (con0, con1, code, type);
6152 lit0 = associate_trees (lit0, lit1, code, type);
6153 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6155 /* Preserve the MINUS_EXPR if the negative part of the literal is
6156 greater than the positive part. Otherwise, the multiplicative
6157 folding code (i.e. extract_muldiv) may be fooled when
6158 unsigned constants are subtracted, as in the following
6159 example: ((X*2 + 4) - 8U)/2. */
6160 if (minus_lit0 && lit0)
6162 if (TREE_CODE (lit0) == INTEGER_CST
6163 && TREE_CODE (minus_lit0) == INTEGER_CST
6164 && tree_int_cst_lt (lit0, minus_lit0))
6166 minus_lit0 = associate_trees (minus_lit0, lit0,
6167 MINUS_EXPR, type);
6168 lit0 = 0;
6170 else
6172 lit0 = associate_trees (lit0, minus_lit0,
6173 MINUS_EXPR, type);
6174 minus_lit0 = 0;
6177 if (minus_lit0)
6179 if (con0 == 0)
6180 return fold_convert (type,
6181 associate_trees (var0, minus_lit0,
6182 MINUS_EXPR, type));
6183 else
6185 con0 = associate_trees (con0, minus_lit0,
6186 MINUS_EXPR, type);
6187 return fold_convert (type,
6188 associate_trees (var0, con0,
6189 PLUS_EXPR, type));
6193 con0 = associate_trees (con0, lit0, code, type);
6194 return fold_convert (type, associate_trees (var0, con0,
6195 code, type));
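/* Example of the split (hypothetical operands): folding
   "(x + 4) + (y + 2)" separates the variables x and y from the
   literals 4 and 2; the literals are combined first, yielding
   "(x + y) + 6", which exposes the combined constant to later
   folding.  */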
6199 binary:
6200 if (wins)
6201 t1 = const_binop (code, arg0, arg1, 0);
6202 if (t1 != NULL_TREE)
6204 /* The return value should always have
6205 the same type as the original expression. */
6206 if (TREE_TYPE (t1) != type)
6207 t1 = fold_convert (type, t1);
6209 return t1;
6211 return t;
6213 case MINUS_EXPR:
6214 /* A - (-B) -> A + B */
6215 if (TREE_CODE (arg1) == NEGATE_EXPR)
6216 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6217 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6218 if (TREE_CODE (arg0) == NEGATE_EXPR
6219 && (FLOAT_TYPE_P (type)
6220 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6221 && negate_expr_p (arg1)
6222 && reorder_operands_p (arg0, arg1))
6223 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6224 TREE_OPERAND (arg0, 0)));
6226 if (! FLOAT_TYPE_P (type))
6228 if (! wins && integer_zerop (arg0))
6229 return negate_expr (fold_convert (type, arg1));
6230 if (integer_zerop (arg1))
6231 return non_lvalue (fold_convert (type, arg0));
6233 /* Fold A - (A & B) into ~B & A. */
6234 if (!TREE_SIDE_EFFECTS (arg0)
6235 && TREE_CODE (arg1) == BIT_AND_EXPR)
6237 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6238 return fold (build (BIT_AND_EXPR, type,
6239 fold (build1 (BIT_NOT_EXPR, type,
6240 TREE_OPERAND (arg1, 0))),
6241 arg0));
6242 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6243 return fold (build (BIT_AND_EXPR, type,
6244 fold (build1 (BIT_NOT_EXPR, type,
6245 TREE_OPERAND (arg1, 1))),
6246 arg0));
6249 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6250 any power of 2 minus 1. */
6251 if (TREE_CODE (arg0) == BIT_AND_EXPR
6252 && TREE_CODE (arg1) == BIT_AND_EXPR
6253 && operand_equal_p (TREE_OPERAND (arg0, 0),
6254 TREE_OPERAND (arg1, 0), 0))
6256 tree mask0 = TREE_OPERAND (arg0, 1);
6257 tree mask1 = TREE_OPERAND (arg1, 1);
6258 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6260 if (operand_equal_p (tem, mask1, 0))
6262 tem = fold (build (BIT_XOR_EXPR, type,
6263 TREE_OPERAND (arg0, 0), mask1));
6264 return fold (build (MINUS_EXPR, type, tem, mask1));
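/* Illustrative cases (hypothetical operands, B = 7, a power of two
   minus one): "a - (a & b)" becomes "~b & a", and
   "(a & ~7) - (a & 7)" becomes "(a ^ 7) - 7"; writing a = 8h + l
   with 0 <= l < 8, both sides of the latter equal 8h - l.  */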
6269 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6270 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6271 return non_lvalue (fold_convert (type, arg0));
6273 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6274 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6275 (-ARG1 + ARG0) reduces to -ARG1. */
6276 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6277 return negate_expr (fold_convert (type, arg1));
6279 /* Fold &x - &x. This can happen from &x.foo - &x.
6280 This is unsafe for certain floats even in non-IEEE formats.
6281 In IEEE, it is unsafe because it gives the wrong result for NaNs.
6282 Also note that operand_equal_p is always false if an operand
6283 is volatile. */
6285 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6286 && operand_equal_p (arg0, arg1, 0))
6287 return fold_convert (type, integer_zero_node);
6289 /* A - B -> A + (-B) if B is easily negatable. */
6290 if (!wins && negate_expr_p (arg1)
6291 && (FLOAT_TYPE_P (type)
6292 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6293 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6295 if (TREE_CODE (arg0) == MULT_EXPR
6296 && TREE_CODE (arg1) == MULT_EXPR
6297 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6299 /* (A * C) - (B * C) -> (A-B) * C. */
6300 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6301 TREE_OPERAND (arg1, 1), 0))
6302 return fold (build (MULT_EXPR, type,
6303 fold (build (MINUS_EXPR, type,
6304 TREE_OPERAND (arg0, 0),
6305 TREE_OPERAND (arg1, 0))),
6306 TREE_OPERAND (arg0, 1)));
6307 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6308 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6309 TREE_OPERAND (arg1, 0), 0))
6310 return fold (build (MULT_EXPR, type,
6311 TREE_OPERAND (arg0, 0),
6312 fold (build (MINUS_EXPR, type,
6313 TREE_OPERAND (arg0, 1),
6314 TREE_OPERAND (arg1, 1)))));
6317 goto associate;
6319 case MULT_EXPR:
6320 /* (-A) * (-B) -> A * B */
6321 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6322 return fold (build (MULT_EXPR, type,
6323 TREE_OPERAND (arg0, 0),
6324 negate_expr (arg1)));
6325 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6326 return fold (build (MULT_EXPR, type,
6327 negate_expr (arg0),
6328 TREE_OPERAND (arg1, 0)));
6330 if (! FLOAT_TYPE_P (type))
6332 if (integer_zerop (arg1))
6333 return omit_one_operand (type, arg1, arg0);
6334 if (integer_onep (arg1))
6335 return non_lvalue (fold_convert (type, arg0));
6337 /* (a * (1 << b)) is (a << b) */
6338 if (TREE_CODE (arg1) == LSHIFT_EXPR
6339 && integer_onep (TREE_OPERAND (arg1, 0)))
6340 return fold (build (LSHIFT_EXPR, type, arg0,
6341 TREE_OPERAND (arg1, 1)));
6342 if (TREE_CODE (arg0) == LSHIFT_EXPR
6343 && integer_onep (TREE_OPERAND (arg0, 0)))
6344 return fold (build (LSHIFT_EXPR, type, arg1,
6345 TREE_OPERAND (arg0, 1)));
6347 if (TREE_CODE (arg1) == INTEGER_CST
6348 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6349 fold_convert (type, arg1),
6350 code, NULL_TREE)))
6351 return fold_convert (type, tem);
6354 else
6356 /* Maybe fold x * 0 to 0. The expressions aren't the same
6357 when x is NaN, since x * 0 is also NaN. Nor are they the
6358 same in modes with signed zeros, since multiplying a
6359 negative value by 0 gives -0, not +0. */
6360 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6361 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6362 && real_zerop (arg1))
6363 return omit_one_operand (type, arg1, arg0);
6364 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6365 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6366 && real_onep (arg1))
6367 return non_lvalue (fold_convert (type, arg0));
6369 /* Transform x * -1.0 into -x. */
6370 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6371 && real_minus_onep (arg1))
6372 return fold (build1 (NEGATE_EXPR, type, arg0));
6374 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6375 if (flag_unsafe_math_optimizations
6376 && TREE_CODE (arg0) == RDIV_EXPR
6377 && TREE_CODE (arg1) == REAL_CST
6378 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6380 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6381 arg1, 0);
6382 if (tem)
6383 return fold (build (RDIV_EXPR, type, tem,
6384 TREE_OPERAND (arg0, 1)));
6387 if (flag_unsafe_math_optimizations)
6389 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6390 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6392 /* Optimizations of sqrt(...)*sqrt(...). */
6393 if (fcode0 == fcode1 && BUILTIN_SQRT_P (fcode0))
6395 tree sqrtfn, arg, arglist;
6396 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6397 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6399 /* Optimize sqrt(x)*sqrt(x) as x. */
6400 if (operand_equal_p (arg00, arg10, 0)
6401 && ! HONOR_SNANS (TYPE_MODE (type)))
6402 return arg00;
6404 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6405 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6406 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6407 arglist = build_tree_list (NULL_TREE, arg);
6408 return build_function_call_expr (sqrtfn, arglist);
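/* Editor's sketch of the effect (requires -funsafe-math-optimizations):
   sqrt (x) * sqrt (x)  ->  x             (when SNaNs are not honored)
   sqrt (x) * sqrt (y)  ->  sqrt (x * y)
   The second rewrite is unsafe in general: for x = y = -1.0 the
   original multiplies two NaNs, while sqrt (1.0) is 1.0.  */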
6411 /* Optimize expN(x)*expN(y) as expN(x+y). */
6412 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6414 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6415 tree arg = build (PLUS_EXPR, type,
6416 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6417 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6418 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6419 return build_function_call_expr (expfn, arglist);
6422 /* Optimizations of pow(...)*pow(...). */
6423 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6424 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6425 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6427 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6428 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6429 1)));
6430 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6431 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6432 1)));
6434 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6435 if (operand_equal_p (arg01, arg11, 0))
6437 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6438 tree arg = build (MULT_EXPR, type, arg00, arg10);
6439 tree arglist = tree_cons (NULL_TREE, fold (arg),
6440 build_tree_list (NULL_TREE,
6441 arg01));
6442 return build_function_call_expr (powfn, arglist);
6445 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6446 if (operand_equal_p (arg00, arg10, 0))
6448 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6449 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6450 tree arglist = tree_cons (NULL_TREE, arg00,
6451 build_tree_list (NULL_TREE,
6452 arg));
6453 return build_function_call_expr (powfn, arglist);
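/* For instance (hypothetical arguments):
   pow (x, 2.0) * pow (z, 2.0)  ->  pow (x * z, 2.0)
   pow (x, 2.0) * pow (x, 3.0)  ->  pow (x, 5.0)
   either way halving the number of pow calls.  */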
6457 /* Optimize tan(x)*cos(x) as sin(x). */
6458 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6459 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6460 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6461 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6462 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6463 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6464 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6465 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6467 tree sinfn;
6469 switch (fcode0)
6471 case BUILT_IN_TAN:
6472 case BUILT_IN_COS:
6473 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6474 break;
6475 case BUILT_IN_TANF:
6476 case BUILT_IN_COSF:
6477 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6478 break;
6479 case BUILT_IN_TANL:
6480 case BUILT_IN_COSL:
6481 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6482 break;
6483 default:
6484 sinfn = NULL_TREE;
6487 if (sinfn != NULL_TREE)
6488 return build_function_call_expr (sinfn,
6489 TREE_OPERAND (arg0, 1));
6492 /* Optimize x*pow(x,c) as pow(x,c+1). */
6493 if (fcode1 == BUILT_IN_POW
6494 || fcode1 == BUILT_IN_POWF
6495 || fcode1 == BUILT_IN_POWL)
6497 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6498 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6499 1)));
6500 if (TREE_CODE (arg11) == REAL_CST
6501 && ! TREE_CONSTANT_OVERFLOW (arg11)
6502 && operand_equal_p (arg0, arg10, 0))
6504 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6505 REAL_VALUE_TYPE c;
6506 tree arg, arglist;
6508 c = TREE_REAL_CST (arg11);
6509 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6510 arg = build_real (type, c);
6511 arglist = build_tree_list (NULL_TREE, arg);
6512 arglist = tree_cons (NULL_TREE, arg0, arglist);
6513 return build_function_call_expr (powfn, arglist);
6517 /* Optimize pow(x,c)*x as pow(x,c+1). */
6518 if (fcode0 == BUILT_IN_POW
6519 || fcode0 == BUILT_IN_POWF
6520 || fcode0 == BUILT_IN_POWL)
6522 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6523 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6524 1)));
6525 if (TREE_CODE (arg01) == REAL_CST
6526 && ! TREE_CONSTANT_OVERFLOW (arg01)
6527 && operand_equal_p (arg1, arg00, 0))
6529 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6530 REAL_VALUE_TYPE c;
6531 tree arg, arglist;
6533 c = TREE_REAL_CST (arg01);
6534 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6535 arg = build_real (type, c);
6536 arglist = build_tree_list (NULL_TREE, arg);
6537 arglist = tree_cons (NULL_TREE, arg1, arglist);
6538 return build_function_call_expr (powfn, arglist);
6542 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6543 if (! optimize_size
6544 && operand_equal_p (arg0, arg1, 0))
6546 tree powfn;
6548 if (type == double_type_node)
6549 powfn = implicit_built_in_decls[BUILT_IN_POW];
6550 else if (type == float_type_node)
6551 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6552 else if (type == long_double_type_node)
6553 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6554 else
6555 powfn = NULL_TREE;
6557 if (powfn)
6559 tree arg = build_real (type, dconst2);
6560 tree arglist = build_tree_list (NULL_TREE, arg);
6561 arglist = tree_cons (NULL_TREE, arg0, arglist);
6562 return build_function_call_expr (powfn, arglist);
6567 goto associate;
6569 case BIT_IOR_EXPR:
6570 bit_ior:
6571 if (integer_all_onesp (arg1))
6572 return omit_one_operand (type, arg1, arg0);
6573 if (integer_zerop (arg1))
6574 return non_lvalue (fold_convert (type, arg0));
6575 if (operand_equal_p (arg0, arg1, 0))
6576 return non_lvalue (fold_convert (type, arg0));
6577 t1 = distribute_bit_expr (code, type, arg0, arg1);
6578 if (t1 != NULL_TREE)
6579 return t1;
6581 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6583 This results in more efficient code for machines without a NAND
6584 instruction. Combine will canonicalize to the first form
6585 which will allow use of NAND instructions provided by the
6586 backend if they exist. */
6587 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6588 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6590 return fold (build1 (BIT_NOT_EXPR, type,
6591 build (BIT_AND_EXPR, type,
6592 TREE_OPERAND (arg0, 0),
6593 TREE_OPERAND (arg1, 0))));
6596 /* See if this can be simplified into a rotate first. If that
6597 is unsuccessful, continue in the association code. */
6598 goto bit_rotate;
6600 case BIT_XOR_EXPR:
6601 if (integer_zerop (arg1))
6602 return non_lvalue (fold_convert (type, arg0));
6603 if (integer_all_onesp (arg1))
6604 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6605 if (operand_equal_p (arg0, arg1, 0))
6606 return omit_one_operand (type, integer_zero_node, arg0);
6608 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6609 with a constant, and the two constants have no bits in common,
6610 we should treat this as a BIT_IOR_EXPR since this may produce more
6611 simplifications. */
6612 if (TREE_CODE (arg0) == BIT_AND_EXPR
6613 && TREE_CODE (arg1) == BIT_AND_EXPR
6614 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6615 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6616 && integer_zerop (const_binop (BIT_AND_EXPR,
6617 TREE_OPERAND (arg0, 1),
6618 TREE_OPERAND (arg1, 1), 0)))
6620 code = BIT_IOR_EXPR;
6621 goto bit_ior;
6624 /* See if this can be simplified into a rotate first. If that
6625 is unsuccessful, continue in the association code. */
6626 goto bit_rotate;
6628 case BIT_AND_EXPR:
6629 if (integer_all_onesp (arg1))
6630 return non_lvalue (fold_convert (type, arg0));
6631 if (integer_zerop (arg1))
6632 return omit_one_operand (type, arg1, arg0);
6633 if (operand_equal_p (arg0, arg1, 0))
6634 return non_lvalue (fold_convert (type, arg0));
6635 t1 = distribute_bit_expr (code, type, arg0, arg1);
6636 if (t1 != NULL_TREE)
6637 return t1;
6638 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6639 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6640 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6642 unsigned int prec
6643 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6645 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6646 && (~TREE_INT_CST_LOW (arg1)
6647 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6648 return fold_convert (type, TREE_OPERAND (arg0, 0));
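/* Example (editor's note; assumes an 8-bit unsigned char): for
   unsigned char c, ((int) c & 0377) is just (int) c, because the
   mask 0377 covers all 8 value bits of c -- the test above verifies
   that no bit below the precision is missing from the constant.  */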
6651 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6653 This results in more efficient code for machines without a NOR
6654 instruction. Combine will canonicalize to the first form
6655 which will allow use of NOR instructions provided by the
6656 backend if they exist. */
6657 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6658 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6660 return fold (build1 (BIT_NOT_EXPR, type,
6661 build (BIT_IOR_EXPR, type,
6662 TREE_OPERAND (arg0, 0),
6663 TREE_OPERAND (arg1, 0))));
6666 goto associate;
6668 case RDIV_EXPR:
6669 /* Don't touch a floating-point divide by zero unless the mode
6670 of the constant can represent infinity. */
6671 if (TREE_CODE (arg1) == REAL_CST
6672 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6673 && real_zerop (arg1))
6674 return t;
6676 /* (-A) / (-B) -> A / B */
6677 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6678 return fold (build (RDIV_EXPR, type,
6679 TREE_OPERAND (arg0, 0),
6680 negate_expr (arg1)));
6681 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6682 return fold (build (RDIV_EXPR, type,
6683 negate_expr (arg0),
6684 TREE_OPERAND (arg1, 0)));
6686 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6687 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6688 && real_onep (arg1))
6689 return non_lvalue (fold_convert (type, arg0));
6691 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6692 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6693 && real_minus_onep (arg1))
6694 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6696 /* If ARG1 is a constant, we can convert this to a multiply by the
6697 reciprocal. This does not have the same rounding properties,
6698 so only do this if -funsafe-math-optimizations. We can actually
6699 always safely do it if ARG1 is a power of two, but it's hard to
6700 tell if it is or not in a portable manner. */
6701 if (TREE_CODE (arg1) == REAL_CST)
6703 if (flag_unsafe_math_optimizations
6704 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6705 arg1, 0)))
6706 return fold (build (MULT_EXPR, type, arg0, tem));
6707 /* Find the reciprocal if optimizing and the result is exact. */
6708 if (optimize)
6710 REAL_VALUE_TYPE r;
6711 r = TREE_REAL_CST (arg1);
6712 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6714 tem = build_real (type, r);
6715 return fold (build (MULT_EXPR, type, arg0, tem));
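/* Worked example (hypothetical operands): when optimizing, x / 4.0
   folds to x * 0.25 even without unsafe math, since 0.25 is exactly
   representable; x / 3.0 folds to x * (1.0 / 3.0) only under
   -funsafe-math-optimizations, because that reciprocal rounds.  */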
6719 /* Convert A/B/C to A/(B*C). */
6720 if (flag_unsafe_math_optimizations
6721 && TREE_CODE (arg0) == RDIV_EXPR)
6722 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6723 fold (build (MULT_EXPR, type,
6724 TREE_OPERAND (arg0, 1), arg1))));
6726 /* Convert A/(B/C) to (A/B)*C. */
6727 if (flag_unsafe_math_optimizations
6728 && TREE_CODE (arg1) == RDIV_EXPR)
6729 return fold (build (MULT_EXPR, type,
6730 fold (build (RDIV_EXPR, type, arg0,
6731 TREE_OPERAND (arg1, 0))),
6732 TREE_OPERAND (arg1, 1)));
6734 /* Convert C1/(X*C2) into (C1/C2)/X. */
6735 if (flag_unsafe_math_optimizations
6736 && TREE_CODE (arg1) == MULT_EXPR
6737 && TREE_CODE (arg0) == REAL_CST
6738 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6740 tree tem = const_binop (RDIV_EXPR, arg0,
6741 TREE_OPERAND (arg1, 1), 0);
6742 if (tem)
6743 return fold (build (RDIV_EXPR, type, tem,
6744 TREE_OPERAND (arg1, 0)));
6747 if (flag_unsafe_math_optimizations)
6749 enum built_in_function fcode = builtin_mathfn_code (arg1);
6750 /* Optimize x/expN(y) into x*expN(-y). */
6751 if (BUILTIN_EXPONENT_P (fcode))
6753 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6754 tree arg = build1 (NEGATE_EXPR, type,
6755 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6756 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6757 arg1 = build_function_call_expr (expfn, arglist);
6758 return fold (build (MULT_EXPR, type, arg0, arg1));
6761 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6762 if (fcode == BUILT_IN_POW
6763 || fcode == BUILT_IN_POWF
6764 || fcode == BUILT_IN_POWL)
6766 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6767 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6768 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6769 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6770 tree arglist = tree_cons(NULL_TREE, arg10,
6771 build_tree_list (NULL_TREE, neg11));
6772 arg1 = build_function_call_expr (powfn, arglist);
6773 return fold (build (MULT_EXPR, type, arg0, arg1));
6777 if (flag_unsafe_math_optimizations)
6779 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6780 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6782 /* Optimize sin(x)/cos(x) as tan(x). */
6783 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6784 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6785 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6786 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6787 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6789 tree tanfn;
6791 if (fcode0 == BUILT_IN_SIN)
6792 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6793 else if (fcode0 == BUILT_IN_SINF)
6794 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6795 else if (fcode0 == BUILT_IN_SINL)
6796 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6797 else
6798 tanfn = NULL_TREE;
6800 if (tanfn != NULL_TREE)
6801 return build_function_call_expr (tanfn,
6802 TREE_OPERAND (arg0, 1));
6805 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6806 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6807 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6808 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6809 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6810 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6812 tree tanfn;
6814 if (fcode0 == BUILT_IN_COS)
6815 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6816 else if (fcode0 == BUILT_IN_COSF)
6817 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6818 else if (fcode0 == BUILT_IN_COSL)
6819 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6820 else
6821 tanfn = NULL_TREE;
6823 if (tanfn != NULL_TREE)
6825 tree tmp = TREE_OPERAND (arg0, 1);
6826 tmp = build_function_call_expr (tanfn, tmp);
6827 return fold (build (RDIV_EXPR, type,
6828 build_real (type, dconst1),
6829 tmp));
6833 /* Optimize pow(x,c)/x as pow(x,c-1). */
6834 if (fcode0 == BUILT_IN_POW
6835 || fcode0 == BUILT_IN_POWF
6836 || fcode0 == BUILT_IN_POWL)
6838 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6839 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6840 if (TREE_CODE (arg01) == REAL_CST
6841 && ! TREE_CONSTANT_OVERFLOW (arg01)
6842 && operand_equal_p (arg1, arg00, 0))
6844 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6845 REAL_VALUE_TYPE c;
6846 tree arg, arglist;
6848 c = TREE_REAL_CST (arg01);
6849 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6850 arg = build_real (type, c);
6851 arglist = build_tree_list (NULL_TREE, arg);
6852 arglist = tree_cons (NULL_TREE, arg1, arglist);
6853 return build_function_call_expr (powfn, arglist);
6857 goto binary;
6859 case TRUNC_DIV_EXPR:
6860 case ROUND_DIV_EXPR:
6861 case FLOOR_DIV_EXPR:
6862 case CEIL_DIV_EXPR:
6863 case EXACT_DIV_EXPR:
6864 if (integer_onep (arg1))
6865 return non_lvalue (fold_convert (type, arg0));
6866 if (integer_zerop (arg1))
6867 return t;
6869 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6870 operation, EXACT_DIV_EXPR.
6872 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6873 At one time others generated faster code; it's not clear if they do
6874 after the last round of changes to the DIV code in expmed.c. */
6875 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6876 && multiple_of_p (type, arg0, arg1))
6877 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6879 if (TREE_CODE (arg1) == INTEGER_CST
6880 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6881 code, NULL_TREE)))
6882 return fold_convert (type, tem);
6884 goto binary;
6886 case CEIL_MOD_EXPR:
6887 case FLOOR_MOD_EXPR:
6888 case ROUND_MOD_EXPR:
6889 case TRUNC_MOD_EXPR:
6890 if (integer_onep (arg1))
6891 return omit_one_operand (type, integer_zero_node, arg0);
6892 if (integer_zerop (arg1))
6893 return t;
6895 if (TREE_CODE (arg1) == INTEGER_CST
6896 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6897 code, NULL_TREE)))
6898 return fold_convert (type, tem);
6900 goto binary;
6902 case LROTATE_EXPR:
6903 case RROTATE_EXPR:
6904 if (integer_all_onesp (arg0))
6905 return omit_one_operand (type, arg0, arg1);
6906 goto shift;
6908 case RSHIFT_EXPR:
6909 /* Optimize -1 >> x for arithmetic right shifts. */
6910 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6911 return omit_one_operand (type, arg0, arg1);
6912 /* ... fall through ... */
6914 case LSHIFT_EXPR:
6915 shift:
6916 if (integer_zerop (arg1))
6917 return non_lvalue (fold_convert (type, arg0));
6918 if (integer_zerop (arg0))
6919 return omit_one_operand (type, arg0, arg1);
6921 /* Since a negative shift count is not well-defined,
6922 don't try to compute it in the compiler. */
6923 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6924 return t;
6925 /* Rewrite an LROTATE_EXPR by a constant into an
6926 RROTATE_EXPR by a new constant. */
6927 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6929 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6930 tem = fold_convert (TREE_TYPE (arg1), tem);
6931 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6932 return fold (build (RROTATE_EXPR, type, arg0, tem));
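/* E.g. (editor's sketch; assumes a 32-bit type): x rotated left by 3
   is rewritten as x rotated right by 29, so only one canonical
   rotate direction reaches the rest of the compiler.  */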
6935 /* If we have a rotate of a bit operation with the rotate count and
6936 the second operand of the bit operation both constant,
6937 permute the two operations. */
6938 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6939 && (TREE_CODE (arg0) == BIT_AND_EXPR
6940 || TREE_CODE (arg0) == BIT_IOR_EXPR
6941 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6942 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6943 return fold (build (TREE_CODE (arg0), type,
6944 fold (build (code, type,
6945 TREE_OPERAND (arg0, 0), arg1)),
6946 fold (build (code, type,
6947 TREE_OPERAND (arg0, 1), arg1))));
6949 /* Two consecutive rotates adding up to the width of the mode can
6950 be ignored. */
6951 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6952 && TREE_CODE (arg0) == RROTATE_EXPR
6953 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6954 && TREE_INT_CST_HIGH (arg1) == 0
6955 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6956 && ((TREE_INT_CST_LOW (arg1)
6957 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6958 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6959 return TREE_OPERAND (arg0, 0);
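/* Illustration (hypothetical operands; assumes a 32-bit type):
   rotating x right by 10 and then right again by 22 folds back to
   plain x, because the two counts sum to the mode width.  */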
6961 goto binary;
6963 case MIN_EXPR:
6964 if (operand_equal_p (arg0, arg1, 0))
6965 return omit_one_operand (type, arg0, arg1);
6966 if (INTEGRAL_TYPE_P (type)
6967 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6968 return omit_one_operand (type, arg1, arg0);
6969 goto associate;
6971 case MAX_EXPR:
6972 if (operand_equal_p (arg0, arg1, 0))
6973 return omit_one_operand (type, arg0, arg1);
6974 if (INTEGRAL_TYPE_P (type)
6975 && TYPE_MAX_VALUE (type)
6976 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6977 return omit_one_operand (type, arg1, arg0);
6978 goto associate;
6980 case TRUTH_NOT_EXPR:
6981 /* Note that the operand of this must be an int
6982 and its values must be 0 or 1.
6983 ("true" is a fixed value perhaps depending on the language,
6984 but we don't handle values other than 1 correctly yet.) */
6985 tem = invert_truthvalue (arg0);
6986 /* Avoid infinite recursion. */
6987 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6989 tem = fold_single_bit_test (code, arg0, arg1, type);
6990 if (tem)
6991 return tem;
6992 return t;
6994 return fold_convert (type, tem);
6996 case TRUTH_ANDIF_EXPR:
6997 /* Note that the operands of this must be ints
6998 and their values must be 0 or 1.
6999 ("true" is a fixed value perhaps depending on the language.) */
7000 /* If first arg is constant zero, return it. */
7001 if (integer_zerop (arg0))
7002 return fold_convert (type, arg0);
7003 case TRUTH_AND_EXPR:
7004 /* If either arg is constant true, drop it. */
7005 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7006 return non_lvalue (fold_convert (type, arg1));
7007 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7008 /* Preserve sequence points. */
7009 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7010 return non_lvalue (fold_convert (type, arg0));
7011 /* If second arg is constant zero, result is zero, but first arg
7012 must be evaluated. */
7013 if (integer_zerop (arg1))
7014 return omit_one_operand (type, arg1, arg0);
7015 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7016 case will be handled here. */
7017 if (integer_zerop (arg0))
7018 return omit_one_operand (type, arg0, arg1);
7020 truth_andor:
7021 /* We only do these simplifications if we are optimizing. */
7022 if (!optimize)
7023 return t;
7025 /* Check for things like (A || B) && (A || C). We can convert this
7026 to A || (B && C). Note that either operator can be any of the four
7027 truth and/or operations and the transformation will still be
7028 valid. Also note that we only care about order for the
7029 ANDIF and ORIF operators. If B contains side effects, this
7030 might change the truth-value of A. */
7031 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7032 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7033 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7034 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7035 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7036 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7038 tree a00 = TREE_OPERAND (arg0, 0);
7039 tree a01 = TREE_OPERAND (arg0, 1);
7040 tree a10 = TREE_OPERAND (arg1, 0);
7041 tree a11 = TREE_OPERAND (arg1, 1);
7042 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7043 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7044 && (code == TRUTH_AND_EXPR
7045 || code == TRUTH_OR_EXPR));
7047 if (operand_equal_p (a00, a10, 0))
7048 return fold (build (TREE_CODE (arg0), type, a00,
7049 fold (build (code, type, a01, a11))));
7050 else if (commutative && operand_equal_p (a00, a11, 0))
7051 return fold (build (TREE_CODE (arg0), type, a00,
7052 fold (build (code, type, a01, a10))));
7053 else if (commutative && operand_equal_p (a01, a10, 0))
7054 return fold (build (TREE_CODE (arg0), type, a01,
7055 fold (build (code, type, a00, a11))));
7057	 /* This case is tricky because we must either have commutative
7058 operators or else A10 must not have side-effects. */
7060 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7061 && operand_equal_p (a01, a11, 0))
7062 return fold (build (TREE_CODE (arg0), type,
7063 fold (build (code, type, a00, a10)),
7064 a01));
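      /* Illustrative example: in (a || b) && (a || c), a00 and a10 are
	 both a, so the first match above rewrites the whole tree as
	 a || (b && c).  */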
7067 /* See if we can build a range comparison. */
7068 if (0 != (tem = fold_range_test (t)))
7069 return tem;
7071 /* Check for the possibility of merging component references. If our
7072 lhs is another similar operation, try to merge its rhs with our
7073 rhs. Then try to merge our lhs and rhs. */
7074 if (TREE_CODE (arg0) == code
7075 && 0 != (tem = fold_truthop (code, type,
7076 TREE_OPERAND (arg0, 1), arg1)))
7077 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7079 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7080 return tem;
7082 return t;
7084 case TRUTH_ORIF_EXPR:
7085 /* Note that the operands of this must be ints
7086 and their values must be 0 or true.
7087 ("true" is a fixed value perhaps depending on the language.) */
7088 /* If first arg is constant true, return it. */
7089 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7090 return fold_convert (type, arg0);
7091 case TRUTH_OR_EXPR:
7092 /* If either arg is constant zero, drop it. */
7093 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7094 return non_lvalue (fold_convert (type, arg1));
7095 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7096 /* Preserve sequence points. */
7097 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7098 return non_lvalue (fold_convert (type, arg0));
7099 /* If second arg is constant true, result is true, but we must
7100 evaluate first arg. */
7101 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7102 return omit_one_operand (type, arg1, arg0);
7103 /* Likewise for first arg, but note this only occurs here for
7104 TRUTH_OR_EXPR. */
7105 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7106 return omit_one_operand (type, arg0, arg1);
7107 goto truth_andor;
7109 case TRUTH_XOR_EXPR:
7110 /* If either arg is constant zero, drop it. */
7111 if (integer_zerop (arg0))
7112 return non_lvalue (fold_convert (type, arg1));
7113 if (integer_zerop (arg1))
7114 return non_lvalue (fold_convert (type, arg0));
7115 /* If either arg is constant true, this is a logical inversion. */
7116 if (integer_onep (arg0))
7117 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7118 if (integer_onep (arg1))
7119 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7120 return t;
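      /* Illustrative examples of the XOR folds above: b ^ 0 folds to b,
	 and b ^ 1 folds to !b via invert_truthvalue.  */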
7122 case EQ_EXPR:
7123 case NE_EXPR:
7124 case LT_EXPR:
7125 case GT_EXPR:
7126 case LE_EXPR:
7127 case GE_EXPR:
7128 /* If one arg is a real or integer constant, put it last. */
7129 if (tree_swap_operands_p (arg0, arg1, true))
7130 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7132 /* If this is an equality comparison of the address of a non-weak
7133 object against zero, then we know the result. */
7134 if ((code == EQ_EXPR || code == NE_EXPR)
7135 && TREE_CODE (arg0) == ADDR_EXPR
7136 && DECL_P (TREE_OPERAND (arg0, 0))
7137 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7138 && integer_zerop (arg1))
7140 if (code == EQ_EXPR)
7141 return integer_zero_node;
7142 else
7143 return integer_one_node;
7146 /* If this is an equality comparison of the address of two non-weak,
7147	 unaliased symbols, neither of which is extern (since we do not
7148 have access to attributes for externs), then we know the result. */
7149 if ((code == EQ_EXPR || code == NE_EXPR)
7150 && TREE_CODE (arg0) == ADDR_EXPR
7151 && DECL_P (TREE_OPERAND (arg0, 0))
7152 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7153 && ! lookup_attribute ("alias",
7154 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7155 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7156 && TREE_CODE (arg1) == ADDR_EXPR
7157 && DECL_P (TREE_OPERAND (arg1, 0))
7158 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7159 && ! lookup_attribute ("alias",
7160 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7161 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7163 if (code == EQ_EXPR)
7164 return (operand_equal_p (arg0, arg1, 0)
7165 ? integer_one_node : integer_zero_node);
7166 else
7167 return (operand_equal_p (arg0, arg1, 0)
7168 ? integer_zero_node : integer_one_node);
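      /* Illustrative example (assuming non-weak, non-aliased, non-extern
	 statics x and y): &x == &y folds to 0, &x != &y folds to 1, and
	 &x == &x folds to 1.  */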
7171 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7173 tree targ0 = strip_float_extensions (arg0);
7174 tree targ1 = strip_float_extensions (arg1);
7175 tree newtype = TREE_TYPE (targ0);
7177 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7178 newtype = TREE_TYPE (targ1);
7180 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7181 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7182 return fold (build (code, type, fold_convert (newtype, targ0),
7183 fold_convert (newtype, targ1)));
7185 /* (-a) CMP (-b) -> b CMP a */
7186 if (TREE_CODE (arg0) == NEGATE_EXPR
7187 && TREE_CODE (arg1) == NEGATE_EXPR)
7188 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7189 TREE_OPERAND (arg0, 0)));
7191 if (TREE_CODE (arg1) == REAL_CST)
7193 REAL_VALUE_TYPE cst;
7194 cst = TREE_REAL_CST (arg1);
7196 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7197 if (TREE_CODE (arg0) == NEGATE_EXPR)
7198 return
7199 fold (build (swap_tree_comparison (code), type,
7200 TREE_OPERAND (arg0, 0),
7201 build_real (TREE_TYPE (arg1),
7202 REAL_VALUE_NEGATE (cst))));
7204 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7205 /* a CMP (-0) -> a CMP 0 */
7206 if (REAL_VALUE_MINUS_ZERO (cst))
7207 return fold (build (code, type, arg0,
7208 build_real (TREE_TYPE (arg1), dconst0)));
7210 /* x != NaN is always true, other ops are always false. */
7211 if (REAL_VALUE_ISNAN (cst)
7212 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7214 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7215 return omit_one_operand (type, fold_convert (type, tem), arg0);
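	      /* Illustrative example: with cst a quiet NaN, x < NaN,
		 x == NaN and x >= NaN all fold to 0, while x != NaN
		 folds to 1; x itself is kept for its side effects.  */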
7218 /* Fold comparisons against infinity. */
7219 if (REAL_VALUE_ISINF (cst))
7221 tem = fold_inf_compare (code, type, arg0, arg1);
7222 if (tem != NULL_TREE)
7223 return tem;
7227 /* If this is a comparison of a real constant with a PLUS_EXPR
7228 or a MINUS_EXPR of a real constant, we can convert it into a
7229 comparison with a revised real constant as long as no overflow
7230 occurs when unsafe_math_optimizations are enabled. */
7231 if (flag_unsafe_math_optimizations
7232 && TREE_CODE (arg1) == REAL_CST
7233 && (TREE_CODE (arg0) == PLUS_EXPR
7234 || TREE_CODE (arg0) == MINUS_EXPR)
7235 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7236 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7237 ? MINUS_EXPR : PLUS_EXPR,
7238 arg1, TREE_OPERAND (arg0, 1), 0))
7239 && ! TREE_CONSTANT_OVERFLOW (tem))
7240 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
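      /* Illustrative example under -funsafe-math-optimizations:
	 x + 1.0 < 3.0 becomes x < 2.0, provided 3.0 - 1.0 folds
	 without overflow.  */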
7242 /* Likewise, we can simplify a comparison of a real constant with
7243 a MINUS_EXPR whose first operand is also a real constant, i.e.
7244 (c1 - x) < c2 becomes x > c1-c2. */
7245 if (flag_unsafe_math_optimizations
7246 && TREE_CODE (arg1) == REAL_CST
7247 && TREE_CODE (arg0) == MINUS_EXPR
7248 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7249 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7250 arg1, 0))
7251 && ! TREE_CONSTANT_OVERFLOW (tem))
7252 return fold (build (swap_tree_comparison (code), type,
7253 TREE_OPERAND (arg0, 1), tem));
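      /* Illustrative example: (10.0 - x) < 4.0 becomes x > 6.0; the
	 comparison is swapped because the sign of x is flipped.  */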
7255 /* Fold comparisons against built-in math functions. */
7256 if (TREE_CODE (arg1) == REAL_CST
7257 && flag_unsafe_math_optimizations
7258 && ! flag_errno_math)
7260 enum built_in_function fcode = builtin_mathfn_code (arg0);
7262 if (fcode != END_BUILTINS)
7264 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7265 if (tem != NULL_TREE)
7266 return tem;
7271 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7272 if (TREE_CONSTANT (arg1)
7273 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7274 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7275 /* This optimization is invalid for ordered comparisons
7276 if CONST+INCR overflows or if foo+incr might overflow.
7277 This optimization is invalid for floating point due to rounding.
7278 For pointer types we assume overflow doesn't happen. */
7279 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7280 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7281 && (code == EQ_EXPR || code == NE_EXPR))))
7283 tree varop, newconst;
7285 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7287 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7288 arg1, TREE_OPERAND (arg0, 1)));
7289 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7290 TREE_OPERAND (arg0, 0),
7291 TREE_OPERAND (arg0, 1));
7293 else
7295 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7296 arg1, TREE_OPERAND (arg0, 1)));
7297 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7298 TREE_OPERAND (arg0, 0),
7299 TREE_OPERAND (arg0, 1));
7303 /* If VAROP is a reference to a bitfield, we must mask
7304 the constant by the width of the field. */
7305 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7306	 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7308 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7309 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7310 tree folded_compare;
7311 tree mask = 0;
7313 /* First check whether the comparison would come out
7314	 always the same.  If we don't check first, the masking
7315	 would change the meaning of the comparison.  */
7316 folded_compare = fold (build2 (code, type,
7317 TREE_OPERAND (varop, 0),
7318 arg1));
7319 if (integer_zerop (folded_compare)
7320 || integer_onep (folded_compare))
7321 return omit_one_operand (type, folded_compare, varop);
7323 if (size < HOST_BITS_PER_WIDE_INT)
7325 unsigned HOST_WIDE_INT lo = ((unsigned HOST_WIDE_INT) 1
7326 << size) - 1;
7327 mask = build_int_2 (lo, 0);
7329 else if (size < 2 * HOST_BITS_PER_WIDE_INT)
7331 HOST_WIDE_INT hi = ((HOST_WIDE_INT) 1
7332 << (size - HOST_BITS_PER_WIDE_INT)) - 1;
7333 mask = build_int_2 (~0, hi);
7336 if (mask)
7338 mask = fold_convert (TREE_TYPE (varop), mask);
7339 newconst = fold (build2 (BIT_AND_EXPR, TREE_TYPE (varop),
7340 newconst, mask));
7344 return fold (build2 (code, type, varop, newconst));
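	  /* Illustrative example: i++ == 5 becomes ++i == 6, which
	     avoids keeping the pre-increment value of i live across
	     the comparison.  */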
7347 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7348 This transformation affects the cases which are handled in later
7349 optimizations involving comparisons with non-negative constants. */
7350 if (TREE_CODE (arg1) == INTEGER_CST
7351 && TREE_CODE (arg0) != INTEGER_CST
7352 && tree_int_cst_sgn (arg1) > 0)
7354 switch (code)
7356 case GE_EXPR:
7357 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7358 return fold (build (GT_EXPR, type, arg0, arg1));
7360 case LT_EXPR:
7361 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7362 return fold (build (LE_EXPR, type, arg0, arg1));
7364 default:
7365 break;
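      /* Illustrative examples of the rewrite above: x >= 5 becomes
	 x > 4, and x < 5 becomes x <= 4.  */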
7369 /* Comparisons with the highest or lowest possible integer of
7370 the specified size will have known values. */
7372 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7374 if (TREE_CODE (arg1) == INTEGER_CST
7375 && ! TREE_CONSTANT_OVERFLOW (arg1)
7376 && width <= HOST_BITS_PER_WIDE_INT
7377 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7378 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7380 unsigned HOST_WIDE_INT signed_max;
7381 unsigned HOST_WIDE_INT max, min;
7383 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7385 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7387 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7388 min = 0;
7390 else
7392 max = signed_max;
7393 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7396 if (TREE_INT_CST_HIGH (arg1) == 0
7397 && TREE_INT_CST_LOW (arg1) == max)
7398 switch (code)
7400 case GT_EXPR:
7401 return omit_one_operand (type,
7402 fold_convert (type,
7403 integer_zero_node),
7404 arg0);
7405 case GE_EXPR:
7406 return fold (build (EQ_EXPR, type, arg0, arg1));
7408 case LE_EXPR:
7409 return omit_one_operand (type,
7410 fold_convert (type,
7411 integer_one_node),
7412 arg0);
7413 case LT_EXPR:
7414 return fold (build (NE_EXPR, type, arg0, arg1));
7416 /* The GE_EXPR and LT_EXPR cases above are not normally
7417 reached because of previous transformations. */
7419 default:
7420 break;
7422 else if (TREE_INT_CST_HIGH (arg1) == 0
7423 && TREE_INT_CST_LOW (arg1) == max - 1)
7424 switch (code)
7426 case GT_EXPR:
7427 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7428 return fold (build (EQ_EXPR, type, arg0, arg1));
7429 case LE_EXPR:
7430 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7431 return fold (build (NE_EXPR, type, arg0, arg1));
7432 default:
7433 break;
7435 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7436 && TREE_INT_CST_LOW (arg1) == min)
7437 switch (code)
7439 case LT_EXPR:
7440 return omit_one_operand (type,
7441 fold_convert (type,
7442 integer_zero_node),
7443 arg0);
7444 case LE_EXPR:
7445 return fold (build (EQ_EXPR, type, arg0, arg1));
7447 case GE_EXPR:
7448 return omit_one_operand (type,
7449 fold_convert (type,
7450 integer_one_node),
7451 arg0);
7452 case GT_EXPR:
7453 return fold (build (NE_EXPR, type, arg0, arg1));
7455 default:
7456 break;
7458 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7459 && TREE_INT_CST_LOW (arg1) == min + 1)
7460 switch (code)
7462 case GE_EXPR:
7463 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7464 return fold (build (NE_EXPR, type, arg0, arg1));
7465 case LT_EXPR:
7466 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7467 return fold (build (EQ_EXPR, type, arg0, arg1));
7468 default:
7469 break;
7472 else if (TREE_INT_CST_HIGH (arg1) == 0
7473 && TREE_INT_CST_LOW (arg1) == signed_max
7474 && TREE_UNSIGNED (TREE_TYPE (arg1))
7475 /* signed_type does not work on pointer types. */
7476 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7478 /* The following case also applies to X < signed_max+1
7479	 and X >= signed_max+1 because of previous transformations.  */
7480 if (code == LE_EXPR || code == GT_EXPR)
7482 tree st0, st1;
7483 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7484 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7485 return fold
7486 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7487 type, fold_convert (st0, arg0),
7488 fold_convert (st1, integer_zero_node)));
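	  /* Illustrative example (assuming 32-bit int): for unsigned x,
	     x > 2147483647u folds to (int) x < 0, and x <= 2147483647u
	     folds to (int) x >= 0.  */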
7494 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7495 a MINUS_EXPR of a constant, we can convert it into a comparison with
7496 a revised constant as long as no overflow occurs. */
7497 if ((code == EQ_EXPR || code == NE_EXPR)
7498 && TREE_CODE (arg1) == INTEGER_CST
7499 && (TREE_CODE (arg0) == PLUS_EXPR
7500 || TREE_CODE (arg0) == MINUS_EXPR)
7501 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7502 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7503 ? MINUS_EXPR : PLUS_EXPR,
7504 arg1, TREE_OPERAND (arg0, 1), 0))
7505 && ! TREE_CONSTANT_OVERFLOW (tem))
7506 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7508 /* Similarly for a NEGATE_EXPR. */
7509 else if ((code == EQ_EXPR || code == NE_EXPR)
7510 && TREE_CODE (arg0) == NEGATE_EXPR
7511 && TREE_CODE (arg1) == INTEGER_CST
7512 && 0 != (tem = negate_expr (arg1))
7513 && TREE_CODE (tem) == INTEGER_CST
7514 && ! TREE_CONSTANT_OVERFLOW (tem))
7515 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7517 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7518 for !=. Don't do this for ordered comparisons due to overflow. */
7519 else if ((code == NE_EXPR || code == EQ_EXPR)
7520 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7521 return fold (build (code, type,
7522 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
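      /* Illustrative example: x - y == 0 folds to x == y, and
	 x - y != 0 folds to x != y; the ordered variants are left
	 alone because x - y may overflow.  */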
7524 /* If we are widening one operand of an integer comparison,
7525 see if the other operand is similarly being widened. Perhaps we
7526 can do the comparison in the narrower type. */
7527 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7528 && TREE_CODE (arg0) == NOP_EXPR
7529 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7530 && (code == EQ_EXPR || code == NE_EXPR
7531 || TREE_UNSIGNED (TREE_TYPE (arg0))
7532 == TREE_UNSIGNED (TREE_TYPE (tem)))
7533 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7534 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7535 || (TREE_CODE (t1) == INTEGER_CST
7536 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7537 return fold (build (code, type, tem,
7538 fold_convert (TREE_TYPE (tem), t1)));
7540 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7541 constant, we can simplify it. */
7542 else if (TREE_CODE (arg1) == INTEGER_CST
7543 && (TREE_CODE (arg0) == MIN_EXPR
7544 || TREE_CODE (arg0) == MAX_EXPR)
7545 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7546 return optimize_minmax_comparison (t);
7548 /* If we are comparing an ABS_EXPR with a constant, we can
7549 convert all the cases into explicit comparisons, but they may
7550 well not be faster than doing the ABS and one comparison.
7551 But ABS (X) <= C is a range comparison, which becomes a subtraction
7552 and a comparison, and is probably faster. */
7553 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7554 && TREE_CODE (arg0) == ABS_EXPR
7555 && ! TREE_SIDE_EFFECTS (arg0)
7556 && (0 != (tem = negate_expr (arg1)))
7557 && TREE_CODE (tem) == INTEGER_CST
7558 && ! TREE_CONSTANT_OVERFLOW (tem))
7559 return fold (build (TRUTH_ANDIF_EXPR, type,
7560 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7561 build (LE_EXPR, type,
7562 TREE_OPERAND (arg0, 0), arg1)));
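      /* Illustrative example: abs (x) <= 5 becomes x >= -5 && x <= 5,
	 a range test that the range code can reduce to a subtraction
	 and a single comparison.  */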
7564 /* If this is an EQ or NE comparison with zero and ARG0 is
7565 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7566 two operations, but the latter can be done in one less insn
7567 on machines that have only two-operand insns or on which a
7568 constant cannot be the first operand. */
7569 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7570 && TREE_CODE (arg0) == BIT_AND_EXPR)
7572 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7573 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7574 return
7575 fold (build (code, type,
7576 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7577 build (RSHIFT_EXPR,
7578 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7579 TREE_OPERAND (arg0, 1),
7580 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7581 fold_convert (TREE_TYPE (arg0),
7582 integer_one_node)),
7583 arg1));
7584 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7585 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7586 return
7587 fold (build (code, type,
7588 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7589 build (RSHIFT_EXPR,
7590 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7591 TREE_OPERAND (arg0, 0),
7592 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7593 fold_convert (TREE_TYPE (arg0),
7594 integer_one_node)),
7595 arg1));
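	  /* Illustrative example: ((1 << n) & b) == 0 is rewritten as
	     ((b >> n) & 1) == 0; both take two operations, but the
	     second form avoids a shift whose first operand is a
	     constant, which some machines cannot encode.  */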
7598 /* If this is an NE or EQ comparison of zero against the result of a
7599 signed MOD operation whose second operand is a power of 2, make
7600 the MOD operation unsigned since it is simpler and equivalent. */
7601 if ((code == NE_EXPR || code == EQ_EXPR)
7602 && integer_zerop (arg1)
7603 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7604 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7605 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7606 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7607 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7608 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7610 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7611 tree newmod = build (TREE_CODE (arg0), newtype,
7612 fold_convert (newtype,
7613 TREE_OPERAND (arg0, 0)),
7614 fold_convert (newtype,
7615 TREE_OPERAND (arg0, 1)));
7617 return build (code, type, newmod, fold_convert (newtype, arg1));
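      /* Illustrative example (assuming 32-bit int): for signed x,
	 x % 4 == 0 becomes (unsigned) x % 4u == 0, which lets the
	 modulus be computed as a mask of the low bits.  */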
7620 /* If this is an NE comparison of zero with an AND of one, remove the
7621 comparison since the AND will give the correct value. */
7622 if (code == NE_EXPR && integer_zerop (arg1)
7623 && TREE_CODE (arg0) == BIT_AND_EXPR
7624 && integer_onep (TREE_OPERAND (arg0, 1)))
7625 return fold_convert (type, arg0);
7627 /* If we have (A & C) == C where C is a power of 2, convert this into
7628 (A & C) != 0. Similarly for NE_EXPR. */
7629 if ((code == EQ_EXPR || code == NE_EXPR)
7630 && TREE_CODE (arg0) == BIT_AND_EXPR
7631 && integer_pow2p (TREE_OPERAND (arg0, 1))
7632 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7633 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7634 arg0, integer_zero_node));
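      /* Illustrative example: with the power-of-two constant 8,
	 (a & 8) == 8 folds to (a & 8) != 0, and (a & 8) != 8 folds to
	 (a & 8) == 0.  */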
7636 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7637 2, then fold the expression into shifts and logical operations. */
7638 tem = fold_single_bit_test (code, arg0, arg1, type);
7639 if (tem)
7640 return tem;
7642 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7643 Similarly for NE_EXPR. */
7644 if ((code == EQ_EXPR || code == NE_EXPR)
7645 && TREE_CODE (arg0) == BIT_AND_EXPR
7646 && TREE_CODE (arg1) == INTEGER_CST
7647 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7649 tree dandnotc
7650 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7651 arg1, build1 (BIT_NOT_EXPR,
7652 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7653 TREE_OPERAND (arg0, 1))));
7654 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7655 if (integer_nonzerop (dandnotc))
7656 return omit_one_operand (type, rslt, arg0);
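	  /* Illustrative example: (a & 4) == 3 folds to 0 and
	     (a & 4) != 3 folds to 1, since 3 & ~4 is nonzero and the
	     masked value can never equal 3.  */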
7659 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7660 Similarly for NE_EXPR. */
7661 if ((code == EQ_EXPR || code == NE_EXPR)
7662 && TREE_CODE (arg0) == BIT_IOR_EXPR
7663 && TREE_CODE (arg1) == INTEGER_CST
7664 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7666 tree candnotd
7667 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7668 TREE_OPERAND (arg0, 1),
7669 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7670 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7671 if (integer_nonzerop (candnotd))
7672 return omit_one_operand (type, rslt, arg0);
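	  /* Illustrative example: (a | 4) == 3 folds to 0, since bit 2
	     is always set on the left-hand side but clear in 3.  */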
7675 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7676 and similarly for >= into !=. */
7677 if ((code == LT_EXPR || code == GE_EXPR)
7678 && TREE_UNSIGNED (TREE_TYPE (arg0))
7679 && TREE_CODE (arg1) == LSHIFT_EXPR
7680 && integer_onep (TREE_OPERAND (arg1, 0)))
7681 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7682 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7683 TREE_OPERAND (arg1, 1)),
7684 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7686 else if ((code == LT_EXPR || code == GE_EXPR)
7687 && TREE_UNSIGNED (TREE_TYPE (arg0))
7688 && (TREE_CODE (arg1) == NOP_EXPR
7689 || TREE_CODE (arg1) == CONVERT_EXPR)
7690 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7691 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7692 return
7693 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7694 fold_convert (TREE_TYPE (arg0),
7695 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7696 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7697 1))),
7698 fold_convert (TREE_TYPE (arg0), integer_zero_node));
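      /* Illustrative example: for unsigned x, x < (1 << n) becomes
	 x >> n == 0, and x >= (1 << n) becomes x >> n != 0.  */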
7700 /* Simplify comparison of something with itself. (For IEEE
7701 floating-point, we can only do some of these simplifications.) */
7702 if (operand_equal_p (arg0, arg1, 0))
7704 switch (code)
7706 case EQ_EXPR:
7707 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7708 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7709 return constant_boolean_node (1, type);
7710 break;
7712 case GE_EXPR:
7713 case LE_EXPR:
7714 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7715 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7716 return constant_boolean_node (1, type);
7717 return fold (build (EQ_EXPR, type, arg0, arg1));
7719 case NE_EXPR:
7720	     /* For NE, we can only do this simplification if the operands
7721		are integral or we don't honor IEEE floating-point NaNs.  */
7722 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7723 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7724 break;
7725 /* ... fall through ... */
7726 case GT_EXPR:
7727 case LT_EXPR:
7728 return constant_boolean_node (0, type);
7729 default:
7730 abort ();
7734 /* If we are comparing an expression that just has comparisons
7735 of two integer values, arithmetic expressions of those comparisons,
7736 and constants, we can simplify it. There are only three cases
7737 to check: the two values can either be equal, the first can be
7738 greater, or the second can be greater. Fold the expression for
7739 those three values. Since each value must be 0 or 1, we have
7740 eight possibilities, each of which corresponds to the constant 0
7741 or 1 or one of the six possible comparisons.
7743 This handles common cases like (a > b) == 0 but also handles
7744 expressions like ((x > y) - (y > x)) > 0, which supposedly
7745 occur in macroized code. */
7747 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7749 tree cval1 = 0, cval2 = 0;
7750 int save_p = 0;
7752 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7753 /* Don't handle degenerate cases here; they should already
7754 have been handled anyway. */
7755 && cval1 != 0 && cval2 != 0
7756 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7757 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7758 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7759 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7760 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7761 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7762 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7764 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7765 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7767 /* We can't just pass T to eval_subst in case cval1 or cval2
7768 was the same as ARG1. */
7770 tree high_result
7771 = fold (build (code, type,
7772 eval_subst (arg0, cval1, maxval, cval2, minval),
7773 arg1));
7774 tree equal_result
7775 = fold (build (code, type,
7776 eval_subst (arg0, cval1, maxval, cval2, maxval),
7777 arg1));
7778 tree low_result
7779 = fold (build (code, type,
7780 eval_subst (arg0, cval1, minval, cval2, maxval),
7781 arg1));
7783 /* All three of these results should be 0 or 1. Confirm they
7784 are. Then use those values to select the proper code
7785 to use. */
7787 if ((integer_zerop (high_result)
7788 || integer_onep (high_result))
7789 && (integer_zerop (equal_result)
7790 || integer_onep (equal_result))
7791 && (integer_zerop (low_result)
7792 || integer_onep (low_result)))
7794 /* Make a 3-bit mask with the high-order bit being the
7795 value for `>', the next for '=', and the low for '<'. */
7796 switch ((integer_onep (high_result) * 4)
7797 + (integer_onep (equal_result) * 2)
7798 + integer_onep (low_result))
7800 case 0:
7801 /* Always false. */
7802 return omit_one_operand (type, integer_zero_node, arg0);
7803 case 1:
7804 code = LT_EXPR;
7805 break;
7806 case 2:
7807 code = EQ_EXPR;
7808 break;
7809 case 3:
7810 code = LE_EXPR;
7811 break;
7812 case 4:
7813 code = GT_EXPR;
7814 break;
7815 case 5:
7816 code = NE_EXPR;
7817 break;
7818 case 6:
7819 code = GE_EXPR;
7820 break;
7821 case 7:
7822 /* Always true. */
7823 return omit_one_operand (type, integer_one_node, arg0);
7826 tem = build (code, type, cval1, cval2);
7827 if (save_p)
7828 return save_expr (tem);
7829 else
7830 return fold (tem);
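	  /* Illustrative example (with cval1 == x and cval2 == y): for
	     ((x > y) - (y > x)) > 0 the three substitutions yield
	     high_result == 1, equal_result == 0 and low_result == 0, so
	     the mask is 4, code becomes GT_EXPR, and the whole tree
	     folds to x > y.  */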
7835 /* If this is a comparison of a field, we may be able to simplify it. */
7836 if (((TREE_CODE (arg0) == COMPONENT_REF
7837 && lang_hooks.can_use_bit_fields_p ())
7838 || TREE_CODE (arg0) == BIT_FIELD_REF)
7839 && (code == EQ_EXPR || code == NE_EXPR)
7840 /* Handle the constant case even without -O
7841 to make sure the warnings are given. */
7842 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7844 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7845 if (t1)
7846 return t1;
7849 /* If this is a comparison of complex values and either or both sides
7850 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7851 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7852 This may prevent needless evaluations. */
7853 if ((code == EQ_EXPR || code == NE_EXPR)
7854 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7855 && (TREE_CODE (arg0) == COMPLEX_EXPR
7856 || TREE_CODE (arg1) == COMPLEX_EXPR
7857 || TREE_CODE (arg0) == COMPLEX_CST
7858 || TREE_CODE (arg1) == COMPLEX_CST))
7860 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7861 tree real0, imag0, real1, imag1;
7863 arg0 = save_expr (arg0);
7864 arg1 = save_expr (arg1);
7865 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7866 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7867 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7868 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7870 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7871 : TRUTH_ORIF_EXPR),
7872 type,
7873 fold (build (code, type, real0, real1)),
7874 fold (build (code, type, imag0, imag1))));
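      /* Illustrative example: for complex z and w, z == w becomes
	 real(z) == real(w) && imag(z) == imag(w), and z != w becomes
	 real(z) != real(w) || imag(z) != imag(w).  */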
7877 /* Optimize comparisons of strlen vs zero to a compare of the
7878 first character of the string vs zero. To wit,
7879 strlen(ptr) == 0 => *ptr == 0
7880 strlen(ptr) != 0 => *ptr != 0
7881 Other cases should reduce to one of these two (or a constant)
7882 due to the return value of strlen being unsigned. */
7883 if ((code == EQ_EXPR || code == NE_EXPR)
7884 && integer_zerop (arg1)
7885 && TREE_CODE (arg0) == CALL_EXPR)
7887 tree fndecl = get_callee_fndecl (arg0);
7888 tree arglist;
7890 if (fndecl
7891 && DECL_BUILT_IN (fndecl)
7892 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7893 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7894 && (arglist = TREE_OPERAND (arg0, 1))
7895 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7896 && ! TREE_CHAIN (arglist))
7897 return fold (build (code, type,
7898 build1 (INDIRECT_REF, char_type_node,
7899	 TREE_VALUE (arglist)),
7900 integer_zero_node));
7903 /* Both ARG0 and ARG1 are known to be constants at this point. */
7904 t1 = fold_relational_const (code, type, arg0, arg1);
7905 return (t1 == NULL_TREE ? t : t1);
7907 case COND_EXPR:
7908 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7909 so all simple results must be passed through pedantic_non_lvalue. */
7910 if (TREE_CODE (arg0) == INTEGER_CST)
7912 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7913 /* Only optimize constant conditions when the selected branch
7914 has the same type as the COND_EXPR. This avoids optimizing
7915 away "c ? x : throw", where the throw has a void type. */
7916 if (! VOID_TYPE_P (TREE_TYPE (tem))
7917 || VOID_TYPE_P (type))
7918 return pedantic_non_lvalue (tem);
7919 return t;
7921 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
7922 return pedantic_omit_one_operand (type, arg1, arg0);
7924 /* If we have A op B ? A : C, we may be able to convert this to a
7925 simpler expression, depending on the operation and the values
7926 of B and C. Signed zeros prevent all of these transformations,
7927 for reasons given above each one. */
7929 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7930 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7931 arg1, TREE_OPERAND (arg0, 1))
7932 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7934 tree arg2 = TREE_OPERAND (t, 2);
7935 enum tree_code comp_code = TREE_CODE (arg0);
7937 STRIP_NOPS (arg2);
7939 /* If we have A op 0 ? A : -A, consider applying the following
7940 transformations:
7942 A == 0? A : -A same as -A
7943 A != 0? A : -A same as A
7944 A >= 0? A : -A same as abs (A)
7945 A > 0? A : -A same as abs (A)
7946 A <= 0? A : -A same as -abs (A)
7947 A < 0? A : -A same as -abs (A)
7949 None of these transformations work for modes with signed
7950 zeros. If A is +/-0, the first two transformations will
7951 change the sign of the result (from +0 to -0, or vice
7952 versa). The last four will fix the sign of the result,
7953 even though the original expressions could be positive or
7954 negative, depending on the sign of A.
7956 Note that all these transformations are correct if A is
7957 NaN, since the two alternatives (A and -A) are also NaNs. */
7958 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7959 ? real_zerop (TREE_OPERAND (arg0, 1))
7960 : integer_zerop (TREE_OPERAND (arg0, 1)))
7961 && TREE_CODE (arg2) == NEGATE_EXPR
7962 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7963 switch (comp_code)
7965 case EQ_EXPR:
7966 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
7967 tem = fold_convert (type, negate_expr (tem));
7968 return pedantic_non_lvalue (tem);
7969 case NE_EXPR:
7970 return pedantic_non_lvalue (fold_convert (type, arg1));
7971 case GE_EXPR:
7972 case GT_EXPR:
7973 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7974 arg1 = fold_convert (lang_hooks.types.signed_type
7975 (TREE_TYPE (arg1)), arg1);
7976 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7977 return pedantic_non_lvalue (fold_convert (type, arg1));
7978 case LE_EXPR:
7979 case LT_EXPR:
7980 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7981 arg1 = fold_convert (lang_hooks.types.signed_type
7982 (TREE_TYPE (arg1)), arg1);
7983 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7984 arg1 = negate_expr (fold_convert (type, arg1));
7985 return pedantic_non_lvalue (arg1);
7986 default:
7987 abort ();
7990 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7991 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7992 both transformations are correct when A is NaN: A != 0
7993 is then true, and A == 0 is false. */
7995 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7997 if (comp_code == NE_EXPR)
7998 return pedantic_non_lvalue (fold_convert (type, arg1));
7999 else if (comp_code == EQ_EXPR)
8000 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8003 /* Try some transformations of A op B ? A : B.
8005 A == B? A : B same as B
8006 A != B? A : B same as A
8007 A >= B? A : B same as max (A, B)
8008 A > B? A : B same as max (B, A)
8009 A <= B? A : B same as min (A, B)
8010 A < B? A : B same as min (B, A)
8012 As above, these transformations don't work in the presence
8013 of signed zeros. For example, if A and B are zeros of
8014 opposite sign, the first two transformations will change
8015 the sign of the result. In the last four, the original
8016 expressions give different results for (A=+0, B=-0) and
8017 (A=-0, B=+0), but the transformed expressions do not.
8019 The first two transformations are correct if either A or B
8020 is a NaN. In the first transformation, the condition will
8021 be false, and B will indeed be chosen. In the case of the
8022 second transformation, the condition A != B will be true,
8023 and A will be chosen.
8025 The conversions to max() and min() are not correct if B is
8026 a number and A is not. The conditions in the original
8027 expressions will be false, so all four give B. The min()
8028 and max() versions would give a NaN instead. */
8029 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8030 arg2, TREE_OPERAND (arg0, 0)))
8032 tree comp_op0 = TREE_OPERAND (arg0, 0);
8033 tree comp_op1 = TREE_OPERAND (arg0, 1);
8034 tree comp_type = TREE_TYPE (comp_op0);
8036 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8037 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8039 comp_type = type;
8040 comp_op0 = arg1;
8041 comp_op1 = arg2;
8044 switch (comp_code)
8046 case EQ_EXPR:
8047 return pedantic_non_lvalue (fold_convert (type, arg2));
8048 case NE_EXPR:
8049 return pedantic_non_lvalue (fold_convert (type, arg1));
8050 case LE_EXPR:
8051 case LT_EXPR:
8052 /* In C++ a ?: expression can be an lvalue, so put the
8053 operand which will be used if they are equal first
8054 so that we can convert this back to the
8055 corresponding COND_EXPR. */
8056 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8057 return pedantic_non_lvalue (fold_convert
8058 (type, fold (build (MIN_EXPR, comp_type,
8059 (comp_code == LE_EXPR
8060 ? comp_op0 : comp_op1),
8061 (comp_code == LE_EXPR
8062 ? comp_op1 : comp_op0)))));
8063 break;
8064 case GE_EXPR:
8065 case GT_EXPR:
8066 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8067 return pedantic_non_lvalue (fold_convert
8068 (type, fold (build (MAX_EXPR, comp_type,
8069 (comp_code == GE_EXPR
8070 ? comp_op0 : comp_op1),
8071 (comp_code == GE_EXPR
8072 ? comp_op1 : comp_op0)))));
8073 break;
8074 default:
8075 abort ();
8079 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8080 we might still be able to simplify this. For example,
8081 if C1 is one less or one more than C2, this might have started
8082 out as a MIN or MAX and been transformed by this function.
8083 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8085 if (INTEGRAL_TYPE_P (type)
8086 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8087 && TREE_CODE (arg2) == INTEGER_CST)
8088 switch (comp_code)
8090 case EQ_EXPR:
8091 /* We can replace A with C1 in this case. */
8092 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8093 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8094 TREE_OPERAND (t, 2)));
8096 case LT_EXPR:
8097 /* If C1 is C2 + 1, this is min(A, C2). */
8098 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8099 && operand_equal_p (TREE_OPERAND (arg0, 1),
8100 const_binop (PLUS_EXPR, arg2,
8101 integer_one_node, 0), 1))
8102 return pedantic_non_lvalue
8103 (fold (build (MIN_EXPR, type, arg1, arg2)));
8104 break;
8106 case LE_EXPR:
8107 /* If C1 is C2 - 1, this is min(A, C2). */
8108 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8109 && operand_equal_p (TREE_OPERAND (arg0, 1),
8110 const_binop (MINUS_EXPR, arg2,
8111 integer_one_node, 0), 1))
8112 return pedantic_non_lvalue
8113 (fold (build (MIN_EXPR, type, arg1, arg2)));
8114 break;
8116 case GT_EXPR:
8117 /* If C1 is C2 - 1, this is max(A, C2). */
8118 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8119 && operand_equal_p (TREE_OPERAND (arg0, 1),
8120 const_binop (MINUS_EXPR, arg2,
8121 integer_one_node, 0), 1))
8122 return pedantic_non_lvalue
8123 (fold (build (MAX_EXPR, type, arg1, arg2)));
8124 break;
8126 case GE_EXPR:
8127 /* If C1 is C2 + 1, this is max(A, C2). */
8128 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8129 && operand_equal_p (TREE_OPERAND (arg0, 1),
8130 const_binop (PLUS_EXPR, arg2,
8131 integer_one_node, 0), 1))
8132 return pedantic_non_lvalue
8133 (fold (build (MAX_EXPR, type, arg1, arg2)));
8134 break;
8135 case NE_EXPR:
8136 break;
8137 default:
8138 abort ();
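	  /* Illustrative example: x < 5 ? x : 4 matches the LT_EXPR
	     case (C1 == 5 is C2 + 1 with C2 == 4) and folds to
	     MIN (x, 4).  */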
8142 /* If the second operand is simpler than the third, swap them
8143 since that produces better jump optimization results. */
8144 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8145 TREE_OPERAND (t, 2), false))
8147 /* See if this can be inverted. If it can't, possibly because
8148 it was a floating-point inequality comparison, don't do
8149 anything. */
8150 tem = invert_truthvalue (arg0);
8152 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8153 return fold (build (code, type, tem,
8154 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8157 /* Convert A ? 1 : 0 to simply A. */
8158 if (integer_onep (TREE_OPERAND (t, 1))
8159 && integer_zerop (TREE_OPERAND (t, 2))
8160 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8161 call to fold will try to move the conversion inside
8162 a COND, which will recurse. In that case, the COND_EXPR
8163 is probably the best choice, so leave it alone. */
8164 && type == TREE_TYPE (arg0))
8165 return pedantic_non_lvalue (arg0);
8167 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8168 over COND_EXPR in cases such as floating point comparisons. */
8169 if (integer_zerop (TREE_OPERAND (t, 1))
8170 && integer_onep (TREE_OPERAND (t, 2))
8171 && truth_value_p (TREE_CODE (arg0)))
8172 return pedantic_non_lvalue (fold_convert (type,
8173 invert_truthvalue (arg0)));
8175 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8176 operation is simply A & 2. */
8178 if (integer_zerop (TREE_OPERAND (t, 2))
8179 && TREE_CODE (arg0) == NE_EXPR
8180 && integer_zerop (TREE_OPERAND (arg0, 1))
8181 && integer_pow2p (arg1)
8182 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8183 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8184 arg1, 1))
8185 return pedantic_non_lvalue (fold_convert (type,
8186 TREE_OPERAND (arg0, 0)));
8188 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8189 if (integer_zerop (TREE_OPERAND (t, 2))
8190 && truth_value_p (TREE_CODE (arg0))
8191 && truth_value_p (TREE_CODE (arg1)))
8192 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8193 arg0, arg1)));
8195 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8196 if (integer_onep (TREE_OPERAND (t, 2))
8197 && truth_value_p (TREE_CODE (arg0))
8198 && truth_value_p (TREE_CODE (arg1)))
8200 /* Only perform transformation if ARG0 is easily inverted. */
8201 tem = invert_truthvalue (arg0);
8202 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8203 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8204 tem, arg1)));
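      /* Illustrative examples: for truth values a and b, a ? b : 0
	 folds to a && b, and a ? b : 1 folds to !a || b when a can be
	 cheaply inverted.  */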
8207 return t;
8209 case COMPOUND_EXPR:
8210 /* When pedantic, a compound expression can be neither an lvalue
8211 nor an integer constant expression. */
8212 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8213 return t;
8214	 /* Don't let (0, 0) be a null pointer constant.  */
8215 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8216 : fold_convert (type, arg1);
8217 return pedantic_non_lvalue (tem);
8219 case COMPLEX_EXPR:
8220 if (wins)
8221 return build_complex (type, arg0, arg1);
8222 return t;
8224 case REALPART_EXPR:
8225 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8226 return t;
8227 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8228 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8229 TREE_OPERAND (arg0, 1));
8230 else if (TREE_CODE (arg0) == COMPLEX_CST)
8231 return TREE_REALPART (arg0);
8232 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8233 return fold (build (TREE_CODE (arg0), type,
8234 fold (build1 (REALPART_EXPR, type,
8235 TREE_OPERAND (arg0, 0))),
8236 fold (build1 (REALPART_EXPR,
8237 type, TREE_OPERAND (arg0, 1)))));
8238 return t;
8240 case IMAGPART_EXPR:
8241 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8242 return fold_convert (type, integer_zero_node);
8243 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8244 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8245 TREE_OPERAND (arg0, 0));
8246 else if (TREE_CODE (arg0) == COMPLEX_CST)
8247 return TREE_IMAGPART (arg0);
8248 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8249 return fold (build (TREE_CODE (arg0), type,
8250 fold (build1 (IMAGPART_EXPR, type,
8251 TREE_OPERAND (arg0, 0))),
8252 fold (build1 (IMAGPART_EXPR, type,
8253 TREE_OPERAND (arg0, 1)))));
8254 return t;
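      /* Illustrative examples: realpart (z + w) becomes
	 realpart (z) + realpart (w), and imagpart of a non-complex
	 value folds to zero, as handled above.  */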
8256 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8257 appropriate. */
8258 case CLEANUP_POINT_EXPR:
8259 if (! has_cleanups (arg0))
8260 return TREE_OPERAND (t, 0);
8263 enum tree_code code0 = TREE_CODE (arg0);
8264 int kind0 = TREE_CODE_CLASS (code0);
8265 tree arg00 = TREE_OPERAND (arg0, 0);
8266 tree arg01;
8268 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8269 return fold (build1 (code0, type,
8270 fold (build1 (CLEANUP_POINT_EXPR,
8271 TREE_TYPE (arg00), arg00))));
8273 if (kind0 == '<' || kind0 == '2'
8274 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8275 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8276 || code0 == TRUTH_XOR_EXPR)
8278 arg01 = TREE_OPERAND (arg0, 1);
8280 if (TREE_CONSTANT (arg00)
8281 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8282 && ! has_cleanups (arg00)))
8283 return fold (build (code0, type, arg00,
8284 fold (build1 (CLEANUP_POINT_EXPR,
8285 TREE_TYPE (arg01), arg01))));
8287 if (TREE_CONSTANT (arg01))
8288 return fold (build (code0, type,
8289 fold (build1 (CLEANUP_POINT_EXPR,
8290 TREE_TYPE (arg00), arg00)),
8291 arg01));
8294 return t;
8297 case CALL_EXPR:
8298 /* Check for a built-in function. */
8299 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8300 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8301 == FUNCTION_DECL)
8302 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8304 tree tmp = fold_builtin (t);
8305 if (tmp)
8306 return tmp;
8308 return t;
8310 default:
8311 return t;
8312 } /* switch (code) */
8315 #ifdef ENABLE_FOLD_CHECKING
8316 #undef fold
8318 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8319 static void fold_check_failed (tree, tree);
8320 void print_fold_checksum (tree);
8322 /* When --enable-checking=fold, compute a digest of expr before
8323	 and after the actual fold call to verify that fold did not
8324	 accidentally change the original expr.  */
8326 tree
8327 fold (tree expr)
8329 tree ret;
8330 struct md5_ctx ctx;
8331 unsigned char checksum_before[16], checksum_after[16];
8332 htab_t ht;
8334 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8335 md5_init_ctx (&ctx);
8336 fold_checksum_tree (expr, &ctx, ht);
8337 md5_finish_ctx (&ctx, checksum_before);
8338 htab_empty (ht);
8340 ret = fold_1 (expr);
8342 md5_init_ctx (&ctx);
8343 fold_checksum_tree (expr, &ctx, ht);
8344 md5_finish_ctx (&ctx, checksum_after);
8345 htab_delete (ht);
8347 if (memcmp (checksum_before, checksum_after, 16))
8348 fold_check_failed (expr, ret);
8350 return ret;
8353 void
8354 print_fold_checksum (tree expr)
8356 struct md5_ctx ctx;
8357 unsigned char checksum[16], cnt;
8358 htab_t ht;
8360 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8361 md5_init_ctx (&ctx);
8362 fold_checksum_tree (expr, &ctx, ht);
8363 md5_finish_ctx (&ctx, checksum);
8364 htab_delete (ht);
8365 for (cnt = 0; cnt < 16; ++cnt)
8366 fprintf (stderr, "%02x", checksum[cnt]);
8367 putc ('\n', stderr);
8370 static void
8371 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8373 internal_error ("fold check: original tree changed by fold");
8376 static void
8377 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8379 void **slot;
8380 enum tree_code code;
8381 char buf[sizeof (struct tree_decl)];
8382 int i, len;
8384 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8385 > sizeof (struct tree_decl)
8386 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8387 abort ();
8388 if (expr == NULL)
8389 return;
8390 slot = htab_find_slot (ht, expr, INSERT);
8391 if (*slot != NULL)
8392 return;
8393 *slot = expr;
8394 code = TREE_CODE (expr);
8395 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8397 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8398 memcpy (buf, expr, tree_size (expr));
8399 expr = (tree) buf;
8400 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8402 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8404 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8405 memcpy (buf, expr, tree_size (expr));
8406 expr = (tree) buf;
8407 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8409 else if (TREE_CODE_CLASS (code) == 't'
8410 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8412 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8413 memcpy (buf, expr, tree_size (expr));
8414 expr = (tree) buf;
8415 TYPE_POINTER_TO (expr) = NULL;
8416 TYPE_REFERENCE_TO (expr) = NULL;
8418 md5_process_bytes (expr, tree_size (expr), ctx);
8419 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8420 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8421 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8422 len = TREE_CODE_LENGTH (code);
8423 switch (TREE_CODE_CLASS (code))
8425 case 'c':
8426 switch (code)
8428 case STRING_CST:
8429 md5_process_bytes (TREE_STRING_POINTER (expr),
8430 TREE_STRING_LENGTH (expr), ctx);
8431 break;
8432 case COMPLEX_CST:
8433 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8434 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8435 break;
8436 case VECTOR_CST:
8437 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8438 break;
8439 default:
8440 break;
8442 break;
8443 case 'x':
8444 switch (code)
8446 case TREE_LIST:
8447 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8448 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8449 break;
8450 case TREE_VEC:
8451 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8452 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8453 break;
8454 default:
8455 break;
8457 break;
8458 case 'e':
8459 switch (code)
8461 case SAVE_EXPR: len = 2; break;
8462 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8463 case RTL_EXPR: len = 0; break;
8464 case WITH_CLEANUP_EXPR: len = 2; break;
8465 default: break;
8467 /* Fall through. */
8468 case 'r':
8469 case '<':
8470 case '1':
8471 case '2':
8472 case 's':
8473 for (i = 0; i < len; ++i)
8474 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8475 break;
8476 case 'd':
8477 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8478 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8479 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8480 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8481 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8482 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8483 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8484 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8485 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8486 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8487 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8488 break;
8489 case 't':
8490 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8491 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8492 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8493 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8494 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8495 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8496 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8497 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8498 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8499 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8500 break;
8501 default:
8502 break;
8506 #endif
8508 /* Perform constant folding and related simplification of initializer
8509 expression EXPR. This behaves identically to "fold" but ignores
8510 potential run-time traps and exceptions that fold must preserve. */
8512 tree
8513 fold_initializer (tree expr)
8515 int saved_signaling_nans = flag_signaling_nans;
8516 int saved_trapping_math = flag_trapping_math;
8517 int saved_trapv = flag_trapv;
8518 tree result;
8520 flag_signaling_nans = 0;
8521 flag_trapping_math = 0;
8522 flag_trapv = 0;
8524 result = fold (expr);
8526 flag_signaling_nans = saved_signaling_nans;
8527 flag_trapping_math = saved_trapping_math;
8528 flag_trapv = saved_trapv;
8530 return result;
8533 /* Determine if first argument is a multiple of second argument. Return 0 if
8534	 it is not, or we cannot easily determine that it is.
8536 An example of the sort of thing we care about (at this point; this routine
8537 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8538 fold cases do now) is discovering that
8540 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8542 is a multiple of
8544 SAVE_EXPR (J * 8)
8546 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8548 This code also handles discovering that
8550 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8552 is a multiple of 8 so we don't have to worry about dealing with a
8553 possible remainder.
8555 Note that we *look* inside a SAVE_EXPR only to determine how it was
8556 calculated; it is not safe for fold to do much of anything else with the
8557 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8558 at run time. For example, the latter example above *cannot* be implemented
8559 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8560 evaluation time of the original SAVE_EXPR is not necessarily the same at
8561 the time the new expression is evaluated. The only optimization of this
8562 sort that would be valid is changing
8564 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8566 divided by 8 to
8568 SAVE_EXPR (I) * SAVE_EXPR (J)
8570 (where the same SAVE_EXPR (J) is used in the original and the
8571 transformed version). */
8573 static int
8574 multiple_of_p (tree type, tree top, tree bottom)
8576 if (operand_equal_p (top, bottom, 0))
8577 return 1;
8579 if (TREE_CODE (type) != INTEGER_TYPE)
8580 return 0;
8582 switch (TREE_CODE (top))
8584 case MULT_EXPR:
8585 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8586 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8588 case PLUS_EXPR:
8589 case MINUS_EXPR:
8590 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8591 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8593 case LSHIFT_EXPR:
8594 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8596 tree op1, t1;
8598 op1 = TREE_OPERAND (top, 1);
8599 /* const_binop may not detect overflow correctly,
8600 so check for it explicitly here. */
8601 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8602 > TREE_INT_CST_LOW (op1)
8603 && TREE_INT_CST_HIGH (op1) == 0
8604 && 0 != (t1 = fold_convert (type,
8605 const_binop (LSHIFT_EXPR,
8606 size_one_node,
8607 op1, 0)))
8608 && ! TREE_OVERFLOW (t1))
8609 return multiple_of_p (type, t1, bottom);
8611 return 0;
8613 case NOP_EXPR:
8614 /* Can't handle conversions from non-integral or wider integral type. */
8615 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8616 || (TYPE_PRECISION (type)
8617 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8618 return 0;
8620	 /* ... fall through ...  */
8622 case SAVE_EXPR:
8623 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8625 case INTEGER_CST:
8626 if (TREE_CODE (bottom) != INTEGER_CST
8627 || (TREE_UNSIGNED (type)
8628 && (tree_int_cst_sgn (top) < 0
8629 || tree_int_cst_sgn (bottom) < 0)))
8630 return 0;
8631 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8632 top, bottom, 0));
8634 default:
8635 return 0;
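/* Illustrative example: with TOP == (j << 3) and BOTTOM == 4, the
   LSHIFT_EXPR case above rewrites the shift as the constant
   1 << 3 == 8, and the INTEGER_CST case then checks 8 % 4 == 0,
   so the result is 1.  */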
8639 /* Return true if `t' is known to be non-negative. */
8641 int
8642 tree_expr_nonnegative_p (tree t)
8644 switch (TREE_CODE (t))
8646 case ABS_EXPR:
8647 return 1;
8649 case INTEGER_CST:
8650 return tree_int_cst_sgn (t) >= 0;
8652 case REAL_CST:
8653 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8655 case PLUS_EXPR:
8656 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8657 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8658 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8660 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8661 both unsigned and at least 2 bits shorter than the result. */
8662 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8663 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8664 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8666 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8667 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8668 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8669 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8671 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8672 TYPE_PRECISION (inner2)) + 1;
8673 return prec < TYPE_PRECISION (TREE_TYPE (t));
8676 break;
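      /* Illustrative example (assuming 16-bit short, 32-bit int):
	 (int) us1 + (int) us2 with both operands unsigned short gives
	 prec == 17 < 32, so the sum cannot reach the sign bit and is
	 known non-negative.  */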
8678 case MULT_EXPR:
8679 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8681 /* x * x for floating point x is always non-negative. */
8682 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8683 return 1;
8684 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8685 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8688 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8689	 both unsigned and their total precision is less than that of the result.  */
8690 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8691 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8692 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8694 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8695 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8696 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8697 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8698 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8699 < TYPE_PRECISION (TREE_TYPE (t));
8701 return 0;
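      /* Illustrative counter-example (same assumptions): for
	 (int) us1 * (int) us2, 16 + 16 == 32 is not less than 32, and
	 indeed 65535 * 65535 would overflow into the sign bit, so no
	 claim is made.  */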
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
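
      /* A single non-negative operand suffices for BIT_AND_EXPR: the
         sign bit of X & Y is set only when it is set in both operands,
         so e.g. x & 0x7f is non-negative whatever x is.  */
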
    case BIT_IOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TREE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TREE_UNSIGNED (inner_type);
          }
      }
      break;
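
      /* For instance, (int) (unsigned char) x is always non-negative:
         the inner type is unsigned and strictly narrower, so the
         conversion zero-extends and the sign bit of the wider result
         stays clear.  A same-width conversion such as
         (int) (unsigned int) x proves nothing.  */
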
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
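
            /* Each CASE_BUILTIN_F covers the float, double and long
               double variants of a builtin via token pasting; for
               example, CASE_BUILTIN_F (BUILT_IN_SQRT) expands to
               "case BUILT_IN_SQRT: case BUILT_IN_SQRTF:
               case BUILT_IN_SQRTL:".  CASE_BUILTIN_I likewise covers
               the int, long and long long variants.  */
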
            CASE_BUILTIN_F (BUILT_IN_ACOS)
            CASE_BUILTIN_F (BUILT_IN_ACOSH)
            CASE_BUILTIN_F (BUILT_IN_CABS)
            CASE_BUILTIN_F (BUILT_IN_COSH)
            CASE_BUILTIN_F (BUILT_IN_ERFC)
            CASE_BUILTIN_F (BUILT_IN_EXP)
            CASE_BUILTIN_F (BUILT_IN_EXP10)
            CASE_BUILTIN_F (BUILT_IN_EXP2)
            CASE_BUILTIN_F (BUILT_IN_FABS)
            CASE_BUILTIN_F (BUILT_IN_FDIM)
            CASE_BUILTIN_F (BUILT_IN_FREXP)
            CASE_BUILTIN_F (BUILT_IN_HYPOT)
            CASE_BUILTIN_F (BUILT_IN_POW10)
            CASE_BUILTIN_F (BUILT_IN_SQRT)
            CASE_BUILTIN_I (BUILT_IN_FFS)
            CASE_BUILTIN_I (BUILT_IN_PARITY)
            CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
              /* Always true.  */
              return 1;

            CASE_BUILTIN_F (BUILT_IN_ASINH)
            CASE_BUILTIN_F (BUILT_IN_ATAN)
            CASE_BUILTIN_F (BUILT_IN_ATANH)
            CASE_BUILTIN_F (BUILT_IN_CBRT)
            CASE_BUILTIN_F (BUILT_IN_CEIL)
            CASE_BUILTIN_F (BUILT_IN_ERF)
            CASE_BUILTIN_F (BUILT_IN_EXPM1)
            CASE_BUILTIN_F (BUILT_IN_FLOOR)
            CASE_BUILTIN_F (BUILT_IN_FMOD)
            CASE_BUILTIN_F (BUILT_IN_LDEXP)
            CASE_BUILTIN_F (BUILT_IN_LLRINT)
            CASE_BUILTIN_F (BUILT_IN_LLROUND)
            CASE_BUILTIN_F (BUILT_IN_LRINT)
            CASE_BUILTIN_F (BUILT_IN_LROUND)
            CASE_BUILTIN_F (BUILT_IN_MODF)
            CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
            CASE_BUILTIN_F (BUILT_IN_POW)
            CASE_BUILTIN_F (BUILT_IN_RINT)
            CASE_BUILTIN_F (BUILT_IN_ROUND)
            CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
            CASE_BUILTIN_F (BUILT_IN_SINH)
            CASE_BUILTIN_F (BUILT_IN_TANH)
            CASE_BUILTIN_F (BUILT_IN_TRUNC)
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_FMAX)
              /* True if the 1st or 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_FMIN)
              /* True if both the 1st and 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know the sign of `t', so be conservative and return
     false.  */
  return 0;
}
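
/* This predicate is used elsewhere in this file, e.g. to let fold
   simplify abs (x) down to plain x when x is provably non-negative.  */
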
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TREE_UNSIGNED (type) && !flag_wrapv)
        return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (!TREE_UNSIGNED (type) && !flag_wrapv)
        {
          /* In the presence of negative values it is hard to say
             anything.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;
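
      /* Example of the PLUS_EXPR reasoning above: once both operands
         are known non-negative, a + b is zero only if a == 0 and
         b == 0, so one provably nonzero operand suffices.  The
         TREE_UNSIGNED/flag_wrapv guard rules out wraparound such as
         1 + UINT_MAX == 0.  */
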
    case MULT_EXPR:
      if (!TREE_UNSIGNED (type) && !flag_wrapv)
        {
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        /* A widening (or same-width) conversion preserves a nonzero
           value; a narrowing one may truncate it to zero.  */
        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      /* Weak declarations may link to NULL.  */
      if (DECL_P (TREE_OPERAND (t, 0)))
        return !DECL_WEAK (TREE_OPERAND (t, 0));
      /* Constants and all other cases are never weak.  */
      return true;
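
      /* For example, given "extern int x __attribute__ ((weak));",
         &x may legitimately be NULL when no definition is linked in,
         so the address of a weak decl cannot be assumed nonzero.  */
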
    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              || tree_expr_nonzero_p (TREE_OPERAND (t, 0)));

    default:
      break;
    }

  return false;
}

/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;
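
      /* A VOIDmode CONST_DOUBLE holds a double-word integer in its low
         and high halves, so the sign lives in the high half, hence the
         CONST_DOUBLE_HIGH test.  Any other mode means a floating point
         constant, which is not handled here.  */
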
    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }

        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                 TREE_INT_CST_HIGH (arg0),
                                 &low, &high);
      t = build_int_2 (low, high);
      TREE_TYPE (t) = type;
      TREE_OVERFLOW (t)
        = (TREE_OVERFLOW (arg0)
           | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
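
/* For example, negating the most negative value of a signed type is
   not representable; the overflow detected by neg_double and
   force_fit_type is recorded in TREE_OVERFLOW so that later folding
   can remain conservative.  */
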
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TREE_UNSIGNED (type))
        return arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        return arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = (TREE_OVERFLOW (arg0)
               | force_fit_type (t, overflow));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
          return t;
        }
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    {
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        return arg0;
    }
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
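
/* The same caveat as in fold_negate_const applies here: abs of the
   most negative value of a signed type overflows, and the overflow
   indication ends up in TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW
   rather than being silently dropped.  */
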
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands, OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  int invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
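
  /* For example, to fold "2 > 3" the code swaps the operands and
     evaluates "3 < 2", which is false; to fold "2 >= 3" it evaluates
     "2 < 3" (true) and then inverts, giving false.  */
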
  if (code == LE_EXPR || code == GT_EXPR)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     will check below in the one case that it matters.  */

  tem = NULL_TREE;
  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise leave TEM as NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
      else
        tem = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (op0))
                            ? INT_CST_LT_UNSIGNED (op0, op1)
                            : INT_CST_LT (op0, op1)),
                           0);
    }

  else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
           && integer_zerop (op1) && tree_expr_nonzero_p (op0))
    tem = build_int_2 (0, 0);

  /* Two real constants can be compared explicitly.  */
  else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      /* If either operand is a NaN, the result is false with two
         exceptions: First, an NE_EXPR is true on NaNs, but that case
         is already handled correctly since we will be inverting the
         result for NE_EXPR.  Second, if we had inverted a LE_EXPR
         or a GE_EXPR into a LT_EXPR, we must return true so that it
         will be inverted into false.  */
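
      /* Concretely: folding "x <= NaN" arrives here with code LT_EXPR
         and invert set (LE became GE via the swap above, then GE
         became LT with inversion), so building "true" here yields
         "false" after the final inversion, which is the correct
         result for any ordered comparison against a NaN.  */
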
      if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
          || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
        tem = build_int_2 (invert && code == LT_EXPR, 0);

      else if (code == EQ_EXPR)
        tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
                                              TREE_REAL_CST (op1)),
                           0);
      else
        tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
                                             TREE_REAL_CST (op1)),
                           0);
    }

  if (tem == NULL_TREE)
    return NULL_TREE;

  if (invert)
    TREE_INT_CST_LOW (tem) ^= 1;

  TREE_TYPE (tem) = type;
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return (*lang_hooks.truthvalue_conversion) (tem);
  return tem;
}

#include "gt-fold-const.h"