gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
62 /* The following constants represent a bit-based encoding of GCC's
63 comparison operators. This encoding simplifies transformations,
64 such as AND and OR, on relational comparison operators. */
65 enum comparison_code {
66 COMPCODE_FALSE = 0,
67 COMPCODE_LT = 1,
68 COMPCODE_EQ = 2,
69 COMPCODE_LE = 3,
70 COMPCODE_GT = 4,
71 COMPCODE_LTGT = 5,
72 COMPCODE_GE = 6,
73 COMPCODE_ORD = 7,
74 COMPCODE_UNORD = 8,
75 COMPCODE_UNLT = 9,
76 COMPCODE_UNEQ = 10,
77 COMPCODE_UNLE = 11,
78 COMPCODE_UNGT = 12,
79 COMPCODE_NE = 13,
80 COMPCODE_UNGE = 14,
81 COMPCODE_TRUE = 15
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static tree build_zero_vector (tree);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
109 tree *, tree *);
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 tree);
118 static tree fold_range_test (tree);
119 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
127 tree, int);
128 static bool fold_real_zero_addition_p (tree, tree, int);
129 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (tree, tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_relational_hi_lo (enum tree_code *, const tree,
138 tree *, tree *);
139 static bool tree_expr_nonzero_p (tree);
141 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
142 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
143 and SUM1. Then this yields nonzero if overflow occurred during the
144 addition.
146 Overflow occurs if A and B have the same sign, but A and SUM differ in
147 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
148 sign. */
149 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
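/* Worked example (added for illustration, using 8-bit values for
   brevity): a = 0x70, b = 0x30, sum = 0xa0.  The signs of a and b
   agree, so ~(a ^ b) = ~0x40 = 0xbf has the sign bit set; a ^ sum
   = 0xd0 shows the sign changed.  0xbf & 0xd0 = 0x90 is negative,
   so the macro reports overflow.  Had a and b differed in sign
   (say b = 0x90), no overflow is possible and the macro yields 0.  */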
151 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
152 We do that by representing the two-word integer in 4 words, with only
153 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
154 number. The value of a full word is LOWPART + HIGHPART * BASE. */
156 #define LOWPART(x) \
157 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
158 #define HIGHPART(x) \
159 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
160 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
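/* For illustration (assuming a 32-bit HOST_WIDE_INT): BASE is
   0x10000, LOWPART (0x12345678) is 0x5678, HIGHPART (0x12345678)
   is 0x1234, and indeed 0x5678 + 0x1234 * 0x10000 == 0x12345678.  */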
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
166 static void
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
179 static void
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
181 HOST_WIDE_INT *hi)
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
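/* Round-trip example (added for illustration; again assumes a 32-bit
   HOST_WIDE_INT):

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, 0x12345678, 0xabcd);
     decode (w, &lo, &hi);

   After encode, w holds {0x5678, 0x1234, 0xabcd, 0}; decode then
   reconstructs lo == 0x12345678 and hi == 0xabcd.  */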
187 /* T is an INTEGER_CST node. OVERFLOWABLE indicates whether we are
188 interested in overflow of the value: when >0 we are only interested
189 in signed overflow, when <0 we are interested in any overflow.
190 OVERFLOWED indicates whether overflow has already occurred.
191 CONST_OVERFLOWED indicates whether constant overflow has already
192 occurred. We force T's value to be within range of T's type (by
193 setting to 0 or 1 all the bits outside the type's range). We set
194 TREE_OVERFLOW if
195 OVERFLOWED is nonzero,
196 or OVERFLOWABLE is >0 and signed overflow occurs,
197 or OVERFLOWABLE is <0 and any overflow occurs.
198 We set TREE_CONSTANT_OVERFLOW if
199 CONST_OVERFLOWED is nonzero,
200 or we set TREE_OVERFLOW. We return either the original T, or a copy. */
202 tree
203 force_fit_type (tree t, int overflowable,
204 bool overflowed, bool overflowed_const)
206 unsigned HOST_WIDE_INT low;
207 HOST_WIDE_INT high;
208 unsigned int prec;
209 int sign_extended_type;
211 gcc_assert (TREE_CODE (t) == INTEGER_CST);
213 low = TREE_INT_CST_LOW (t);
214 high = TREE_INT_CST_HIGH (t);
216 if (POINTER_TYPE_P (TREE_TYPE (t))
217 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
218 prec = POINTER_SIZE;
219 else
220 prec = TYPE_PRECISION (TREE_TYPE (t));
221 /* Size types *are* sign extended. */
222 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
223 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
224 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
226 /* First clear all bits that are beyond the type's precision. */
228 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
230 else if (prec > HOST_BITS_PER_WIDE_INT)
231 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
232 else
234 high = 0;
235 if (prec < HOST_BITS_PER_WIDE_INT)
236 low &= ~((HOST_WIDE_INT) (-1) << prec);
239 if (!sign_extended_type)
240 /* No sign extension */;
241 else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
242 /* Correct width already. */;
243 else if (prec > HOST_BITS_PER_WIDE_INT)
245 /* Sign extend top half? */
246 if (high & ((unsigned HOST_WIDE_INT)1
247 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
248 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
250 else if (prec == HOST_BITS_PER_WIDE_INT)
252 if ((HOST_WIDE_INT)low < 0)
253 high = -1;
255 else
257 /* Sign extend bottom half? */
258 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
260 high = -1;
261 low |= (HOST_WIDE_INT)(-1) << prec;
265 /* If the value changed, return a new node. */
266 if (overflowed || overflowed_const
267 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
269 t = build_int_cst_wide (TREE_TYPE (t), low, high);
271 if (overflowed
272 || overflowable < 0
273 || (overflowable > 0 && sign_extended_type))
275 t = copy_node (t);
276 TREE_OVERFLOW (t) = 1;
277 TREE_CONSTANT_OVERFLOW (t) = 1;
279 else if (overflowed_const)
281 t = copy_node (t);
282 TREE_CONSTANT_OVERFLOW (t) = 1;
286 return t;
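/* Usage sketch (added for illustration, not from the original code):
   given an INTEGER_CST T of an 8-bit unsigned type holding the
   out-of-range value 0x123,

     t = force_fit_type (t, 1, false, false);

   yields a node with value 0x23.  No overflow flag is set here:
   OVERFLOWED and CONST_OVERFLOWED are false, and with OVERFLOWABLE
   > 0 only sign-extended (signed or sizetype) types are flagged.  */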
289 /* Add two doubleword integers with doubleword result.
290 Each argument is given as two `HOST_WIDE_INT' pieces.
291 One argument is L1 and H1; the other, L2 and H2.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
295 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
299 unsigned HOST_WIDE_INT l;
300 HOST_WIDE_INT h;
302 l = l1 + l2;
303 h = h1 + h2 + (l < l1);
305 *lv = l;
306 *hv = h;
307 return OVERFLOW_SUM_SIGN (h1, h2, h);
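/* Example (added for illustration; assumes a 32-bit HOST_WIDE_INT):
   adding (l1, h1) = (0xffffffff, 0) to (l2, h2) = (1, 0) gives
   l = 0, and since l < l1 the carry propagates, so h = 1; the
   result is the doubleword value 0x100000000.  OVERFLOW_SUM_SIGN
   (0, 0, 1) is 0, so no signed overflow is reported.  */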
310 /* Negate a doubleword integer with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
313 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
316 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
317 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
319 if (l1 == 0)
321 *lv = 0;
322 *hv = - h1;
323 return (*hv & h1) < 0;
325 else
327 *lv = -l1;
328 *hv = ~h1;
329 return 0;
333 /* Multiply two doubleword integers with doubleword result.
334 Return nonzero if the operation overflows, assuming it's signed.
335 Each argument is given as two `HOST_WIDE_INT' pieces.
336 One argument is L1 and H1; the other, L2 and H2.
337 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
340 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
341 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
342 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
344 HOST_WIDE_INT arg1[4];
345 HOST_WIDE_INT arg2[4];
346 HOST_WIDE_INT prod[4 * 2];
347 unsigned HOST_WIDE_INT carry;
348 int i, j, k;
349 unsigned HOST_WIDE_INT toplow, neglow;
350 HOST_WIDE_INT tophigh, neghigh;
352 encode (arg1, l1, h1);
353 encode (arg2, l2, h2);
355 memset (prod, 0, sizeof prod);
357 for (i = 0; i < 4; i++)
359 carry = 0;
360 for (j = 0; j < 4; j++)
362 k = i + j;
363 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
364 carry += arg1[i] * arg2[j];
365 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
366 carry += prod[k];
367 prod[k] = LOWPART (carry);
368 carry = HIGHPART (carry);
370 prod[i + 4] = carry;
373 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
375 /* Check for overflow by calculating the top half of the answer in full;
376 it should agree with the low half's sign bit. */
377 decode (prod + 4, &toplow, &tophigh);
378 if (h1 < 0)
380 neg_double (l2, h2, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
383 if (h2 < 0)
385 neg_double (l1, h1, &neglow, &neghigh);
386 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
388 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
391 /* Shift the doubleword integer in L1, H1 left by COUNT places
392 keeping only PREC bits of result.
393 Shift right if COUNT is negative.
394 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
395 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
397 void
398 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
399 HOST_WIDE_INT count, unsigned int prec,
400 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
402 unsigned HOST_WIDE_INT signmask;
404 if (count < 0)
406 rshift_double (l1, h1, -count, prec, lv, hv, arith);
407 return;
410 if (SHIFT_COUNT_TRUNCATED)
411 count %= prec;
413 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
415 /* Shifting by the host word size is undefined according to the
416 ANSI standard, so we must handle this as a special case. */
417 *hv = 0;
418 *lv = 0;
420 else if (count >= HOST_BITS_PER_WIDE_INT)
422 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
423 *lv = 0;
425 else
427 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
428 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
429 *lv = l1 << count;
432 /* Sign extend all bits that are beyond the precision. */
434 signmask = -((prec > HOST_BITS_PER_WIDE_INT
435 ? ((unsigned HOST_WIDE_INT) *hv
436 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
437 : (*lv >> (prec - 1))) & 1);
439 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
441 else if (prec >= HOST_BITS_PER_WIDE_INT)
443 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
444 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
446 else
448 *hv = signmask;
449 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
450 *lv |= signmask << prec;
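/* Example (added for illustration; assumes a 32-bit HOST_WIDE_INT):
   shifting (l1, h1) = (0x80000000, 0) left by 1 with prec == 64
   takes the final branch: *hv = (h1 << 1) | (l1 >> 30 >> 1) == 1 and
   *lv = l1 << 1 == 0, i.e. the set bit moves from bit 31 of the low
   word to bit 0 of the high word.  With prec == 64 no bits lie
   beyond the precision, so the sign-extension step changes nothing.  */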
454 /* Shift the doubleword integer in L1, H1 right by COUNT places
455 keeping only PREC bits of result. COUNT must be positive.
456 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
457 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
459 void
460 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
461 HOST_WIDE_INT count, unsigned int prec,
462 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
463 int arith)
465 unsigned HOST_WIDE_INT signmask;
467 signmask = (arith
468 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
469 : 0);
471 if (SHIFT_COUNT_TRUNCATED)
472 count %= prec;
474 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
476 /* Shifting by the host word size is undefined according to the
477 ANSI standard, so we must handle this as a special case. */
478 *hv = 0;
479 *lv = 0;
481 else if (count >= HOST_BITS_PER_WIDE_INT)
483 *hv = 0;
484 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
486 else
488 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
489 *lv = ((l1 >> count)
490 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
493 /* Zero / sign extend all bits that are beyond the precision. */
495 if (count >= (HOST_WIDE_INT)prec)
497 *hv = signmask;
498 *lv = signmask;
500 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
502 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
504 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
505 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
507 else
509 *hv = signmask;
510 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
511 *lv |= signmask << (prec - count);
515 /* Rotate the doubleword integer in L1, H1 left by COUNT places
516 keeping only PREC bits of result.
517 Rotate right if COUNT is negative.
518 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
520 void
521 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
522 HOST_WIDE_INT count, unsigned int prec,
523 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
525 unsigned HOST_WIDE_INT s1l, s2l;
526 HOST_WIDE_INT s1h, s2h;
528 count %= prec;
529 if (count < 0)
530 count += prec;
532 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
533 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
534 *lv = s1l | s2l;
535 *hv = s1h | s2h;
538 /* Rotate the doubleword integer in L1, H1 right by COUNT places
539 keeping only PREC bits of result. COUNT must be positive.
540 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
542 void
543 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
544 HOST_WIDE_INT count, unsigned int prec,
545 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
550 count %= prec;
551 if (count < 0)
552 count += prec;
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
556 *lv = s1l | s2l;
557 *hv = s1h | s2h;
560 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
561 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
562 CODE is a tree code for a kind of division, one of
563 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
564 or EXACT_DIV_EXPR.
565 It controls how the quotient is rounded to an integer.
566 Return nonzero if the operation overflows.
567 UNS nonzero says do unsigned division. */
570 div_and_round_double (enum tree_code code, int uns,
571 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
572 HOST_WIDE_INT hnum_orig,
573 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
574 HOST_WIDE_INT hden_orig,
575 unsigned HOST_WIDE_INT *lquo,
576 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
577 HOST_WIDE_INT *hrem)
579 int quo_neg = 0;
580 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
581 HOST_WIDE_INT den[4], quo[4];
582 int i, j;
583 unsigned HOST_WIDE_INT work;
584 unsigned HOST_WIDE_INT carry = 0;
585 unsigned HOST_WIDE_INT lnum = lnum_orig;
586 HOST_WIDE_INT hnum = hnum_orig;
587 unsigned HOST_WIDE_INT lden = lden_orig;
588 HOST_WIDE_INT hden = hden_orig;
589 int overflow = 0;
591 if (hden == 0 && lden == 0)
592 overflow = 1, lden = 1;
594 /* Calculate quotient sign and convert operands to unsigned. */
595 if (!uns)
597 if (hnum < 0)
599 quo_neg = ~ quo_neg;
600 /* (minimum integer) / (-1) is the only overflow case. */
601 if (neg_double (lnum, hnum, &lnum, &hnum)
602 && ((HOST_WIDE_INT) lden & hden) == -1)
603 overflow = 1;
605 if (hden < 0)
607 quo_neg = ~ quo_neg;
608 neg_double (lden, hden, &lden, &hden);
612 if (hnum == 0 && hden == 0)
613 { /* single precision */
614 *hquo = *hrem = 0;
615 /* This unsigned division rounds toward zero. */
616 *lquo = lnum / lden;
617 goto finish_up;
620 if (hnum == 0)
621 { /* trivial case: dividend < divisor */
622 /* hden != 0 already checked. */
623 *hquo = *lquo = 0;
624 *hrem = hnum;
625 *lrem = lnum;
626 goto finish_up;
629 memset (quo, 0, sizeof quo);
631 memset (num, 0, sizeof num); /* to zero the extra scaling element */
632 memset (den, 0, sizeof den);
634 encode (num, lnum, hnum);
635 encode (den, lden, hden);
637 /* Special code for when the divisor < BASE. */
638 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
640 /* hnum != 0 already checked. */
641 for (i = 4 - 1; i >= 0; i--)
643 work = num[i] + carry * BASE;
644 quo[i] = work / lden;
645 carry = work % lden;
648 else
650 /* Full double precision division,
651 with thanks to Don Knuth's "Seminumerical Algorithms". */
652 int num_hi_sig, den_hi_sig;
653 unsigned HOST_WIDE_INT quo_est, scale;
655 /* Find the highest nonzero divisor digit. */
656 for (i = 4 - 1;; i--)
657 if (den[i] != 0)
659 den_hi_sig = i;
660 break;
663 /* Ensure that the first digit of the divisor is at least BASE/2.
664 This is required by the quotient digit estimation algorithm. */
666 scale = BASE / (den[den_hi_sig] + 1);
667 if (scale > 1)
668 { /* scale divisor and dividend */
669 carry = 0;
670 for (i = 0; i <= 4 - 1; i++)
672 work = (num[i] * scale) + carry;
673 num[i] = LOWPART (work);
674 carry = HIGHPART (work);
677 num[4] = carry;
678 carry = 0;
679 for (i = 0; i <= 4 - 1; i++)
681 work = (den[i] * scale) + carry;
682 den[i] = LOWPART (work);
683 carry = HIGHPART (work);
684 if (den[i] != 0) den_hi_sig = i;
688 num_hi_sig = 4;
690 /* Main loop */
691 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
693 /* Guess the next quotient digit, quo_est, by dividing the first
694 two remaining dividend digits by the high order divisor digit.
695 quo_est is never low and is at most 2 high. */
696 unsigned HOST_WIDE_INT tmp;
698 num_hi_sig = i + den_hi_sig + 1;
699 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
700 if (num[num_hi_sig] != den[den_hi_sig])
701 quo_est = work / den[den_hi_sig];
702 else
703 quo_est = BASE - 1;
705 /* Refine quo_est so it's usually correct, and at most one high. */
706 tmp = work - quo_est * den[den_hi_sig];
707 if (tmp < BASE
708 && (den[den_hi_sig - 1] * quo_est
709 > (tmp * BASE + num[num_hi_sig - 2])))
710 quo_est--;
712 /* Try QUO_EST as the quotient digit, by multiplying the
713 divisor by QUO_EST and subtracting from the remaining dividend.
714 Keep in mind that QUO_EST is the I - 1st digit. */
716 carry = 0;
717 for (j = 0; j <= den_hi_sig; j++)
719 work = quo_est * den[j] + carry;
720 carry = HIGHPART (work);
721 work = num[i + j] - LOWPART (work);
722 num[i + j] = LOWPART (work);
723 carry += HIGHPART (work) != 0;
726 /* If quo_est was high by one, then num[i] went negative and
727 we need to correct things. */
728 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
730 quo_est--;
731 carry = 0; /* add divisor back in */
732 for (j = 0; j <= den_hi_sig; j++)
734 work = num[i + j] + den[j] + carry;
735 carry = HIGHPART (work);
736 num[i + j] = LOWPART (work);
739 num[num_hi_sig] += carry;
742 /* Store the quotient digit. */
743 quo[i] = quo_est;
747 decode (quo, lquo, hquo);
749 finish_up:
750 /* If result is negative, make it so. */
751 if (quo_neg)
752 neg_double (*lquo, *hquo, lquo, hquo);
754 /* Compute trial remainder: rem = num - (quo * den) */
755 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
756 neg_double (*lrem, *hrem, lrem, hrem);
757 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
759 switch (code)
761 case TRUNC_DIV_EXPR:
762 case TRUNC_MOD_EXPR: /* round toward zero */
763 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
764 return overflow;
766 case FLOOR_DIV_EXPR:
767 case FLOOR_MOD_EXPR: /* round toward negative infinity */
768 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
770 /* quo = quo - 1; */
771 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
772 lquo, hquo);
774 else
775 return overflow;
776 break;
778 case CEIL_DIV_EXPR:
779 case CEIL_MOD_EXPR: /* round toward positive infinity */
780 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
782 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
783 lquo, hquo);
785 else
786 return overflow;
787 break;
789 case ROUND_DIV_EXPR:
790 case ROUND_MOD_EXPR: /* round to closest integer */
792 unsigned HOST_WIDE_INT labs_rem = *lrem;
793 HOST_WIDE_INT habs_rem = *hrem;
794 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
795 HOST_WIDE_INT habs_den = hden, htwice;
797 /* Get absolute values. */
798 if (*hrem < 0)
799 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
800 if (hden < 0)
801 neg_double (lden, hden, &labs_den, &habs_den);
803 /* If (2 * abs (lrem) >= abs (lden)) */
804 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
805 labs_rem, habs_rem, &ltwice, &htwice);
807 if (((unsigned HOST_WIDE_INT) habs_den
808 < (unsigned HOST_WIDE_INT) htwice)
809 || (((unsigned HOST_WIDE_INT) habs_den
810 == (unsigned HOST_WIDE_INT) htwice)
811 && (labs_den < ltwice)))
813 if (*hquo < 0)
814 /* quo = quo - 1; */
815 add_double (*lquo, *hquo,
816 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
817 else
818 /* quo = quo + 1; */
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
820 lquo, hquo);
822 else
823 return overflow;
825 break;
827 default:
828 gcc_unreachable ();
831 /* Compute true remainder: rem = num - (quo * den) */
832 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
833 neg_double (*lrem, *hrem, lrem, hrem);
834 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
835 return overflow;
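/* Worked example of the rounding modes handled above (added for
   illustration): dividing -7 by 3 gives quotient -2, remainder -1
   under TRUNC_DIV_EXPR; quotient -3, remainder 2 under
   FLOOR_DIV_EXPR; quotient -2, remainder -1 under CEIL_DIV_EXPR;
   and quotient -2, remainder -1 under ROUND_DIV_EXPR (-7/3 = -2.33
   is closer to -2).  In every case num == quo * den + rem holds,
   and the MOD codes return the corresponding remainder.  */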
838 /* Return true if the built-in mathematical function specified by CODE
839 preserves the sign of its argument, i.e. -f(x) == f(-x). */
841 static bool
842 negate_mathfn_p (enum built_in_function code)
844 switch (code)
846 case BUILT_IN_ASIN:
847 case BUILT_IN_ASINF:
848 case BUILT_IN_ASINL:
849 case BUILT_IN_ATAN:
850 case BUILT_IN_ATANF:
851 case BUILT_IN_ATANL:
852 case BUILT_IN_SIN:
853 case BUILT_IN_SINF:
854 case BUILT_IN_SINL:
855 case BUILT_IN_TAN:
856 case BUILT_IN_TANF:
857 case BUILT_IN_TANL:
858 return true;
860 default:
861 break;
863 return false;
866 /* Check whether we may negate an integer constant T without causing
867 overflow. */
869 bool
870 may_negate_without_overflow_p (tree t)
872 unsigned HOST_WIDE_INT val;
873 unsigned int prec;
874 tree type;
876 gcc_assert (TREE_CODE (t) == INTEGER_CST);
878 type = TREE_TYPE (t);
879 if (TYPE_UNSIGNED (type))
880 return false;
882 prec = TYPE_PRECISION (type);
883 if (prec > HOST_BITS_PER_WIDE_INT)
885 if (TREE_INT_CST_LOW (t) != 0)
886 return true;
887 prec -= HOST_BITS_PER_WIDE_INT;
888 val = TREE_INT_CST_HIGH (t);
890 else
891 val = TREE_INT_CST_LOW (t);
892 if (prec < HOST_BITS_PER_WIDE_INT)
893 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
894 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
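/* Example (added for illustration): in a 32-bit signed type the only
   rejected value is INT_MIN, whose low PREC bits are exactly
   (unsigned HOST_WIDE_INT) 1 << 31; negating it would overflow
   because +2147483648 is not representable.  For any other value,
   e.g. -1 or INT_MAX, the function returns true.  */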
897 /* Determine whether an expression T can be cheaply negated using
898 the function negate_expr. */
900 static bool
901 negate_expr_p (tree t)
903 tree type;
905 if (t == 0)
906 return false;
908 type = TREE_TYPE (t);
910 STRIP_SIGN_NOPS (t);
911 switch (TREE_CODE (t))
913 case INTEGER_CST:
914 if (TYPE_UNSIGNED (type) || ! flag_trapv)
915 return true;
917 /* Check that -CST will not overflow type. */
918 return may_negate_without_overflow_p (t);
920 case REAL_CST:
921 case NEGATE_EXPR:
922 return true;
924 case COMPLEX_CST:
925 return negate_expr_p (TREE_REALPART (t))
926 && negate_expr_p (TREE_IMAGPART (t));
928 case PLUS_EXPR:
929 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
930 return false;
931 /* -(A + B) -> (-B) - A. */
932 if (negate_expr_p (TREE_OPERAND (t, 1))
933 && reorder_operands_p (TREE_OPERAND (t, 0),
934 TREE_OPERAND (t, 1)))
935 return true;
936 /* -(A + B) -> (-A) - B. */
937 return negate_expr_p (TREE_OPERAND (t, 0));
939 case MINUS_EXPR:
940 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
941 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
942 && reorder_operands_p (TREE_OPERAND (t, 0),
943 TREE_OPERAND (t, 1));
945 case MULT_EXPR:
946 if (TYPE_UNSIGNED (TREE_TYPE (t)))
947 break;
949 /* Fall through. */
951 case RDIV_EXPR:
952 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
953 return negate_expr_p (TREE_OPERAND (t, 1))
954 || negate_expr_p (TREE_OPERAND (t, 0));
955 break;
957 case NOP_EXPR:
958 /* Negate -((double)float) as (double)(-float). */
959 if (TREE_CODE (type) == REAL_TYPE)
961 tree tem = strip_float_extensions (t);
962 if (tem != t)
963 return negate_expr_p (tem);
965 break;
967 case CALL_EXPR:
968 /* Negate -f(x) as f(-x). */
969 if (negate_mathfn_p (builtin_mathfn_code (t)))
970 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
971 break;
973 case RSHIFT_EXPR:
974 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
975 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
977 tree op1 = TREE_OPERAND (t, 1);
978 if (TREE_INT_CST_HIGH (op1) == 0
979 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
980 == TREE_INT_CST_LOW (op1))
981 return true;
983 break;
985 default:
986 break;
988 return false;
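/* Example (added for illustration): for a 32-bit int x, the tree
   -((int) x >> 31) qualifies under the RSHIFT_EXPR case above, since
   the shift count 31 equals TYPE_PRECISION (type) - 1; x >> 31 is 0
   or -1, so the negation can instead be done as (unsigned) x >> 31,
   which is 0 or 1.  */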
991 /* Given T, an expression, return the negation of T. Allow for T to be
992 null, in which case return null. */
994 static tree
995 negate_expr (tree t)
997 tree type;
998 tree tem;
1000 if (t == 0)
1001 return 0;
1003 type = TREE_TYPE (t);
1004 STRIP_SIGN_NOPS (t);
1006 switch (TREE_CODE (t))
1008 case INTEGER_CST:
1009 tem = fold_negate_const (t, type);
1010 if (! TREE_OVERFLOW (tem)
1011 || TYPE_UNSIGNED (type)
1012 || ! flag_trapv)
1013 return tem;
1014 break;
1016 case REAL_CST:
1017 tem = fold_negate_const (t, type);
1018 /* Two's complement FP formats, such as c4x, may overflow. */
1019 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1020 return fold_convert (type, tem);
1021 break;
1023 case COMPLEX_CST:
1025 tree rpart = negate_expr (TREE_REALPART (t));
1026 tree ipart = negate_expr (TREE_IMAGPART (t));
1028 if ((TREE_CODE (rpart) == REAL_CST
1029 && TREE_CODE (ipart) == REAL_CST)
1030 || (TREE_CODE (rpart) == INTEGER_CST
1031 && TREE_CODE (ipart) == INTEGER_CST))
1032 return build_complex (type, rpart, ipart);
1034 break;
1036 case NEGATE_EXPR:
1037 return fold_convert (type, TREE_OPERAND (t, 0));
1039 case PLUS_EXPR:
1040 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1042 /* -(A + B) -> (-B) - A. */
1043 if (negate_expr_p (TREE_OPERAND (t, 1))
1044 && reorder_operands_p (TREE_OPERAND (t, 0),
1045 TREE_OPERAND (t, 1)))
1047 tem = negate_expr (TREE_OPERAND (t, 1));
1048 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1049 tem, TREE_OPERAND (t, 0)));
1050 return fold_convert (type, tem);
1053 /* -(A + B) -> (-A) - B. */
1054 if (negate_expr_p (TREE_OPERAND (t, 0)))
1056 tem = negate_expr (TREE_OPERAND (t, 0));
1057 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1058 tem, TREE_OPERAND (t, 1)));
1059 return fold_convert (type, tem);
1062 break;
1064 case MINUS_EXPR:
1065 /* - (A - B) -> B - A */
1066 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1067 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1068 return fold_convert (type,
1069 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1070 TREE_OPERAND (t, 1),
1071 TREE_OPERAND (t, 0))));
1072 break;
1074 case MULT_EXPR:
1075 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1076 break;
1078 /* Fall through. */
1080 case RDIV_EXPR:
1081 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1083 tem = TREE_OPERAND (t, 1);
1084 if (negate_expr_p (tem))
1085 return fold_convert (type,
1086 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1087 TREE_OPERAND (t, 0),
1088 negate_expr (tem))));
1089 tem = TREE_OPERAND (t, 0);
1090 if (negate_expr_p (tem))
1091 return fold_convert (type,
1092 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1093 negate_expr (tem),
1094 TREE_OPERAND (t, 1))));
1096 break;
1098 case NOP_EXPR:
1099 /* Convert -((double)float) into (double)(-float). */
1100 if (TREE_CODE (type) == REAL_TYPE)
1102 tem = strip_float_extensions (t);
1103 if (tem != t && negate_expr_p (tem))
1104 return fold_convert (type, negate_expr (tem));
1106 break;
1108 case CALL_EXPR:
1109 /* Negate -f(x) as f(-x). */
1110 if (negate_mathfn_p (builtin_mathfn_code (t))
1111 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1113 tree fndecl, arg, arglist;
1115 fndecl = get_callee_fndecl (t);
1116 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1117 arglist = build_tree_list (NULL_TREE, arg);
1118 return build_function_call_expr (fndecl, arglist);
1120 break;
1122 case RSHIFT_EXPR:
1123 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1124 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1126 tree op1 = TREE_OPERAND (t, 1);
1127 if (TREE_INT_CST_HIGH (op1) == 0
1128 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1129 == TREE_INT_CST_LOW (op1))
1131 tree ntype = TYPE_UNSIGNED (type)
1132 ? lang_hooks.types.signed_type (type)
1133 : lang_hooks.types.unsigned_type (type);
1134 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1135 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1136 return fold_convert (type, temp);
1139 break;
1141 default:
1142 break;
1145 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1146 return fold_convert (type, tem);
1149 /* Split a tree IN into constant, literal and variable parts that could be
1150 combined with CODE to make IN. "constant" means an expression with
1151 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1152 commutative arithmetic operation. Store the constant part into *CONP,
1153 the literal in *LITP and return the variable part. If a part isn't
1154 present, set it to null. If the tree does not decompose in this way,
1155 return the entire tree as the variable part and the other parts as null.
1157 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1158 case, we negate an operand that was subtracted, except a literal,
1159 for which we use *MINUS_LITP instead.
1161 If NEGATE_P is true, we are negating all of IN, again except a literal
1162 for which we use *MINUS_LITP instead.
1164 If IN is itself a literal or constant, return it as appropriate.
1166 Note that we do not guarantee that any of the three values will be the
1167 same type as IN, but they will have the same signedness and mode. */
1169 static tree
1170 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1171 tree *minus_litp, int negate_p)
1173 tree var = 0;
1175 *conp = 0;
1176 *litp = 0;
1177 *minus_litp = 0;
1179 /* Strip any conversions that don't change the machine mode or signedness. */
1180 STRIP_SIGN_NOPS (in);
1182 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1183 *litp = in;
1184 else if (TREE_CODE (in) == code
1185 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1186 /* We can associate addition and subtraction together (even
1187 though the C standard doesn't say so) for integers because
1188 the value is not affected. For reals, the value might be
1189 affected, so we can't. */
1190 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1191 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1193 tree op0 = TREE_OPERAND (in, 0);
1194 tree op1 = TREE_OPERAND (in, 1);
1195 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1196 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1198 /* First see if either of the operands is a literal, then a constant. */
1199 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1200 *litp = op0, op0 = 0;
1201 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1202 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1204 if (op0 != 0 && TREE_CONSTANT (op0))
1205 *conp = op0, op0 = 0;
1206 else if (op1 != 0 && TREE_CONSTANT (op1))
1207 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1209 /* If we haven't dealt with either operand, this is not a case we can
1210 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1211 if (op0 != 0 && op1 != 0)
1212 var = in;
1213 else if (op0 != 0)
1214 var = op0;
1215 else
1216 var = op1, neg_var_p = neg1_p;
1218 /* Now do any needed negations. */
1219 if (neg_litp_p)
1220 *minus_litp = *litp, *litp = 0;
1221 if (neg_conp_p)
1222 *conp = negate_expr (*conp);
1223 if (neg_var_p)
1224 var = negate_expr (var);
1226 else if (TREE_CONSTANT (in))
1227 *conp = in;
1228 else
1229 var = in;
1231 if (negate_p)
1233 if (*litp)
1234 *minus_litp = *litp, *litp = 0;
1235 else if (*minus_litp)
1236 *litp = *minus_litp, *minus_litp = 0;
1237 *conp = negate_expr (*conp);
1238 var = negate_expr (var);
1241 return var;
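/* Example (added for illustration): splitting IN = (x - 5) with CODE
   == PLUS_EXPR and NEGATE_P == 0: the literal 5 is recorded first,
   and because it was subtracted it moves to *MINUS_LITP; *CONP stays
   null and the variable part x is returned.  So IN decomposes as
   x + (*CONP) - (*MINUS_LITP) = x - 5.  */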
1244 /* Re-associate trees split by the above function. T1 and T2 are either
1245 expressions to associate or null. Return the new expression, if any. If
1246 we build an operation, do it in TYPE and with CODE. */
1248 static tree
1249 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1251 if (t1 == 0)
1252 return t2;
1253 else if (t2 == 0)
1254 return t1;
1256 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1257 try to fold this since we will have infinite recursion. But do
1258 deal with any NEGATE_EXPRs. */
1259 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1260 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1262 if (code == PLUS_EXPR)
1264 if (TREE_CODE (t1) == NEGATE_EXPR)
1265 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1266 fold_convert (type, TREE_OPERAND (t1, 0)));
1267 else if (TREE_CODE (t2) == NEGATE_EXPR)
1268 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1269 fold_convert (type, TREE_OPERAND (t2, 0)));
1271 return build2 (code, type, fold_convert (type, t1),
1272 fold_convert (type, t2));
1275 return fold (build2 (code, type, fold_convert (type, t1),
1276 fold_convert (type, t2)));
1279 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1280 to produce a new constant.
1282 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1284 tree
1285 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1287 unsigned HOST_WIDE_INT int1l, int2l;
1288 HOST_WIDE_INT int1h, int2h;
1289 unsigned HOST_WIDE_INT low;
1290 HOST_WIDE_INT hi;
1291 unsigned HOST_WIDE_INT garbagel;
1292 HOST_WIDE_INT garbageh;
1293 tree t;
1294 tree type = TREE_TYPE (arg1);
1295 int uns = TYPE_UNSIGNED (type);
1296 int is_sizetype
1297 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1298 int overflow = 0;
1299 int no_overflow = 0;
1301 int1l = TREE_INT_CST_LOW (arg1);
1302 int1h = TREE_INT_CST_HIGH (arg1);
1303 int2l = TREE_INT_CST_LOW (arg2);
1304 int2h = TREE_INT_CST_HIGH (arg2);
1306 switch (code)
1308 case BIT_IOR_EXPR:
1309 low = int1l | int2l, hi = int1h | int2h;
1310 break;
1312 case BIT_XOR_EXPR:
1313 low = int1l ^ int2l, hi = int1h ^ int2h;
1314 break;
1316 case BIT_AND_EXPR:
1317 low = int1l & int2l, hi = int1h & int2h;
1318 break;
1320 case RSHIFT_EXPR:
1321 int2l = -int2l;
1322 case LSHIFT_EXPR:
1323 /* It's unclear from the C standard whether shifts can overflow.
1324 The following code ignores overflow; perhaps a C standard
1325 interpretation ruling is needed. */
1326 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1327 &low, &hi, !uns);
1328 no_overflow = 1;
1329 break;
1331 case RROTATE_EXPR:
1332 int2l = - int2l;
1333 case LROTATE_EXPR:
1334 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1335 &low, &hi);
1336 break;
1338 case PLUS_EXPR:
1339 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1340 break;
1342 case MINUS_EXPR:
1343 neg_double (int2l, int2h, &low, &hi);
1344 add_double (int1l, int1h, low, hi, &low, &hi);
1345 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1346 break;
1348 case MULT_EXPR:
1349 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1350 break;
1352 case TRUNC_DIV_EXPR:
1353 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1354 case EXACT_DIV_EXPR:
1355 /* This is a shortcut for a common special case. */
1356 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1357 && ! TREE_CONSTANT_OVERFLOW (arg1)
1358 && ! TREE_CONSTANT_OVERFLOW (arg2)
1359 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1361 if (code == CEIL_DIV_EXPR)
1362 int1l += int2l - 1;
1364 low = int1l / int2l, hi = 0;
1365 break;
1368 /* ... fall through ... */
1370 case ROUND_DIV_EXPR:
1371 if (int2h == 0 && int2l == 1)
1373 low = int1l, hi = int1h;
1374 break;
1376 if (int1l == int2l && int1h == int2h
1377 && ! (int1l == 0 && int1h == 0))
1379 low = 1, hi = 0;
1380 break;
1382 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1383 &low, &hi, &garbagel, &garbageh);
1384 break;
1386 case TRUNC_MOD_EXPR:
1387 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1388 /* This is a shortcut for a common special case. */
1389 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1390 && ! TREE_CONSTANT_OVERFLOW (arg1)
1391 && ! TREE_CONSTANT_OVERFLOW (arg2)
1392 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1394 if (code == CEIL_MOD_EXPR)
1395 int1l += int2l - 1;
1396 low = int1l % int2l, hi = 0;
1397 break;
1400 /* ... fall through ... */
1402 case ROUND_MOD_EXPR:
1403 overflow = div_and_round_double (code, uns,
1404 int1l, int1h, int2l, int2h,
1405 &garbagel, &garbageh, &low, &hi);
1406 break;
1408 case MIN_EXPR:
1409 case MAX_EXPR:
1410 if (uns)
1411 low = (((unsigned HOST_WIDE_INT) int1h
1412 < (unsigned HOST_WIDE_INT) int2h)
1413 || (((unsigned HOST_WIDE_INT) int1h
1414 == (unsigned HOST_WIDE_INT) int2h)
1415 && int1l < int2l));
1416 else
1417 low = (int1h < int2h
1418 || (int1h == int2h && int1l < int2l));
1420 if (low == (code == MIN_EXPR))
1421 low = int1l, hi = int1h;
1422 else
1423 low = int2l, hi = int2h;
1424 break;
1426 default:
1427 gcc_unreachable ();
1430 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1432 if (notrunc)
1434 /* Propagate overflow flags ourselves. */
1435 if (((!uns || is_sizetype) && overflow)
1436 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1438 t = copy_node (t);
1439 TREE_OVERFLOW (t) = 1;
1440 TREE_CONSTANT_OVERFLOW (t) = 1;
1442 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1444 t = copy_node (t);
1445 TREE_CONSTANT_OVERFLOW (t) = 1;
1448 else
1449 t = force_fit_type (t, 1,
1450 ((!uns || is_sizetype) && overflow)
1451 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1452 TREE_CONSTANT_OVERFLOW (arg1)
1453 | TREE_CONSTANT_OVERFLOW (arg2));
1455 return t;
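/* Worked example (added for illustration): folding 7 + 9 in a signed
   8-bit type calls add_double, which gives 16 with no doubleword
   overflow, and force_fit_type leaves the value and flags alone.
   Folding 127 + 1 gives 128, which force_fit_type sign-extends to
   -128 and, since the type is signed and overflowable is 1, marks
   with TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW.  */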
1458 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1459 constant. We assume ARG1 and ARG2 have the same data type, or at least
1460 are the same kind of constant and the same machine mode.
1462 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1464 static tree
1465 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1467 STRIP_NOPS (arg1);
1468 STRIP_NOPS (arg2);
1470 if (TREE_CODE (arg1) == INTEGER_CST)
1471 return int_const_binop (code, arg1, arg2, notrunc);
1473 if (TREE_CODE (arg1) == REAL_CST)
1475 enum machine_mode mode;
1476 REAL_VALUE_TYPE d1;
1477 REAL_VALUE_TYPE d2;
1478 REAL_VALUE_TYPE value;
1479 tree t, type;
1481 d1 = TREE_REAL_CST (arg1);
1482 d2 = TREE_REAL_CST (arg2);
1484 type = TREE_TYPE (arg1);
1485 mode = TYPE_MODE (type);
1487 /* Don't perform operation if we honor signaling NaNs and
1488 either operand is a NaN. */
1489 if (HONOR_SNANS (mode)
1490 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1491 return NULL_TREE;
1493 /* Don't perform operation if it would raise a division
1494 by zero exception. */
1495 if (code == RDIV_EXPR
1496 && REAL_VALUES_EQUAL (d2, dconst0)
1497 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1498 return NULL_TREE;
1500 /* If either operand is a NaN, just return it. Otherwise, set up
1501 for floating-point trap; we return an overflow. */
1502 if (REAL_VALUE_ISNAN (d1))
1503 return arg1;
1504 else if (REAL_VALUE_ISNAN (d2))
1505 return arg2;
1507 REAL_ARITHMETIC (value, code, d1, d2);
1509 t = build_real (type, real_value_truncate (mode, value));
1511 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1512 TREE_CONSTANT_OVERFLOW (t)
1513 = TREE_OVERFLOW (t)
1514 | TREE_CONSTANT_OVERFLOW (arg1)
1515 | TREE_CONSTANT_OVERFLOW (arg2);
1516 return t;
1518 if (TREE_CODE (arg1) == COMPLEX_CST)
1520 tree type = TREE_TYPE (arg1);
1521 tree r1 = TREE_REALPART (arg1);
1522 tree i1 = TREE_IMAGPART (arg1);
1523 tree r2 = TREE_REALPART (arg2);
1524 tree i2 = TREE_IMAGPART (arg2);
1525 tree t;
1527 switch (code)
1529 case PLUS_EXPR:
1530 t = build_complex (type,
1531 const_binop (PLUS_EXPR, r1, r2, notrunc),
1532 const_binop (PLUS_EXPR, i1, i2, notrunc));
1533 break;
1535 case MINUS_EXPR:
1536 t = build_complex (type,
1537 const_binop (MINUS_EXPR, r1, r2, notrunc),
1538 const_binop (MINUS_EXPR, i1, i2, notrunc));
1539 break;
1541 case MULT_EXPR:
1542 t = build_complex (type,
1543 const_binop (MINUS_EXPR,
1544 const_binop (MULT_EXPR,
1545 r1, r2, notrunc),
1546 const_binop (MULT_EXPR,
1547 i1, i2, notrunc),
1548 notrunc),
1549 const_binop (PLUS_EXPR,
1550 const_binop (MULT_EXPR,
1551 r1, i2, notrunc),
1552 const_binop (MULT_EXPR,
1553 i1, r2, notrunc),
1554 notrunc));
1555 break;
1557 case RDIV_EXPR:
1559 tree magsquared
1560 = const_binop (PLUS_EXPR,
1561 const_binop (MULT_EXPR, r2, r2, notrunc),
1562 const_binop (MULT_EXPR, i2, i2, notrunc),
1563 notrunc);
1565 t = build_complex (type,
1566 const_binop
1567 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1568 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1569 const_binop (PLUS_EXPR,
1570 const_binop (MULT_EXPR, r1, r2,
1571 notrunc),
1572 const_binop (MULT_EXPR, i1, i2,
1573 notrunc),
1574 notrunc),
1575 magsquared, notrunc),
1576 const_binop
1577 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1578 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1579 const_binop (MINUS_EXPR,
1580 const_binop (MULT_EXPR, i1, r2,
1581 notrunc),
1582 const_binop (MULT_EXPR, r1, i2,
1583 notrunc),
1584 notrunc),
1585 magsquared, notrunc));
1587 break;
1589 default:
1590 gcc_unreachable ();
1592 return t;
1594 return 0;
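/* The complex cases above follow the usual formulas (added for
   illustration):
   (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i,
   and division multiplies through by the conjugate of the divisor,
   so both result parts are divided by magsquared = r2*r2 + i2*i2.
   E.g. (1 + 2i) * (3 + 4i) = (3 - 8) + (4 + 6)i = -5 + 10i.  */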
1597 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1598 indicates which particular sizetype to create. */
1600 tree
1601 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1603 return build_int_cst (sizetype_tab[(int) kind], number);
1606 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1607 is a tree code. The type of the result is taken from the operands.
1608 Both must be the same integer type, and it must be a size type.
1609 If the operands are constant, so is the result. */
1611 tree
1612 size_binop (enum tree_code code, tree arg0, tree arg1)
1614 tree type = TREE_TYPE (arg0);
1616 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1617 && type == TREE_TYPE (arg1));
1619 /* Handle the special case of two integer constants faster. */
1620 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1622 /* And some specific cases even faster than that. */
1623 if (code == PLUS_EXPR && integer_zerop (arg0))
1624 return arg1;
1625 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1626 && integer_zerop (arg1))
1627 return arg0;
1628 else if (code == MULT_EXPR && integer_onep (arg0))
1629 return arg1;
1631 /* Handle general case of two integer constants. */
1632 return int_const_binop (code, arg0, arg1, 0);
1635 if (arg0 == error_mark_node || arg1 == error_mark_node)
1636 return error_mark_node;
1638 return fold (build2 (code, type, arg0, arg1));
1641 /* Given two values, either both of sizetype or both of bitsizetype,
1642 compute the difference between the two values. Return the value
1643 in the signed type corresponding to the type of the operands. */
1645 tree
1646 size_diffop (tree arg0, tree arg1)
1648 tree type = TREE_TYPE (arg0);
1649 tree ctype;
1651 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1652 && type == TREE_TYPE (arg1));
1654 /* If the type is already signed, just do the simple thing. */
1655 if (!TYPE_UNSIGNED (type))
1656 return size_binop (MINUS_EXPR, arg0, arg1);
1658 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1660 /* If either operand is not a constant, do the conversions to the signed
1661 type and subtract. The hardware will do the right thing with any
1662 overflow in the subtraction. */
1663 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1664 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1665 fold_convert (ctype, arg1));
1667 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1668 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1669 overflow) and negate (which can't either). Special-case a result
1670 of zero while we're here. */
1671 if (tree_int_cst_equal (arg0, arg1))
1672 return fold_convert (ctype, integer_zero_node);
1673 else if (tree_int_cst_lt (arg1, arg0))
1674 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1675 else
1676 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1677 fold_convert (ctype, size_binop (MINUS_EXPR,
1678 arg1, arg0)));
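/* Example (added for illustration): sizetype is unsigned, so
   size_diffop (size_int (4), size_int (7)) cannot simply subtract.
   Both operands are constants and 4 < 7, so it computes 7 - 4 = 3
   in sizetype, converts to ssizetype, and negates, returning -3 of
   the signed type.  */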
1681 /* Construct a vector of zero elements of vector type TYPE. */
1683 static tree
1684 build_zero_vector (tree type)
1686 tree elem, list;
1687 int i, units;
1689 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1690 units = TYPE_VECTOR_SUBPARTS (type);
1692 list = NULL_TREE;
1693 for (i = 0; i < units; i++)
1694 list = tree_cons (NULL_TREE, elem, list);
1695 return build_vector (type, list);
1699 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1700 type TYPE. If no simplification can be done return NULL_TREE. */
1702 static tree
1703 fold_convert_const (enum tree_code code, tree type, tree arg1)
1705 int overflow = 0;
1706 tree t;
1708 if (TREE_TYPE (arg1) == type)
1709 return arg1;
1711 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1713 if (TREE_CODE (arg1) == INTEGER_CST)
1715 /* If we would build a constant wider than GCC supports,
1716 leave the conversion unfolded. */
1717 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1718 return NULL_TREE;
1720 /* Given an integer constant, make new constant with new type,
1721 appropriately sign-extended or truncated. */
1722 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1723 TREE_INT_CST_HIGH (arg1));
1725 t = force_fit_type (t,
1726 /* Don't set the overflow when
1727 converting a pointer */
1728 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1729 (TREE_INT_CST_HIGH (arg1) < 0
1730 && (TYPE_UNSIGNED (type)
1731 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1732 | TREE_OVERFLOW (arg1),
1733 TREE_CONSTANT_OVERFLOW (arg1));
1734 return t;
1736 else if (TREE_CODE (arg1) == REAL_CST)
1738 /* The following code implements the floating point to integer
1739 conversion rules required by the Java Language Specification,
1740 that IEEE NaNs are mapped to zero and values that overflow
1741 the target precision saturate, i.e. values greater than
1742 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1743 are mapped to INT_MIN. These semantics are allowed by the
1744 C and C++ standards that simply state that the behavior of
1745 FP-to-integer conversion is unspecified upon overflow. */
1747 HOST_WIDE_INT high, low;
1748 REAL_VALUE_TYPE r;
1749 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1751 switch (code)
1753 case FIX_TRUNC_EXPR:
1754 real_trunc (&r, VOIDmode, &x);
1755 break;
1757 case FIX_CEIL_EXPR:
1758 real_ceil (&r, VOIDmode, &x);
1759 break;
1761 case FIX_FLOOR_EXPR:
1762 real_floor (&r, VOIDmode, &x);
1763 break;
1765 case FIX_ROUND_EXPR:
1766 real_round (&r, VOIDmode, &x);
1767 break;
1769 default:
1770 gcc_unreachable ();
1773 /* If R is NaN, return zero and show we have an overflow. */
1774 if (REAL_VALUE_ISNAN (r))
1776 overflow = 1;
1777 high = 0;
1778 low = 0;
1781 /* See if R is less than the lower bound or greater than the
1782 upper bound. */
1784 if (! overflow)
1786 tree lt = TYPE_MIN_VALUE (type);
1787 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1788 if (REAL_VALUES_LESS (r, l))
1790 overflow = 1;
1791 high = TREE_INT_CST_HIGH (lt);
1792 low = TREE_INT_CST_LOW (lt);
1796 if (! overflow)
1798 tree ut = TYPE_MAX_VALUE (type);
1799 if (ut)
1801 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1802 if (REAL_VALUES_LESS (u, r))
1804 overflow = 1;
1805 high = TREE_INT_CST_HIGH (ut);
1806 low = TREE_INT_CST_LOW (ut);
1811 if (! overflow)
1812 REAL_VALUE_TO_INT (&low, &high, r);
1814 t = build_int_cst_wide (type, low, high);
1816 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1817 TREE_CONSTANT_OVERFLOW (arg1));
1818 return t;
1821 else if (TREE_CODE (type) == REAL_TYPE)
1823 if (TREE_CODE (arg1) == INTEGER_CST)
1824 return build_real_from_int_cst (type, arg1);
1825 if (TREE_CODE (arg1) == REAL_CST)
1827 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1829 /* We make a copy of ARG1 so that we don't modify an
1830 existing constant tree. */
1831 t = copy_node (arg1);
1832 TREE_TYPE (t) = type;
1833 return t;
1836 t = build_real (type,
1837 real_value_truncate (TYPE_MODE (type),
1838 TREE_REAL_CST (arg1)));
1840 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1841 TREE_CONSTANT_OVERFLOW (t)
1842 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1843 return t;
1846 return NULL_TREE;
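/* Example of the saturating conversion rules above (added for
   illustration): converting the REAL_CST 1.0e30 to a 32-bit signed
   type fails the comparison against TYPE_MAX_VALUE, so HIGH/LOW are
   taken from that bound and the result is INT_MAX with the overflow
   flags set by force_fit_type; converting a NaN yields 0, likewise
   flagged as an overflow.  */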
1849 /* Convert expression ARG to type TYPE. Used by the middle-end for
1850 simple conversions in preference to calling the front-end's convert. */
1852 tree
1853 fold_convert (tree type, tree arg)
1855 tree orig = TREE_TYPE (arg);
1856 tree tem;
1858 if (type == orig)
1859 return arg;
1861 if (TREE_CODE (arg) == ERROR_MARK
1862 || TREE_CODE (type) == ERROR_MARK
1863 || TREE_CODE (orig) == ERROR_MARK)
1864 return error_mark_node;
1866 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1867 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1868 TYPE_MAIN_VARIANT (orig)))
1869 return fold (build1 (NOP_EXPR, type, arg));
1871 switch (TREE_CODE (type))
1873 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1874 case POINTER_TYPE: case REFERENCE_TYPE:
1875 case OFFSET_TYPE:
1876 if (TREE_CODE (arg) == INTEGER_CST)
1878 tem = fold_convert_const (NOP_EXPR, type, arg);
1879 if (tem != NULL_TREE)
1880 return tem;
1882 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1883 || TREE_CODE (orig) == OFFSET_TYPE)
1884 return fold (build1 (NOP_EXPR, type, arg));
1885 if (TREE_CODE (orig) == COMPLEX_TYPE)
1887 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1888 return fold_convert (type, tem);
1890 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1891 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1892 return fold (build1 (NOP_EXPR, type, arg));
1894 case REAL_TYPE:
1895 if (TREE_CODE (arg) == INTEGER_CST)
1897 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1898 if (tem != NULL_TREE)
1899 return tem;
1901 else if (TREE_CODE (arg) == REAL_CST)
1903 tem = fold_convert_const (NOP_EXPR, type, arg);
1904 if (tem != NULL_TREE)
1905 return tem;
1908 switch (TREE_CODE (orig))
1910 case INTEGER_TYPE: case CHAR_TYPE:
1911 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1912 case POINTER_TYPE: case REFERENCE_TYPE:
1913 return fold (build1 (FLOAT_EXPR, type, arg));
1915 case REAL_TYPE:
1916 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1917 type, arg));
1919 case COMPLEX_TYPE:
1920 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1921 return fold_convert (type, tem);
1923 default:
1924 gcc_unreachable ();
1927 case COMPLEX_TYPE:
1928 switch (TREE_CODE (orig))
1930 case INTEGER_TYPE: case CHAR_TYPE:
1931 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case REAL_TYPE:
1934 return build2 (COMPLEX_EXPR, type,
1935 fold_convert (TREE_TYPE (type), arg),
1936 fold_convert (TREE_TYPE (type), integer_zero_node));
1937 case COMPLEX_TYPE:
1939 tree rpart, ipart;
1941 if (TREE_CODE (arg) == COMPLEX_EXPR)
1943 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1944 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1945 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1948 arg = save_expr (arg);
1949 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1950 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1951 rpart = fold_convert (TREE_TYPE (type), rpart);
1952 ipart = fold_convert (TREE_TYPE (type), ipart);
1953 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1956 default:
1957 gcc_unreachable ();
1960 case VECTOR_TYPE:
1961 if (integer_zerop (arg))
1962 return build_zero_vector (type);
1963 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1964 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1965 || TREE_CODE (orig) == VECTOR_TYPE);
1966 return fold (build1 (NOP_EXPR, type, arg));
1968 case VOID_TYPE:
1969 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1971 default:
1972 gcc_unreachable ();
1976 /* Return an expr equal to X but certainly not valid as an lvalue. */
1978 tree
1979 non_lvalue (tree x)
1981 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
1982 us. */
1983 if (in_gimple_form)
1984 return x;
1986 /* We only need to wrap lvalue tree codes. */
1987 switch (TREE_CODE (x))
1989 case VAR_DECL:
1990 case PARM_DECL:
1991 case RESULT_DECL:
1992 case LABEL_DECL:
1993 case FUNCTION_DECL:
1994 case SSA_NAME:
1996 case COMPONENT_REF:
1997 case INDIRECT_REF:
1998 case ALIGN_INDIRECT_REF:
1999 case MISALIGNED_INDIRECT_REF:
2000 case ARRAY_REF:
2001 case ARRAY_RANGE_REF:
2002 case BIT_FIELD_REF:
2003 case OBJ_TYPE_REF:
2005 case REALPART_EXPR:
2006 case IMAGPART_EXPR:
2007 case PREINCREMENT_EXPR:
2008 case PREDECREMENT_EXPR:
2009 case SAVE_EXPR:
2010 case TRY_CATCH_EXPR:
2011 case WITH_CLEANUP_EXPR:
2012 case COMPOUND_EXPR:
2013 case MODIFY_EXPR:
2014 case TARGET_EXPR:
2015 case COND_EXPR:
2016 case BIND_EXPR:
2017 case MIN_EXPR:
2018 case MAX_EXPR:
2019 break;
2021 default:
2022 /* Assume the worst for front-end tree codes. */
2023 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2024 break;
2025 return x;
2027 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2030 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2031 Zero means allow extended lvalues. */
2033 int pedantic_lvalues;
2035 /* When pedantic, return an expr equal to X but certainly not valid as a
2036 pedantic lvalue. Otherwise, return X. */
2038 static tree
2039 pedantic_non_lvalue (tree x)
2041 if (pedantic_lvalues)
2042 return non_lvalue (x);
2043 else
2044 return x;
2047 /* Given a tree comparison code, return the code that is the logical inverse
2048 of the given code. It is not safe to do this for floating-point
2049 comparisons, except for NE_EXPR and EQ_EXPR, so we take an HONOR_NANS
2050 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
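/* Illustrative example (not part of the original source): with NaNs
   honored, the inverse of a < b is not a >= b but the unordered form,
   because both orderings are false when either operand is a NaN:

     invert_tree_comparison (LT_EXPR, true)   == UNGE_EXPR
     invert_tree_comparison (LT_EXPR, false)  == GE_EXPR

   and when trapping math is enabled as well, ERROR_MARK comes back,
   since the unordered forms would not trap where the original did.  */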
2052 static enum tree_code
2053 invert_tree_comparison (enum tree_code code, bool honor_nans)
2055 if (honor_nans && flag_trapping_math)
2056 return ERROR_MARK;
2058 switch (code)
2060 case EQ_EXPR:
2061 return NE_EXPR;
2062 case NE_EXPR:
2063 return EQ_EXPR;
2064 case GT_EXPR:
2065 return honor_nans ? UNLE_EXPR : LE_EXPR;
2066 case GE_EXPR:
2067 return honor_nans ? UNLT_EXPR : LT_EXPR;
2068 case LT_EXPR:
2069 return honor_nans ? UNGE_EXPR : GE_EXPR;
2070 case LE_EXPR:
2071 return honor_nans ? UNGT_EXPR : GT_EXPR;
2072 case LTGT_EXPR:
2073 return UNEQ_EXPR;
2074 case UNEQ_EXPR:
2075 return LTGT_EXPR;
2076 case UNGT_EXPR:
2077 return LE_EXPR;
2078 case UNGE_EXPR:
2079 return LT_EXPR;
2080 case UNLT_EXPR:
2081 return GE_EXPR;
2082 case UNLE_EXPR:
2083 return GT_EXPR;
2084 case ORDERED_EXPR:
2085 return UNORDERED_EXPR;
2086 case UNORDERED_EXPR:
2087 return ORDERED_EXPR;
2088 default:
2089 gcc_unreachable ();
2093 /* Similar, but return the comparison that results if the operands are
2094 swapped. This is safe for floating-point. */
2096 enum tree_code
2097 swap_tree_comparison (enum tree_code code)
2099 switch (code)
2101 case EQ_EXPR:
2102 case NE_EXPR:
2103 return code;
2104 case GT_EXPR:
2105 return LT_EXPR;
2106 case GE_EXPR:
2107 return LE_EXPR;
2108 case LT_EXPR:
2109 return GT_EXPR;
2110 case LE_EXPR:
2111 return GE_EXPR;
2112 default:
2113 gcc_unreachable ();
2118 /* Convert a comparison tree code from an enum tree_code representation
2119 into a compcode bit-based encoding. This function is the inverse of
2120 compcode_to_comparison. */
2122 static enum comparison_code
2123 comparison_to_compcode (enum tree_code code)
2125 switch (code)
2127 case LT_EXPR:
2128 return COMPCODE_LT;
2129 case EQ_EXPR:
2130 return COMPCODE_EQ;
2131 case LE_EXPR:
2132 return COMPCODE_LE;
2133 case GT_EXPR:
2134 return COMPCODE_GT;
2135 case NE_EXPR:
2136 return COMPCODE_NE;
2137 case GE_EXPR:
2138 return COMPCODE_GE;
2139 case ORDERED_EXPR:
2140 return COMPCODE_ORD;
2141 case UNORDERED_EXPR:
2142 return COMPCODE_UNORD;
2143 case UNLT_EXPR:
2144 return COMPCODE_UNLT;
2145 case UNEQ_EXPR:
2146 return COMPCODE_UNEQ;
2147 case UNLE_EXPR:
2148 return COMPCODE_UNLE;
2149 case UNGT_EXPR:
2150 return COMPCODE_UNGT;
2151 case LTGT_EXPR:
2152 return COMPCODE_LTGT;
2153 case UNGE_EXPR:
2154 return COMPCODE_UNGE;
2155 default:
2156 gcc_unreachable ();
2160 /* Convert a compcode bit-based encoding of a comparison operator back
2161 to GCC's enum tree_code representation. This function is the
2162 inverse of comparison_to_compcode. */
2164 static enum tree_code
2165 compcode_to_comparison (enum comparison_code code)
2167 switch (code)
2169 case COMPCODE_LT:
2170 return LT_EXPR;
2171 case COMPCODE_EQ:
2172 return EQ_EXPR;
2173 case COMPCODE_LE:
2174 return LE_EXPR;
2175 case COMPCODE_GT:
2176 return GT_EXPR;
2177 case COMPCODE_NE:
2178 return NE_EXPR;
2179 case COMPCODE_GE:
2180 return GE_EXPR;
2181 case COMPCODE_ORD:
2182 return ORDERED_EXPR;
2183 case COMPCODE_UNORD:
2184 return UNORDERED_EXPR;
2185 case COMPCODE_UNLT:
2186 return UNLT_EXPR;
2187 case COMPCODE_UNEQ:
2188 return UNEQ_EXPR;
2189 case COMPCODE_UNLE:
2190 return UNLE_EXPR;
2191 case COMPCODE_UNGT:
2192 return UNGT_EXPR;
2193 case COMPCODE_LTGT:
2194 return LTGT_EXPR;
2195 case COMPCODE_UNGE:
2196 return UNGE_EXPR;
2197 default:
2198 gcc_unreachable ();
2202 /* Return a tree for the comparison which is the combination of
2203 doing the AND or OR (depending on CODE) of the two operations LCODE
2204 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2205 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2206 if this makes the transformation invalid. */
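/* Illustrative example (not part of the original source): the compcode
   encoding turns logical combination into bit arithmetic.  For
   (a < b) && (a == b), COMPCODE_LT & COMPCODE_EQ is COMPCODE_FALSE, so
   the conjunction folds to constant false; for (a < b) || (a == b),
   COMPCODE_LT | COMPCODE_EQ is COMPCODE_LE, i.e. the single test
   a <= b.  */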
2208 tree
2209 combine_comparisons (enum tree_code code, enum tree_code lcode,
2210 enum tree_code rcode, tree truth_type,
2211 tree ll_arg, tree lr_arg)
2213 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2214 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2215 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2216 enum comparison_code compcode;
2218 switch (code)
2220 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2221 compcode = lcompcode & rcompcode;
2222 break;
2224 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2225 compcode = lcompcode | rcompcode;
2226 break;
2228 default:
2229 return NULL_TREE;
2232 if (!honor_nans)
2234 /* Eliminate unordered comparisons, as well as LTGT and ORD
2235 which are not used unless the mode has NaNs. */
2236 compcode &= ~COMPCODE_UNORD;
2237 if (compcode == COMPCODE_LTGT)
2238 compcode = COMPCODE_NE;
2239 else if (compcode == COMPCODE_ORD)
2240 compcode = COMPCODE_TRUE;
2242 else if (flag_trapping_math)
2244 /* Check that the original operation and the optimized ones will trap
2245 under the same condition. */
2246 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2247 && (lcompcode != COMPCODE_EQ)
2248 && (lcompcode != COMPCODE_ORD);
2249 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2250 && (rcompcode != COMPCODE_EQ)
2251 && (rcompcode != COMPCODE_ORD);
2252 bool trap = (compcode & COMPCODE_UNORD) == 0
2253 && (compcode != COMPCODE_EQ)
2254 && (compcode != COMPCODE_ORD);
2256 /* In a short-circuited boolean expression the LHS might be
2257 such that the RHS, if evaluated, will never trap. For
2258 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2259 if neither x nor y is NaN. (This is a mixed blessing: for
2260 example, the expression above will never trap, hence
2261 optimizing it to x < y would be invalid). */
2262 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2263 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2264 rtrap = false;
2266 /* If the comparison was short-circuited, and only the RHS
2267 trapped, we may now generate a spurious trap. */
2268 if (rtrap && !ltrap
2269 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2270 return NULL_TREE;
2272 /* If we changed the conditions that cause a trap, we lose. */
2273 if ((ltrap || rtrap) != trap)
2274 return NULL_TREE;
2277 if (compcode == COMPCODE_TRUE)
2278 return constant_boolean_node (true, truth_type);
2279 else if (compcode == COMPCODE_FALSE)
2280 return constant_boolean_node (false, truth_type);
2281 else
2282 return fold (build2 (compcode_to_comparison (compcode),
2283 truth_type, ll_arg, lr_arg));
2286 /* Return nonzero if CODE is a tree code that represents a truth value. */
2288 static int
2289 truth_value_p (enum tree_code code)
2291 return (TREE_CODE_CLASS (code) == tcc_comparison
2292 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2293 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2294 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2297 /* Return nonzero if two operands (typically of the same tree node)
2298 are necessarily equal. If either argument has side-effects this
2299 function returns zero. FLAGS modifies behavior as follows:
2301 If OEP_ONLY_CONST is set, only return nonzero for constants.
2302 This function tests whether the operands are indistinguishable;
2303 it does not test whether they are equal using C's == operation.
2304 The distinction is important for IEEE floating point, because
2305 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2306 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2308 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2309 even though it may hold multiple values during a function.
2310 This is because a GCC tree node guarantees that nothing else is
2311 executed between the evaluation of its "operands" (which may often
2312 be evaluated in arbitrary order). Hence if the operands themselves
2313 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2314 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2315 unset means assuming isochronic (or instantaneous) tree equivalence.
2316 Unless comparing arbitrary expression trees, such as from different
2317 statements, this flag can usually be left unset.
2319 If OEP_PURE_SAME is set, then pure functions with identical arguments
2320 are considered the same. It is used when the caller has other ways
2321 to ensure that global memory is unchanged in between. */
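/* Illustrative example (not part of the original source): for integer
   variables a and b, operand_equal_p (a + b, b + a, 0) returns 1 via
   the commutative tcc_binary case below, whereas with OEP_ONLY_CONST
   even operand_equal_p (a, a, OEP_ONLY_CONST) returns 0, because a
   VAR_DECL is not a constant.  */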
2323 int
2324 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2326 /* If one is specified and the other isn't, they aren't equal, and if
2327 neither is specified, they are.
2329 ??? This is temporary and is meant only to handle the cases of the
2330 optional operands for COMPONENT_REF and ARRAY_REF. */
2331 if ((arg0 && !arg1) || (!arg0 && arg1))
2332 return 0;
2333 else if (!arg0 && !arg1)
2334 return 1;
2335 /* If either is ERROR_MARK, they aren't equal. */
2336 else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2337 return 0;
2339 /* If the two types differ in signedness, then we can't consider
2340 them equal. We must check this before the STRIP_NOPS calls
2341 because they may change the signedness of the arguments. */
2342 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2343 return 0;
2345 STRIP_NOPS (arg0);
2346 STRIP_NOPS (arg1);
2348 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2349 /* This is needed for conversions and for COMPONENT_REF.
2350 Might as well play it safe and always test this. */
2351 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2352 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2353 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2354 return 0;
2356 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2357 We don't care about side effects in that case because the SAVE_EXPR
2358 takes care of that for us. In all other cases, two expressions are
2359 equal if they have no side effects. If we have two identical
2360 expressions with side effects that should be treated the same due
2361 to the only side effects being identical SAVE_EXPR's, that will
2362 be detected in the recursive calls below. */
2363 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2364 && (TREE_CODE (arg0) == SAVE_EXPR
2365 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2366 return 1;
2368 /* Next handle constant cases, those for which we can return 1 even
2369 if ONLY_CONST is set. */
2370 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2371 switch (TREE_CODE (arg0))
2373 case INTEGER_CST:
2374 return (! TREE_CONSTANT_OVERFLOW (arg0)
2375 && ! TREE_CONSTANT_OVERFLOW (arg1)
2376 && tree_int_cst_equal (arg0, arg1));
2378 case REAL_CST:
2379 return (! TREE_CONSTANT_OVERFLOW (arg0)
2380 && ! TREE_CONSTANT_OVERFLOW (arg1)
2381 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2382 TREE_REAL_CST (arg1)));
2384 case VECTOR_CST:
2386 tree v1, v2;
2388 if (TREE_CONSTANT_OVERFLOW (arg0)
2389 || TREE_CONSTANT_OVERFLOW (arg1))
2390 return 0;
2392 v1 = TREE_VECTOR_CST_ELTS (arg0);
2393 v2 = TREE_VECTOR_CST_ELTS (arg1);
2394 while (v1 && v2)
2396 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2397 flags))
2398 return 0;
2399 v1 = TREE_CHAIN (v1);
2400 v2 = TREE_CHAIN (v2);
2403 return 1;
2406 case COMPLEX_CST:
2407 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2408 flags)
2409 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2410 flags));
2412 case STRING_CST:
2413 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2414 && ! memcmp (TREE_STRING_POINTER (arg0),
2415 TREE_STRING_POINTER (arg1),
2416 TREE_STRING_LENGTH (arg0)));
2418 case ADDR_EXPR:
2419 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2420 0);
2421 default:
2422 break;
2425 if (flags & OEP_ONLY_CONST)
2426 return 0;
2428 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2430 case tcc_unary:
2431 /* Two conversions are equal only if signedness and modes match. */
2432 switch (TREE_CODE (arg0))
2434 case NOP_EXPR:
2435 case CONVERT_EXPR:
2436 case FIX_CEIL_EXPR:
2437 case FIX_TRUNC_EXPR:
2438 case FIX_FLOOR_EXPR:
2439 case FIX_ROUND_EXPR:
2440 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2441 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2442 return 0;
2443 break;
2444 default:
2445 break;
2448 return operand_equal_p (TREE_OPERAND (arg0, 0),
2449 TREE_OPERAND (arg1, 0), flags);
2451 case tcc_comparison:
2452 case tcc_binary:
2453 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2454 TREE_OPERAND (arg1, 0), flags)
2455 && operand_equal_p (TREE_OPERAND (arg0, 1),
2456 TREE_OPERAND (arg1, 1), flags))
2457 return 1;
2459 /* For commutative ops, allow the other order. */
2460 return (commutative_tree_code (TREE_CODE (arg0))
2461 && operand_equal_p (TREE_OPERAND (arg0, 0),
2462 TREE_OPERAND (arg1, 1), flags)
2463 && operand_equal_p (TREE_OPERAND (arg0, 1),
2464 TREE_OPERAND (arg1, 0), flags));
2466 case tcc_reference:
2467 /* If either of the pointer (or reference) expressions we are
2468 dereferencing contain a side effect, these cannot be equal. */
2469 if (TREE_SIDE_EFFECTS (arg0)
2470 || TREE_SIDE_EFFECTS (arg1))
2471 return 0;
2473 switch (TREE_CODE (arg0))
2475 case INDIRECT_REF:
2476 case ALIGN_INDIRECT_REF:
2477 case MISALIGNED_INDIRECT_REF:
2478 case REALPART_EXPR:
2479 case IMAGPART_EXPR:
2480 return operand_equal_p (TREE_OPERAND (arg0, 0),
2481 TREE_OPERAND (arg1, 0), flags);
2483 case ARRAY_REF:
2484 case ARRAY_RANGE_REF:
2485 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2486 TREE_OPERAND (arg1, 0), flags)
2487 && operand_equal_p (TREE_OPERAND (arg0, 1),
2488 TREE_OPERAND (arg1, 1), flags)
2489 && operand_equal_p (TREE_OPERAND (arg0, 2),
2490 TREE_OPERAND (arg1, 2), flags)
2491 && operand_equal_p (TREE_OPERAND (arg0, 3),
2492 TREE_OPERAND (arg1, 3), flags));
2495 case COMPONENT_REF:
2496 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2497 TREE_OPERAND (arg1, 0), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 1),
2499 TREE_OPERAND (arg1, 1), flags)
2500 && operand_equal_p (TREE_OPERAND (arg0, 2),
2501 TREE_OPERAND (arg1, 2), flags));
2504 case BIT_FIELD_REF:
2505 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2506 TREE_OPERAND (arg1, 0), flags)
2507 && operand_equal_p (TREE_OPERAND (arg0, 1),
2508 TREE_OPERAND (arg1, 1), flags)
2509 && operand_equal_p (TREE_OPERAND (arg0, 2),
2510 TREE_OPERAND (arg1, 2), flags));
2511 default:
2512 return 0;
2515 case tcc_expression:
2516 switch (TREE_CODE (arg0))
2518 case ADDR_EXPR:
2519 case TRUTH_NOT_EXPR:
2520 return operand_equal_p (TREE_OPERAND (arg0, 0),
2521 TREE_OPERAND (arg1, 0), flags);
2523 case TRUTH_ANDIF_EXPR:
2524 case TRUTH_ORIF_EXPR:
2525 return operand_equal_p (TREE_OPERAND (arg0, 0),
2526 TREE_OPERAND (arg1, 0), flags)
2527 && operand_equal_p (TREE_OPERAND (arg0, 1),
2528 TREE_OPERAND (arg1, 1), flags);
2530 case TRUTH_AND_EXPR:
2531 case TRUTH_OR_EXPR:
2532 case TRUTH_XOR_EXPR:
2533 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 0), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 1), flags))
2537 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2538 TREE_OPERAND (arg1, 1), flags)
2539 && operand_equal_p (TREE_OPERAND (arg0, 1),
2540 TREE_OPERAND (arg1, 0), flags));
2542 case CALL_EXPR:
2543 /* If the CALL_EXPRs call different functions, then they
2544 clearly cannot be equal. */
2545 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2546 TREE_OPERAND (arg1, 0), flags))
2547 return 0;
2550 unsigned int cef = call_expr_flags (arg0);
2551 if (flags & OEP_PURE_SAME)
2552 cef &= ECF_CONST | ECF_PURE;
2553 else
2554 cef &= ECF_CONST;
2555 if (!cef)
2556 return 0;
2559 /* Now see if all the arguments are the same. operand_equal_p
2560 does not handle TREE_LIST, so we walk the operands here
2561 feeding them to operand_equal_p. */
2562 arg0 = TREE_OPERAND (arg0, 1);
2563 arg1 = TREE_OPERAND (arg1, 1);
2564 while (arg0 && arg1)
2566 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2567 flags))
2568 return 0;
2570 arg0 = TREE_CHAIN (arg0);
2571 arg1 = TREE_CHAIN (arg1);
2574 /* If we get here and both argument lists are exhausted
2575 then the CALL_EXPRs are equal. */
2576 return ! (arg0 || arg1);
2578 default:
2579 return 0;
2582 case tcc_declaration:
2583 /* Consider __builtin_sqrt equal to sqrt. */
2584 return (TREE_CODE (arg0) == FUNCTION_DECL
2585 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2586 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2587 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2589 default:
2590 return 0;
2594 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2595 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2597 When in doubt, return 0. */
2599 static int
2600 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2602 int unsignedp1, unsignedpo;
2603 tree primarg0, primarg1, primother;
2604 unsigned int correct_width;
2606 if (operand_equal_p (arg0, arg1, 0))
2607 return 1;
2609 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2610 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2611 return 0;
2613 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2614 and see if the inner values are the same. This removes any
2615 signedness comparison, which doesn't matter here. */
2616 primarg0 = arg0, primarg1 = arg1;
2617 STRIP_NOPS (primarg0);
2618 STRIP_NOPS (primarg1);
2619 if (operand_equal_p (primarg0, primarg1, 0))
2620 return 1;
2622 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2623 actual comparison operand, ARG0.
2625 First throw away any conversions to wider types
2626 already present in the operands. */
2628 primarg1 = get_narrower (arg1, &unsignedp1);
2629 primother = get_narrower (other, &unsignedpo);
2631 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2632 if (unsignedp1 == unsignedpo
2633 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2634 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2636 tree type = TREE_TYPE (arg0);
2638 /* Make sure shorter operand is extended the right way
2639 to match the longer operand. */
2640 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2641 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2643 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2644 return 1;
2647 return 0;
2650 /* See if ARG is an expression that is either a comparison or is performing
2651 arithmetic on comparisons. The comparisons must only be comparing
2652 two different values, which will be stored in *CVAL1 and *CVAL2; if
2653 they are nonzero it means that some operands have already been found.
2654 No variables may be used anywhere else in the expression except in the
2655 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2656 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2658 If this is true, return 1. Otherwise, return zero. */
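/* Illustrative example (not part of the original source): for
   ARG = (x < y) || (x == y) with *CVAL1 and *CVAL2 initially zero, the
   first comparison stores x in *CVAL1 and y in *CVAL2, the second one
   matches them, and the function returns 1.  By contrast
   (x < y) || (x == z) returns 0, since three distinct values appear.  */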
2660 static int
2661 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2663 enum tree_code code = TREE_CODE (arg);
2664 enum tree_code_class class = TREE_CODE_CLASS (code);
2666 /* We can handle some of the tcc_expression cases here. */
2667 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2668 class = tcc_unary;
2669 else if (class == tcc_expression
2670 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2671 || code == COMPOUND_EXPR))
2672 class = tcc_binary;
2674 else if (class == tcc_expression && code == SAVE_EXPR
2675 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2677 /* If we've already found a CVAL1 or CVAL2, this expression is
2678 too complex to handle. */
2679 if (*cval1 || *cval2)
2680 return 0;
2682 class = tcc_unary;
2683 *save_p = 1;
2686 switch (class)
2688 case tcc_unary:
2689 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2691 case tcc_binary:
2692 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2693 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2694 cval1, cval2, save_p));
2696 case tcc_constant:
2697 return 1;
2699 case tcc_expression:
2700 if (code == COND_EXPR)
2701 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2702 cval1, cval2, save_p)
2703 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2704 cval1, cval2, save_p)
2705 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2706 cval1, cval2, save_p));
2707 return 0;
2709 case tcc_comparison:
2710 /* First see if we can handle the first operand, then the second. For
2711 the second operand, we know *CVAL1 can't be zero. It must be that
2712 one side of the comparison is each of the values; test for the
2713 case where this isn't true by failing if the two operands
2714 are the same. */
2716 if (operand_equal_p (TREE_OPERAND (arg, 0),
2717 TREE_OPERAND (arg, 1), 0))
2718 return 0;
2720 if (*cval1 == 0)
2721 *cval1 = TREE_OPERAND (arg, 0);
2722 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2723 ;
2724 else if (*cval2 == 0)
2725 *cval2 = TREE_OPERAND (arg, 0);
2726 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2727 ;
2728 else
2729 return 0;
2731 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2732 ;
2733 else if (*cval2 == 0)
2734 *cval2 = TREE_OPERAND (arg, 1);
2735 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2736 ;
2737 else
2738 return 0;
2740 return 1;
2742 default:
2743 return 0;
2747 /* ARG is a tree that is known to contain just arithmetic operations and
2748 comparisons. Evaluate the operations in the tree substituting NEW0 for
2749 any occurrence of OLD0 as an operand of a comparison and likewise for
2750 NEW1 and OLD1. */
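/* Illustrative example (not part of the original source, with zero and
   one standing for constant trees): calling

     eval_subst (arg, x, zero, y, one)

   on ARG = (x < y) && (y == x) rewrites every comparison operand,
   giving (zero < one) && (one == zero), which the surrounding fold
   calls can then collapse to a constant.  */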
2752 static tree
2753 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2755 tree type = TREE_TYPE (arg);
2756 enum tree_code code = TREE_CODE (arg);
2757 enum tree_code_class class = TREE_CODE_CLASS (code);
2759 /* We can handle some of the tcc_expression cases here. */
2760 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2761 class = tcc_unary;
2762 else if (class == tcc_expression
2763 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2764 class = tcc_binary;
2766 switch (class)
2768 case tcc_unary:
2769 return fold (build1 (code, type,
2770 eval_subst (TREE_OPERAND (arg, 0),
2771 old0, new0, old1, new1)));
2773 case tcc_binary:
2774 return fold (build2 (code, type,
2775 eval_subst (TREE_OPERAND (arg, 0),
2776 old0, new0, old1, new1),
2777 eval_subst (TREE_OPERAND (arg, 1),
2778 old0, new0, old1, new1)));
2780 case tcc_expression:
2781 switch (code)
2783 case SAVE_EXPR:
2784 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2786 case COMPOUND_EXPR:
2787 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2789 case COND_EXPR:
2790 return fold (build3 (code, type,
2791 eval_subst (TREE_OPERAND (arg, 0),
2792 old0, new0, old1, new1),
2793 eval_subst (TREE_OPERAND (arg, 1),
2794 old0, new0, old1, new1),
2795 eval_subst (TREE_OPERAND (arg, 2),
2796 old0, new0, old1, new1)));
2797 default:
2798 break;
2800 /* Fall through - ??? */
2802 case tcc_comparison:
2804 tree arg0 = TREE_OPERAND (arg, 0);
2805 tree arg1 = TREE_OPERAND (arg, 1);
2807 /* We need to check both for exact equality and tree equality. The
2808 former will be true if the operand has a side-effect. In that
2809 case, we know the operand occurred exactly once. */
2811 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2812 arg0 = new0;
2813 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2814 arg0 = new1;
2816 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2817 arg1 = new0;
2818 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2819 arg1 = new1;
2821 return fold (build2 (code, type, arg0, arg1));
2824 default:
2825 return arg;
2829 /* Return a tree for the case when the result of an expression is RESULT
2830 converted to TYPE and OMITTED was previously an operand of the expression
2831 but is now not needed (e.g., we folded OMITTED * 0).
2833 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2834 the conversion of RESULT to TYPE. */
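/* Illustrative example (not part of the original source): when fold
   reduces f () * 0, omit_one_operand (type, integer_zero_node, call)
   yields COMPOUND_EXPR <f (), 0>, preserving the call's side effects
   while discarding the multiplication itself.  */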
2836 tree
2837 omit_one_operand (tree type, tree result, tree omitted)
2839 tree t = fold_convert (type, result);
2841 if (TREE_SIDE_EFFECTS (omitted))
2842 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2844 return non_lvalue (t);
2847 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2849 static tree
2850 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2852 tree t = fold_convert (type, result);
2854 if (TREE_SIDE_EFFECTS (omitted))
2855 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2857 return pedantic_non_lvalue (t);
2860 /* Return a tree for the case when the result of an expression is RESULT
2861 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2862 of the expression but are now not needed.
2864 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2865 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2866 evaluated before OMITTED2. Otherwise, if neither has side effects,
2867 just do the conversion of RESULT to TYPE. */
2869 tree
2870 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2872 tree t = fold_convert (type, result);
2874 if (TREE_SIDE_EFFECTS (omitted2))
2875 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2876 if (TREE_SIDE_EFFECTS (omitted1))
2877 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2879 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2883 /* Return a simplified tree node for the truth-negation of ARG. This
2884 never alters ARG itself. We assume that ARG is an operation that
2885 returns a truth value (0 or 1).
2887 FIXME: one would think we would fold the result, but it causes
2888 problems with the dominator optimizer. */
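/* Illustrative example (not part of the original source): the inversion
   recurses through logical operators by De Morgan's laws, so !(a && b)
   becomes !a || !b and !(a ? b : c) becomes a ? !b : !c, while a bare
   comparison such as a == b is flipped in place to a != b by
   invert_tree_comparison above.  */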
2889 tree
2890 invert_truthvalue (tree arg)
2892 tree type = TREE_TYPE (arg);
2893 enum tree_code code = TREE_CODE (arg);
2895 if (code == ERROR_MARK)
2896 return arg;
2898 /* If this is a comparison, we can simply invert it, except for
2899 floating-point non-equality comparisons, in which case we just
2900 enclose a TRUTH_NOT_EXPR around what we have. */
2902 if (TREE_CODE_CLASS (code) == tcc_comparison)
2904 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2905 if (FLOAT_TYPE_P (op_type)
2906 && flag_trapping_math
2907 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2908 && code != NE_EXPR && code != EQ_EXPR)
2909 return build1 (TRUTH_NOT_EXPR, type, arg);
2910 else
2912 code = invert_tree_comparison (code,
2913 HONOR_NANS (TYPE_MODE (op_type)));
2914 if (code == ERROR_MARK)
2915 return build1 (TRUTH_NOT_EXPR, type, arg);
2916 else
2917 return build2 (code, type,
2918 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2922 switch (code)
2924 case INTEGER_CST:
2925 return fold_convert (type,
2926 build_int_cst (NULL_TREE, integer_zerop (arg)));
2928 case TRUTH_AND_EXPR:
2929 return build2 (TRUTH_OR_EXPR, type,
2930 invert_truthvalue (TREE_OPERAND (arg, 0)),
2931 invert_truthvalue (TREE_OPERAND (arg, 1)));
2933 case TRUTH_OR_EXPR:
2934 return build2 (TRUTH_AND_EXPR, type,
2935 invert_truthvalue (TREE_OPERAND (arg, 0)),
2936 invert_truthvalue (TREE_OPERAND (arg, 1)));
2938 case TRUTH_XOR_EXPR:
2939 /* Here we can invert either operand. We invert the first operand
2940 unless the second operand is a TRUTH_NOT_EXPR, in which case our
2941 result is the XOR of the first operand with the inside of the
2942 negation of the second operand. */
2944 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2945 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2946 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2947 else
2948 return build2 (TRUTH_XOR_EXPR, type,
2949 invert_truthvalue (TREE_OPERAND (arg, 0)),
2950 TREE_OPERAND (arg, 1));
2952 case TRUTH_ANDIF_EXPR:
2953 return build2 (TRUTH_ORIF_EXPR, type,
2954 invert_truthvalue (TREE_OPERAND (arg, 0)),
2955 invert_truthvalue (TREE_OPERAND (arg, 1)));
2957 case TRUTH_ORIF_EXPR:
2958 return build2 (TRUTH_ANDIF_EXPR, type,
2959 invert_truthvalue (TREE_OPERAND (arg, 0)),
2960 invert_truthvalue (TREE_OPERAND (arg, 1)));
2962 case TRUTH_NOT_EXPR:
2963 return TREE_OPERAND (arg, 0);
2965 case COND_EXPR:
2966 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2967 invert_truthvalue (TREE_OPERAND (arg, 1)),
2968 invert_truthvalue (TREE_OPERAND (arg, 2)));
2970 case COMPOUND_EXPR:
2971 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2972 invert_truthvalue (TREE_OPERAND (arg, 1)));
2974 case NON_LVALUE_EXPR:
2975 return invert_truthvalue (TREE_OPERAND (arg, 0));
2977 case NOP_EXPR:
2978 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2979 break;
2981 case CONVERT_EXPR:
2982 case FLOAT_EXPR:
2983 return build1 (TREE_CODE (arg), type,
2984 invert_truthvalue (TREE_OPERAND (arg, 0)));
2986 case BIT_AND_EXPR:
2987 if (!integer_onep (TREE_OPERAND (arg, 1)))
2988 break;
2989 return build2 (EQ_EXPR, type, arg,
2990 fold_convert (type, integer_zero_node));
2992 case SAVE_EXPR:
2993 return build1 (TRUTH_NOT_EXPR, type, arg);
2995 case CLEANUP_POINT_EXPR:
2996 return build1 (CLEANUP_POINT_EXPR, type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)));
2999 default:
3000 break;
3002 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3003 return build1 (TRUTH_NOT_EXPR, type, arg);
3006 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3007 operands are another bit-wise operation with a common input. If so,
3008 distribute the bit operations to save an operation and possibly two if
3009 constants are involved. For example, convert
3010 (A | B) & (A | C) into A | (B & C)
3011 Further simplification will occur if B and C are constants.
3013 If this optimization cannot be done, 0 will be returned. */
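/* Illustrative example (not part of the original source): with
   constants involved, (x | 3) & (x | 5) distributes to x | (3 & 5),
   and the inner fold reduces the constant part immediately, leaving
   just x | 1.  */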
3015 static tree
3016 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3018 tree common;
3019 tree left, right;
3021 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3022 || TREE_CODE (arg0) == code
3023 || (TREE_CODE (arg0) != BIT_AND_EXPR
3024 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3025 return 0;
3027 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3029 common = TREE_OPERAND (arg0, 0);
3030 left = TREE_OPERAND (arg0, 1);
3031 right = TREE_OPERAND (arg1, 1);
3033 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3035 common = TREE_OPERAND (arg0, 0);
3036 left = TREE_OPERAND (arg0, 1);
3037 right = TREE_OPERAND (arg1, 0);
3039 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3041 common = TREE_OPERAND (arg0, 1);
3042 left = TREE_OPERAND (arg0, 0);
3043 right = TREE_OPERAND (arg1, 1);
3045 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3047 common = TREE_OPERAND (arg0, 1);
3048 left = TREE_OPERAND (arg0, 0);
3049 right = TREE_OPERAND (arg1, 0);
3051 else
3052 return 0;
3054 return fold (build2 (TREE_CODE (arg0), type, common,
3055 fold (build2 (code, type, left, right))));
3058 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3059 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3061 static tree
3062 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3063 int unsignedp)
3065 tree result = build3 (BIT_FIELD_REF, type, inner,
3066 size_int (bitsize), bitsize_int (bitpos));
3068 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3070 return result;
3073 /* Optimize a bit-field compare.
3075 There are two cases: First is a compare against a constant and the
3076 second is a comparison of two items where the fields are at the same
3077 bit position relative to the start of a chunk (byte, halfword, word)
3078 large enough to contain it. In these cases we can avoid the shift
3079 implicit in bitfield extractions.
3081 For constants, we emit a compare of the shifted constant with the
3082 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3083 compared. For two fields at the same position, we do the ANDs with the
3084 similar mask and compare the result of the ANDs.
3086 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3087 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3088 are the left and right operands of the comparison, respectively.
3090 If the optimization described above can be done, we return the resulting
3091 tree. Otherwise we return zero. */
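/* Illustrative sketch (not part of the original source): given

     struct { unsigned a : 3, b : 3; } s;

   the test s.b == 5 need not extract and shift the field; on a
   little-endian target it can be compiled roughly as

     (word_containing_s & (7 << 3)) == (5 << 3)

   i.e. a masked load of the containing unit compared against the
   suitably shifted constant.  */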
3093 static tree
3094 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3095 tree lhs, tree rhs)
3097 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3098 tree type = TREE_TYPE (lhs);
3099 tree signed_type, unsigned_type;
3100 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3101 enum machine_mode lmode, rmode, nmode;
3102 int lunsignedp, runsignedp;
3103 int lvolatilep = 0, rvolatilep = 0;
3104 tree linner, rinner = NULL_TREE;
3105 tree mask;
3106 tree offset;
3108 /* Get all the information about the extractions being done. If the bit size
3109 is the same as the size of the underlying object, we aren't doing an
3110 extraction at all and so can do nothing. We also don't want to
3111 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3112 then will no longer be able to replace it. */
3113 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3114 &lunsignedp, &lvolatilep);
3115 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3116 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3117 return 0;
3119 if (!const_p)
3121 /* If this is not a constant, we can only do something if bit positions,
3122 sizes, and signedness are the same. */
3123 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3124 &runsignedp, &rvolatilep);
3126 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3127 || lunsignedp != runsignedp || offset != 0
3128 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3129 return 0;
3132 /* See if we can find a mode to refer to this field. We should be able to,
3133 but fail if we can't. */
3134 nmode = get_best_mode (lbitsize, lbitpos,
3135 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3136 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3137 TYPE_ALIGN (TREE_TYPE (rinner))),
3138 word_mode, lvolatilep || rvolatilep);
3139 if (nmode == VOIDmode)
3140 return 0;
3142 /* Set signed and unsigned types of the precision of this mode for the
3143 shifts below. */
3144 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3145 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3147 /* Compute the bit position and size for the new reference and our offset
3148 within it. If the new reference is the same size as the original, we
3149 won't optimize anything, so return zero. */
3150 nbitsize = GET_MODE_BITSIZE (nmode);
3151 nbitpos = lbitpos & ~ (nbitsize - 1);
3152 lbitpos -= nbitpos;
3153 if (nbitsize == lbitsize)
3154 return 0;
3156 if (BYTES_BIG_ENDIAN)
3157 lbitpos = nbitsize - lbitsize - lbitpos;
3159 /* Make the mask to be used against the extracted field. */
3160 mask = build_int_cst (unsigned_type, -1);
3161 mask = force_fit_type (mask, 0, false, false);
3162 mask = fold_convert (unsigned_type, mask);
3163 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3164 mask = const_binop (RSHIFT_EXPR, mask,
3165 size_int (nbitsize - lbitsize - lbitpos), 0);
3167 if (! const_p)
3168 /* If not comparing with constant, just rework the comparison
3169 and return. */
3170 return build2 (code, compare_type,
3171 build2 (BIT_AND_EXPR, unsigned_type,
3172 make_bit_field_ref (linner, unsigned_type,
3173 nbitsize, nbitpos, 1),
3174 mask),
3175 build2 (BIT_AND_EXPR, unsigned_type,
3176 make_bit_field_ref (rinner, unsigned_type,
3177 nbitsize, nbitpos, 1),
3178 mask));
3180 /* Otherwise, we are handling the constant case. See if the constant is too
3181 big for the field. Warn and return a tree for 0 (false) if so. We do
3182 this not only for its own sake, but to avoid having to test for this
3183 error case below. If we didn't, we might generate wrong code.
3185 For unsigned fields, the constant shifted right by the field length should
3186 be all zero. For signed fields, the high-order bits should agree with
3187 the sign bit. */
3189 if (lunsignedp)
3191 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3192 fold_convert (unsigned_type, rhs),
3193 size_int (lbitsize), 0)))
3195 warning ("comparison is always %d due to width of bit-field",
3196 code == NE_EXPR);
3197 return constant_boolean_node (code == NE_EXPR, compare_type);
3200 else
3202 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3203 size_int (lbitsize - 1), 0);
3204 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3206 warning ("comparison is always %d due to width of bit-field",
3207 code == NE_EXPR);
3208 return constant_boolean_node (code == NE_EXPR, compare_type);
3212 /* Single-bit compares should always be against zero. */
3213 if (lbitsize == 1 && ! integer_zerop (rhs))
3215 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3216 rhs = fold_convert (type, integer_zero_node);
3219 /* Make a new bitfield reference, shift the constant over the
3220 appropriate number of bits and mask it with the computed mask
3221 (in case this was a signed field). If we changed it, make a new one. */
3222 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3223 if (lvolatilep)
3225 TREE_SIDE_EFFECTS (lhs) = 1;
3226 TREE_THIS_VOLATILE (lhs) = 1;
3229 rhs = fold (const_binop (BIT_AND_EXPR,
3230 const_binop (LSHIFT_EXPR,
3231 fold_convert (unsigned_type, rhs),
3232 size_int (lbitpos), 0),
3233 mask, 0));
3235 return build2 (code, compare_type,
3236 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3237 rhs);
3240 /* Subroutine for fold_truthop: decode a field reference.
3242 If EXP is a comparison reference, we return the innermost reference.
3244 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3245 set to the starting bit number.
3247 If the innermost field can be completely contained in a mode-sized
3248 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3250 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3251 otherwise it is not changed.
3253 *PUNSIGNEDP is set to the signedness of the field.
3255 *PMASK is set to the mask used. This is either contained in a
3256 BIT_AND_EXPR or derived from the width of the field.
3258 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3260 Return 0 if this is not a component reference or is one that we can't
3261 do anything with. */
3263 static tree
3264 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3265 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3266 int *punsignedp, int *pvolatilep,
3267 tree *pmask, tree *pand_mask)
3269 tree outer_type = 0;
3270 tree and_mask = 0;
3271 tree mask, inner, offset;
3272 tree unsigned_type;
3273 unsigned int precision;
3275 /* All the optimizations using this function assume integer fields.
3276 There are problems with FP fields since the type_for_size call
3277 below can fail for, e.g., XFmode. */
3278 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3279 return 0;
3281 /* We are interested in the bare arrangement of bits, so strip everything
3282 that doesn't affect the machine mode. However, record the type of the
3283 outermost expression if it may matter below. */
3284 if (TREE_CODE (exp) == NOP_EXPR
3285 || TREE_CODE (exp) == CONVERT_EXPR
3286 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3287 outer_type = TREE_TYPE (exp);
3288 STRIP_NOPS (exp);
3290 if (TREE_CODE (exp) == BIT_AND_EXPR)
3292 and_mask = TREE_OPERAND (exp, 1);
3293 exp = TREE_OPERAND (exp, 0);
3294 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3295 if (TREE_CODE (and_mask) != INTEGER_CST)
3296 return 0;
3299 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3300 punsignedp, pvolatilep);
3301 if ((inner == exp && and_mask == 0)
3302 || *pbitsize < 0 || offset != 0
3303 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3304 return 0;
3306 /* If the number of bits in the reference is the same as the bitsize of
3307 the outer type, then the outer type gives the signedness. Otherwise
3308 (in case of a small bitfield) the signedness is unchanged. */
3309 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3310 *punsignedp = TYPE_UNSIGNED (outer_type);
3312 /* Compute the mask to access the bitfield. */
3313 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3314 precision = TYPE_PRECISION (unsigned_type);
3316 mask = build_int_cst (unsigned_type, -1);
3317 mask = force_fit_type (mask, 0, false, false);
3319 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3320 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3322 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3323 if (and_mask != 0)
3324 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3325 fold_convert (unsigned_type, and_mask), mask));
3327 *pmask = mask;
3328 *pand_mask = and_mask;
3329 return inner;
3332 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3333 bit positions. */
3335 static int
3336 all_ones_mask_p (tree mask, int size)
3338 tree type = TREE_TYPE (mask);
3339 unsigned int precision = TYPE_PRECISION (type);
3340 tree tmask;
3342 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3343 tmask = force_fit_type (tmask, 0, false, false);
3345 return
3346 tree_int_cst_equal (mask,
3347 const_binop (RSHIFT_EXPR,
3348 const_binop (LSHIFT_EXPR, tmask,
3349 size_int (precision - size),
3350 0),
3351 size_int (precision - size), 0));
3354 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3355 represents the sign bit of EXP's type. If EXP represents a sign
3356 or zero extension, also test VAL against the unextended type.
3357 The return value is the (sub)expression whose sign bit is VAL,
3358 or NULL_TREE otherwise. */
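/* Illustrative example (not part of the original source): for a 32-bit
   int expression EXP, sign_bit_p (EXP, VAL) returns EXP when VAL is the
   constant 0x80000000; and when EXP is (int) c for a signed char c,
   VAL == 0x80 matches the unextended type through the recursive
   NOP_EXPR case, which returns c itself.  */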
3360 static tree
3361 sign_bit_p (tree exp, tree val)
3363 unsigned HOST_WIDE_INT mask_lo, lo;
3364 HOST_WIDE_INT mask_hi, hi;
3365 int width;
3366 tree t;
3368 /* Tree EXP must have an integral type. */
3369 t = TREE_TYPE (exp);
3370 if (! INTEGRAL_TYPE_P (t))
3371 return NULL_TREE;
3373 /* Tree VAL must be an integer constant. */
3374 if (TREE_CODE (val) != INTEGER_CST
3375 || TREE_CONSTANT_OVERFLOW (val))
3376 return NULL_TREE;
3378 width = TYPE_PRECISION (t);
3379 if (width > HOST_BITS_PER_WIDE_INT)
3381 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3382 lo = 0;
3384 mask_hi = ((unsigned HOST_WIDE_INT) -1
3385 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3386 mask_lo = -1;
3388 else
3390 hi = 0;
3391 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3393 mask_hi = 0;
3394 mask_lo = ((unsigned HOST_WIDE_INT) -1
3395 >> (HOST_BITS_PER_WIDE_INT - width));
3398 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3399 treat VAL as if it were unsigned. */
3400 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3401 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3402 return exp;
3404 /* Handle extension from a narrower type. */
3405 if (TREE_CODE (exp) == NOP_EXPR
3406 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3407 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3409 return NULL_TREE;
3412 /* Subroutine for fold_truthop: determine if an operand is simple enough
3413 to be evaluated unconditionally. */
3415 static int
3416 simple_operand_p (tree exp)
3418 /* Strip any conversions that don't change the machine mode. */
3419 while ((TREE_CODE (exp) == NOP_EXPR
3420 || TREE_CODE (exp) == CONVERT_EXPR)
3421 && (TYPE_MODE (TREE_TYPE (exp))
3422 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3423 exp = TREE_OPERAND (exp, 0);
3425 return (CONSTANT_CLASS_P (exp)
3426 || (DECL_P (exp)
3427 && ! TREE_ADDRESSABLE (exp)
3428 && ! TREE_THIS_VOLATILE (exp)
3429 && ! DECL_NONLOCAL (exp)
3430 /* Don't regard global variables as simple. They may be
3431 allocated in ways unknown to the compiler (shared memory,
3432 #pragma weak, etc). */
3433 && ! TREE_PUBLIC (exp)
3434 && ! DECL_EXTERNAL (exp)
3435 /* Loading a static variable is unduly expensive, but global
3436 registers aren't expensive. */
3437 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3440 /* The following functions are subroutines to fold_range_test and allow it to
3441 try to change a logical combination of comparisons into a range test.
3443 For example, both
3444 X == 2 || X == 3 || X == 4 || X == 5
3445 and
3446 X >= 2 && X <= 5
3447 are converted to
3448 (unsigned) (X - 2) <= 3
3450 We describe each set of comparisons as being either inside or outside
3451 a range, using a variable named like IN_P, and then describe the
3452 range with a lower and upper bound. If one of the bounds is omitted,
3453 it represents either the highest or lowest value of the type.
3455 In the comments below, we represent a range by two numbers in brackets
3456 preceded by a "+" to designate being inside that range, or a "-" to
3457 designate being outside that range, so the condition can be inverted by
3458 flipping the prefix. An omitted bound is represented by a "-". For
3459 example, "- [-, 10]" means being outside the range starting at the lowest
3460 possible value and ending at 10, in other words, being greater than 10.
3461 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3462 always false.
3464 We set up things so that the missing bounds are handled in a consistent
3465 manner so neither a missing bound nor "true" and "false" need to be
3466 handled using a special case. */
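/* Illustrative example (not part of the original source): the
   conversion above works because subtracting the low bound shifts the
   range to start at zero, and a single unsigned comparison then checks
   both ends: if X < 2, the unsigned value of X - 2 wraps around to a
   huge number and fails <= 3 just as surely as X > 5 does.  */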
3468 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3469 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3470 and UPPER1_P are nonzero if the respective argument is an upper bound
3471 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3472 must be specified for a comparison. ARG1 will be converted to ARG0's
3473 type if both are specified. */
3475 static tree
3476 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3477 tree arg1, int upper1_p)
3479 tree tem;
3480 int result;
3481 int sgn0, sgn1;
3483 /* If neither arg represents infinity, do the normal operation.
3484 Else, if not a comparison, return infinity. Else handle the special
3485 comparison rules. Note that most of the cases below won't occur, but
3486 are handled for consistency. */
3488 if (arg0 != 0 && arg1 != 0)
3490 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3491 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3492 STRIP_NOPS (tem);
3493 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3496 if (TREE_CODE_CLASS (code) != tcc_comparison)
3497 return 0;
3499 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3500 for neither. In real maths, we cannot assume open ended ranges are
3501 the same. But, this is computer arithmetic, where numbers are finite.
3502 We can therefore substitute for any unbounded bound a value Z that is
3503 greater than any representable number. This permits
3504 us to treat unbounded ranges as equal. */
3505 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3506 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3507 switch (code)
3509 case EQ_EXPR:
3510 result = sgn0 == sgn1;
3511 break;
3512 case NE_EXPR:
3513 result = sgn0 != sgn1;
3514 break;
3515 case LT_EXPR:
3516 result = sgn0 < sgn1;
3517 break;
3518 case LE_EXPR:
3519 result = sgn0 <= sgn1;
3520 break;
3521 case GT_EXPR:
3522 result = sgn0 > sgn1;
3523 break;
3524 case GE_EXPR:
3525 result = sgn0 >= sgn1;
3526 break;
3527 default:
3528 gcc_unreachable ();
3531 return constant_boolean_node (result, type);
3534 /* Given EXP, a logical expression, set the range it is testing into
3535 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3536 actually being tested. *PLOW and *PHIGH will be made of the same type
3537 as the returned expression. If EXP is not a comparison, we will most
3538 likely not be returning a useful value and range. */
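/* Illustrative example (not part of the original source): for
   EXP = !(x > 10) with integer x, make_range returns x with *PIN_P set
   to 1 and the range [-, 10]: the GT_EXPR case records the bounds of
   "- [-, 10]" and the initial TRUTH_NOT_EXPR flipped IN_P, so the
   final test is x in [-, 10], i.e. x <= 10.  */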
3540 static tree
3541 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3543 enum tree_code code;
3544 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3545 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3546 int in_p, n_in_p;
3547 tree low, high, n_low, n_high;
3549 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3550 and see if we can refine the range. Some of the cases below may not
3551 happen, but it doesn't seem worth worrying about this. We "continue"
3552 the outer loop when we've changed something; otherwise we "break"
3553 the switch, which will "break" the while. */
3555 in_p = 0;
3556 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3558 while (1)
3560 code = TREE_CODE (exp);
3561 exp_type = TREE_TYPE (exp);
3563 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3565 if (first_rtl_op (code) > 0)
3566 arg0 = TREE_OPERAND (exp, 0);
3567 if (TREE_CODE_CLASS (code) == tcc_comparison
3568 || TREE_CODE_CLASS (code) == tcc_unary
3569 || TREE_CODE_CLASS (code) == tcc_binary)
3570 arg0_type = TREE_TYPE (arg0);
3571 if (TREE_CODE_CLASS (code) == tcc_binary
3572 || TREE_CODE_CLASS (code) == tcc_comparison
3573 || (TREE_CODE_CLASS (code) == tcc_expression
3574 && TREE_CODE_LENGTH (code) > 1))
3575 arg1 = TREE_OPERAND (exp, 1);
3578 switch (code)
3580 case TRUTH_NOT_EXPR:
3581 in_p = ! in_p, exp = arg0;
3582 continue;
3584 case EQ_EXPR: case NE_EXPR:
3585 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3586 /* We can only do something if the range is testing for zero
3587 and if the second operand is an integer constant. Note that
3588 saying something is "in" the range we make is done by
3589 complementing IN_P, since it is set in the initial case of
3590 being not equal to zero; "out" is leaving it alone. */
3591 if (low == 0 || high == 0
3592 || ! integer_zerop (low) || ! integer_zerop (high)
3593 || TREE_CODE (arg1) != INTEGER_CST)
3594 break;
3596 switch (code)
3598 case NE_EXPR: /* - [c, c] */
3599 low = high = arg1;
3600 break;
3601 case EQ_EXPR: /* + [c, c] */
3602 in_p = ! in_p, low = high = arg1;
3603 break;
3604 case GT_EXPR: /* - [-, c] */
3605 low = 0, high = arg1;
3606 break;
3607 case GE_EXPR: /* + [c, -] */
3608 in_p = ! in_p, low = arg1, high = 0;
3609 break;
3610 case LT_EXPR: /* - [c, -] */
3611 low = arg1, high = 0;
3612 break;
3613 case LE_EXPR: /* + [-, c] */
3614 in_p = ! in_p, low = 0, high = arg1;
3615 break;
3616 default:
3617 gcc_unreachable ();
3620 /* If this is an unsigned comparison, we also know that EXP is
3621 greater than or equal to zero. We base the range tests we make
3622 on that fact, so we record it here so we can parse existing
3623 range tests. We test arg0_type since often the return type
3624 of, e.g. EQ_EXPR, is boolean. */
3625 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3627 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3628 in_p, low, high, 1,
3629 fold_convert (arg0_type, integer_zero_node),
3630 NULL_TREE))
3631 break;
3633 in_p = n_in_p, low = n_low, high = n_high;
3635 /* If the high bound is missing, but we have a nonzero low
3636 bound, reverse the range so it goes from zero to the low bound
3637 minus 1. */
3638 if (high == 0 && low && ! integer_zerop (low))
3640 in_p = ! in_p;
3641 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3642 integer_one_node, 0);
3643 low = fold_convert (arg0_type, integer_zero_node);
3647 exp = arg0;
3648 continue;
3650 case NEGATE_EXPR:
3651 /* (-x) IN [a,b] -> x in [-b, -a] */
3652 n_low = range_binop (MINUS_EXPR, exp_type,
3653 fold_convert (exp_type, integer_zero_node),
3654 0, high, 1);
3655 n_high = range_binop (MINUS_EXPR, exp_type,
3656 fold_convert (exp_type, integer_zero_node),
3657 0, low, 0);
3658 low = n_low, high = n_high;
3659 exp = arg0;
3660 continue;
3662 case BIT_NOT_EXPR:
3663 /* ~ X -> -X - 1 */
3664 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3665 fold_convert (exp_type, integer_one_node));
3666 continue;
3668 case PLUS_EXPR: case MINUS_EXPR:
3669 if (TREE_CODE (arg1) != INTEGER_CST)
3670 break;
3672 /* If EXP is signed, any overflow in the computation is undefined,
3673 so we don't worry about it so long as our computations on
3674 the bounds don't overflow. For unsigned, overflow is defined
3675 and this is exactly the right thing. */
3676 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3677 arg0_type, low, 0, arg1, 0);
3678 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3679 arg0_type, high, 1, arg1, 0);
3680 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3681 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3682 break;
3684 /* Check for an unsigned range which has wrapped around the maximum
3685 value thus making n_high < n_low, and normalize it. */
3686 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3688 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3689 integer_one_node, 0);
3690 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3691 integer_one_node, 0);
3693 /* If the range is of the form +/- [ x+1, x ], we won't
3694 be able to normalize it. But then, it represents the
3695 whole range or the empty set, so make it
3696 +/- [ -, - ]. */
3697 if (tree_int_cst_equal (n_low, low)
3698 && tree_int_cst_equal (n_high, high))
3699 low = high = 0;
3700 else
3701 in_p = ! in_p;
3703 else
3704 low = n_low, high = n_high;
3706 exp = arg0;
3707 continue;
3709 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3710 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3711 break;
3713 if (! INTEGRAL_TYPE_P (arg0_type)
3714 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3715 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3716 break;
3718 n_low = low, n_high = high;
3720 if (n_low != 0)
3721 n_low = fold_convert (arg0_type, n_low);
3723 if (n_high != 0)
3724 n_high = fold_convert (arg0_type, n_high);
3727 /* If we're converting arg0 from an unsigned type to exp's
3728 signed type, we will be doing the comparison as unsigned.
3729 The tests above have already verified that LOW and HIGH
3730 are both positive.
3732 So we have to ensure that we will handle large unsigned
3733 values the same way that the current signed bounds treat
3734 negative values. */
3736 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3738 tree high_positive;
3739 tree equiv_type = lang_hooks.types.type_for_mode
3740 (TYPE_MODE (arg0_type), 1);
3742 /* A range without an upper bound is, naturally, unbounded.
3743 Since convert would have cropped a very large value, use
3744 the max value for the destination type. */
3745 high_positive
3746 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3747 : TYPE_MAX_VALUE (arg0_type);
3749 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3750 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3751 fold_convert (arg0_type,
3752 high_positive),
3753 fold_convert (arg0_type,
3754 integer_one_node)));
3756 /* If the low bound is specified, "and" the range with the
3757 range for which the original unsigned value will be
3758 positive. */
3759 if (low != 0)
3761 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3762 1, n_low, n_high, 1,
3763 fold_convert (arg0_type,
3764 integer_zero_node),
3765 high_positive))
3766 break;
3768 in_p = (n_in_p == in_p);
3770 else
3772 /* Otherwise, "or" the range with the range of the input
3773 that will be interpreted as negative. */
3774 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3775 0, n_low, n_high, 1,
3776 fold_convert (arg0_type,
3777 integer_zero_node),
3778 high_positive))
3779 break;
3781 in_p = (in_p != n_in_p);
3785 exp = arg0;
3786 low = n_low, high = n_high;
3787 continue;
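/* Added worked example (commentary only): let EXP be (signed char) u
   with u of type unsigned char, and let the range so far be [-, 20],
   i.e. (signed char) u <= 20.  Every u in [128, 255] is read as a
   negative value and also satisfies the test, so the merge above
   turns the range on u into the complement of [21, 127].  With a low
   bound present, e.g. (signed char) u in [10, 20], intersecting with
   the positive range [0, 127] gives u in [10, 20] instead.  */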
3789 default:
3790 break;
3793 break;
3796 /* If EXP is a constant, we can evaluate whether this is true or false. */
3797 if (TREE_CODE (exp) == INTEGER_CST)
3799 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3800 exp, 0, low, 0))
3801 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3802 exp, 1, high, 1)));
3803 low = high = 0;
3804 exp = 0;
3807 *pin_p = in_p, *plow = low, *phigh = high;
3808 return exp;
3811 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3812 type, TYPE, return an expression to test if EXP is in (or out of, depending
3813 on IN_P) the range. Return 0 if the test couldn't be created. */
3815 static tree
3816 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3818 tree etype = TREE_TYPE (exp);
3819 tree value;
3821 if (! in_p)
3823 value = build_range_check (type, exp, 1, low, high);
3824 if (value != 0)
3825 return invert_truthvalue (value);
3827 return 0;
3830 if (low == 0 && high == 0)
3831 return fold_convert (type, integer_one_node);
3833 if (low == 0)
3834 return fold (build2 (LE_EXPR, type, exp, high));
3836 if (high == 0)
3837 return fold (build2 (GE_EXPR, type, exp, low));
3839 if (operand_equal_p (low, high, 0))
3840 return fold (build2 (EQ_EXPR, type, exp, low));
3842 if (integer_zerop (low))
3844 if (! TYPE_UNSIGNED (etype))
3846 etype = lang_hooks.types.unsigned_type (etype);
3847 high = fold_convert (etype, high);
3848 exp = fold_convert (etype, exp);
3850 return build_range_check (type, exp, 1, 0, high);
3853 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3854 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3856 unsigned HOST_WIDE_INT lo;
3857 HOST_WIDE_INT hi;
3858 int prec;
3860 prec = TYPE_PRECISION (etype);
3861 if (prec <= HOST_BITS_PER_WIDE_INT)
3863 hi = 0;
3864 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3866 else
3868 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3869 lo = (unsigned HOST_WIDE_INT) -1;
3872 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3874 if (TYPE_UNSIGNED (etype))
3876 etype = lang_hooks.types.signed_type (etype);
3877 exp = fold_convert (etype, exp);
3879 return fold (build2 (GT_EXPR, type, exp,
3880 fold_convert (etype, integer_zero_node)));
3884 value = const_binop (MINUS_EXPR, high, low, 0);
3885 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3887 tree utype, minv, maxv;
3889 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3890 for the type in question, as we rely on this here. */
3891 switch (TREE_CODE (etype))
3893 case INTEGER_TYPE:
3894 case ENUMERAL_TYPE:
3895 case CHAR_TYPE:
3896 utype = lang_hooks.types.unsigned_type (etype);
3897 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3898 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3899 integer_one_node, 1);
3900 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3901 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3902 minv, 1, maxv, 1)))
3904 etype = utype;
3905 high = fold_convert (etype, high);
3906 low = fold_convert (etype, low);
3907 exp = fold_convert (etype, exp);
3908 value = const_binop (MINUS_EXPR, high, low, 0);
3910 break;
3911 default:
3912 break;
3916 if (value != 0 && ! TREE_OVERFLOW (value))
3917 return build_range_check (type,
3918 fold (build2 (MINUS_EXPR, etype, exp, low)),
3919 1, fold_convert (etype, integer_zero_node),
3920 value);
3922 return 0;
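/* Added illustration (hypothetical helper, not part of this file):
   the EXP - LOW against HIGH - LOW rewrite above is the classic
   single-comparison range idiom.  A standalone C analogue, assuming
   two's complement wrap-around:  */
#if 0 /* example only */
static int
in_range_example (int x, int lo, int hi)
{
  /* If x < lo the subtraction wraps to a huge unsigned value and the
     comparison fails, so one unsigned test replaces two signed ones.  */
  return (unsigned int) (x - lo) <= (unsigned int) (hi - lo);
}
#endif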
3925 /* Given two ranges, see if we can merge them into one. Return 1 if we
3926 can, 0 if we can't. Set the output range into the specified parameters. */
3928 static int
3929 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3930 tree high0, int in1_p, tree low1, tree high1)
3932 int no_overlap;
3933 int subset;
3934 int temp;
3935 tree tem;
3936 int in_p;
3937 tree low, high;
3938 int lowequal = ((low0 == 0 && low1 == 0)
3939 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3940 low0, 0, low1, 0)));
3941 int highequal = ((high0 == 0 && high1 == 0)
3942 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3943 high0, 1, high1, 1)));
3945 /* Make range 0 be the range that starts first, or ends last if they
3946 start at the same value. Swap them if it isn't. */
3947 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3948 low0, 0, low1, 0))
3949 || (lowequal
3950 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3951 high1, 1, high0, 1))))
3953 temp = in0_p, in0_p = in1_p, in1_p = temp;
3954 tem = low0, low0 = low1, low1 = tem;
3955 tem = high0, high0 = high1, high1 = tem;
3958 /* Now flag two cases, whether the ranges are disjoint or whether the
3959 second range is totally subsumed in the first. Note that the tests
3960 below are simplified by the ones above. */
3961 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3962 high0, 1, low1, 0));
3963 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3964 high1, 1, high0, 1));
3966 /* We now have four cases, depending on whether we are including or
3967 excluding the two ranges. */
3968 if (in0_p && in1_p)
3970 /* If they don't overlap, the result is false. If the second range
3971 is a subset it is the result. Otherwise, the range is from the start
3972 of the second to the end of the first. */
3973 if (no_overlap)
3974 in_p = 0, low = high = 0;
3975 else if (subset)
3976 in_p = 1, low = low1, high = high1;
3977 else
3978 in_p = 1, low = low1, high = high0;
3981 else if (in0_p && ! in1_p)
3983 /* If they don't overlap, the result is the first range. If they are
3984 equal, the result is false. If the second range is a subset of the
3985 first, and the ranges begin at the same place, we go from just after
3986 the end of the first range to the end of the second. If the second
3987 range is not a subset of the first, or if it is a subset and both
3988 ranges end at the same place, the range starts at the start of the
3989 first range and ends just before the second range.
3990 Otherwise, we can't describe this as a single range. */
3991 if (no_overlap)
3992 in_p = 1, low = low0, high = high0;
3993 else if (lowequal && highequal)
3994 in_p = 0, low = high = 0;
3995 else if (subset && lowequal)
3997 in_p = 1, high = high0;
3998 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3999 integer_one_node, 0);
4001 else if (! subset || highequal)
4003 in_p = 1, low = low0;
4004 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4005 integer_one_node, 0);
4007 else
4008 return 0;
4011 else if (! in0_p && in1_p)
4013 /* If they don't overlap, the result is the second range. If the second
4014 is a subset of the first, the result is false. Otherwise,
4015 the range starts just after the first range and ends at the
4016 end of the second. */
4017 if (no_overlap)
4018 in_p = 1, low = low1, high = high1;
4019 else if (subset || highequal)
4020 in_p = 0, low = high = 0;
4021 else
4023 in_p = 1, high = high1;
4024 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4025 integer_one_node, 0);
4029 else
4031 /* The case where we are excluding both ranges. Here the complex case
4032 is if they don't overlap. In that case, the only time we have a
4033 range is if they are adjacent. If the second is a subset of the
4034 first, the result is the first. Otherwise, the range to exclude
4035 starts at the beginning of the first range and ends at the end of the
4036 second. */
4037 if (no_overlap)
4039 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4040 range_binop (PLUS_EXPR, NULL_TREE,
4041 high0, 1,
4042 integer_one_node, 1),
4043 1, low1, 0)))
4044 in_p = 0, low = low0, high = high1;
4045 else
4047 /* Canonicalize - [min, x] into - [-, x]. */
4048 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4049 switch (TREE_CODE (TREE_TYPE (low0)))
4051 case ENUMERAL_TYPE:
4052 if (TYPE_PRECISION (TREE_TYPE (low0))
4053 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4054 break;
4055 /* FALLTHROUGH */
4056 case INTEGER_TYPE:
4057 case CHAR_TYPE:
4058 if (tree_int_cst_equal (low0,
4059 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4060 low0 = 0;
4061 break;
4062 case POINTER_TYPE:
4063 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4064 && integer_zerop (low0))
4065 low0 = 0;
4066 break;
4067 default:
4068 break;
4071 /* Canonicalize - [x, max] into - [x, -]. */
4072 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4073 switch (TREE_CODE (TREE_TYPE (high1)))
4075 case ENUMERAL_TYPE:
4076 if (TYPE_PRECISION (TREE_TYPE (high1))
4077 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4078 break;
4079 /* FALLTHROUGH */
4080 case INTEGER_TYPE:
4081 case CHAR_TYPE:
4082 if (tree_int_cst_equal (high1,
4083 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4084 high1 = 0;
4085 break;
4086 case POINTER_TYPE:
4087 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4088 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4089 high1, 1,
4090 integer_one_node, 1)))
4091 high1 = 0;
4092 break;
4093 default:
4094 break;
4097 /* The ranges might also be adjacent across the maximum and
4098 minimum values of the given type. For
4099 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4100 return + [x + 1, y - 1]. */
4101 if (low0 == 0 && high1 == 0)
4103 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4104 integer_one_node, 1);
4105 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4106 integer_one_node, 0);
4107 if (low == 0 || high == 0)
4108 return 0;
4110 in_p = 1;
4112 else
4113 return 0;
4116 else if (subset)
4117 in_p = 0, low = low0, high = high0;
4118 else
4119 in_p = 0, low = low0, high = high1;
4122 *pin_p = in_p, *plow = low, *phigh = high;
4123 return 1;
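/* Added worked example (commentary only): for ch >= '0' && ch <= '9'
   the two input ranges are +[48, -] and +[-, 57].  The swap above
   makes range 0 the one that starts first, +[-, 57]; no_overlap and
   subset are then both false, so the in0_p && in1_p case returns
   +[48, 57], a single range that build_range_check can turn into one
   unsigned comparison.  */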
4127 /* Subroutine of fold, looking inside expressions of the form
4128 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4129 of the COND_EXPR. This function is being used also to optimize
4130 A op B ? C : A, by reversing the comparison first.
4132 Return a folded expression whose code is not a COND_EXPR
4133 anymore, or NULL_TREE if no folding opportunity is found. */
4135 static tree
4136 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4138 enum tree_code comp_code = TREE_CODE (arg0);
4139 tree arg00 = TREE_OPERAND (arg0, 0);
4140 tree arg01 = TREE_OPERAND (arg0, 1);
4141 tree arg1_type = TREE_TYPE (arg1);
4142 tree tem;
4144 STRIP_NOPS (arg1);
4145 STRIP_NOPS (arg2);
4147 /* If we have A op 0 ? A : -A, consider applying the following
4148 transformations:
4150 A == 0? A : -A same as -A
4151 A != 0? A : -A same as A
4152 A >= 0? A : -A same as abs (A)
4153 A > 0? A : -A same as abs (A)
4154 A <= 0? A : -A same as -abs (A)
4155 A < 0? A : -A same as -abs (A)
4157 None of these transformations work for modes with signed
4158 zeros. If A is +/-0, the first two transformations will
4159 change the sign of the result (from +0 to -0, or vice
4160 versa). The last four will fix the sign of the result,
4161 even though the original expressions could be positive or
4162 negative, depending on the sign of A.
4164 Note that all these transformations are correct if A is
4165 NaN, since the two alternatives (A and -A) are also NaNs. */
4166 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4167 ? real_zerop (arg01)
4168 : integer_zerop (arg01))
4169 && TREE_CODE (arg2) == NEGATE_EXPR
4170 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4171 switch (comp_code)
4173 case EQ_EXPR:
4174 case UNEQ_EXPR:
4175 tem = fold_convert (arg1_type, arg1);
4176 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4177 case NE_EXPR:
4178 case LTGT_EXPR:
4179 return pedantic_non_lvalue (fold_convert (type, arg1));
4180 case UNGE_EXPR:
4181 case UNGT_EXPR:
4182 if (flag_trapping_math)
4183 break;
4184 /* Fall through. */
4185 case GE_EXPR:
4186 case GT_EXPR:
4187 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4188 arg1 = fold_convert (lang_hooks.types.signed_type
4189 (TREE_TYPE (arg1)), arg1);
4190 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4191 return pedantic_non_lvalue (fold_convert (type, tem));
4192 case UNLE_EXPR:
4193 case UNLT_EXPR:
4194 if (flag_trapping_math)
4195 break;
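/* Fall through. */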
4196 case LE_EXPR:
4197 case LT_EXPR:
4198 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4199 arg1 = fold_convert (lang_hooks.types.signed_type
4200 (TREE_TYPE (arg1)), arg1);
4201 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4202 return negate_expr (fold_convert (type, tem));
4203 default:
4204 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4205 break;
4208 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4209 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4210 both transformations are correct when A is NaN: A != 0
4211 is then true, and A == 0 is false. */
4213 if (integer_zerop (arg01) && integer_zerop (arg2))
4215 if (comp_code == NE_EXPR)
4216 return pedantic_non_lvalue (fold_convert (type, arg1));
4217 else if (comp_code == EQ_EXPR)
4218 return fold_convert (type, integer_zero_node);
4221 /* Try some transformations of A op B ? A : B.
4223 A == B? A : B same as B
4224 A != B? A : B same as A
4225 A >= B? A : B same as max (A, B)
4226 A > B? A : B same as max (B, A)
4227 A <= B? A : B same as min (A, B)
4228 A < B? A : B same as min (B, A)
4230 As above, these transformations don't work in the presence
4231 of signed zeros. For example, if A and B are zeros of
4232 opposite sign, the first two transformations will change
4233 the sign of the result. In the last four, the original
4234 expressions give different results for (A=+0, B=-0) and
4235 (A=-0, B=+0), but the transformed expressions do not.
4237 The first two transformations are correct if either A or B
4238 is a NaN. In the first transformation, the condition will
4239 be false, and B will indeed be chosen. In the case of the
4240 second transformation, the condition A != B will be true,
4241 and A will be chosen.
4243 The conversions to max() and min() are not correct if B is
4244 a number and A is not. The conditions in the original
4245 expressions will be false, so all four give B. The min()
4246 and max() versions would give a NaN instead. */
4247 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4249 tree comp_op0 = arg00;
4250 tree comp_op1 = arg01;
4251 tree comp_type = TREE_TYPE (comp_op0);
4253 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4254 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4256 comp_type = type;
4257 comp_op0 = arg1;
4258 comp_op1 = arg2;
4261 switch (comp_code)
4263 case EQ_EXPR:
4264 return pedantic_non_lvalue (fold_convert (type, arg2));
4265 case NE_EXPR:
4266 return pedantic_non_lvalue (fold_convert (type, arg1));
4267 case LE_EXPR:
4268 case LT_EXPR:
4269 case UNLE_EXPR:
4270 case UNLT_EXPR:
4271 /* In C++ a ?: expression can be an lvalue, so put the
4272 operand which will be used if they are equal first
4273 so that we can convert this back to the
4274 corresponding COND_EXPR. */
4275 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4277 comp_op0 = fold_convert (comp_type, comp_op0);
4278 comp_op1 = fold_convert (comp_type, comp_op1);
4279 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4280 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4281 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4282 return pedantic_non_lvalue (fold_convert (type, tem));
4284 break;
4285 case GE_EXPR:
4286 case GT_EXPR:
4287 case UNGE_EXPR:
4288 case UNGT_EXPR:
4289 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4291 comp_op0 = fold_convert (comp_type, comp_op0);
4292 comp_op1 = fold_convert (comp_type, comp_op1);
4293 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4294 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4295 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4296 return pedantic_non_lvalue (fold_convert (type, tem));
4298 break;
4299 case UNEQ_EXPR:
4300 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4301 return pedantic_non_lvalue (fold_convert (type, arg2));
4302 break;
4303 case LTGT_EXPR:
4304 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4305 return pedantic_non_lvalue (fold_convert (type, arg1));
4306 break;
4307 default:
4308 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4309 break;
4313 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4314 we might still be able to simplify this. For example,
4315 if C1 is one less or one more than C2, this might have started
4316 out as a MIN or MAX and been transformed by this function.
4317 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4319 if (INTEGRAL_TYPE_P (type)
4320 && TREE_CODE (arg01) == INTEGER_CST
4321 && TREE_CODE (arg2) == INTEGER_CST)
4322 switch (comp_code)
4324 case EQ_EXPR:
4325 /* We can replace A with C1 in this case. */
4326 arg1 = fold_convert (type, arg01);
4327 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4329 case LT_EXPR:
4330 /* If C1 is C2 + 1, this is min(A, C2). */
4331 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4332 OEP_ONLY_CONST)
4333 && operand_equal_p (arg01,
4334 const_binop (PLUS_EXPR, arg2,
4335 integer_one_node, 0),
4336 OEP_ONLY_CONST))
4337 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4338 type, arg1, arg2)));
4339 break;
4341 case LE_EXPR:
4342 /* If C1 is C2 - 1, this is min(A, C2). */
4343 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4344 OEP_ONLY_CONST)
4345 && operand_equal_p (arg01,
4346 const_binop (MINUS_EXPR, arg2,
4347 integer_one_node, 0),
4348 OEP_ONLY_CONST))
4349 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4350 type, arg1, arg2)));
4351 break;
4353 case GT_EXPR:
4354 /* If C1 is C2 - 1, this is max(A, C2). */
4355 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4356 OEP_ONLY_CONST)
4357 && operand_equal_p (arg01,
4358 const_binop (MINUS_EXPR, arg2,
4359 integer_one_node, 0),
4360 OEP_ONLY_CONST))
4361 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4362 type, arg1, arg2)));
4363 break;
4365 case GE_EXPR:
4366 /* If C1 is C2 + 1, this is max(A, C2). */
4367 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4368 OEP_ONLY_CONST)
4369 && operand_equal_p (arg01,
4370 const_binop (PLUS_EXPR, arg2,
4371 integer_one_node, 0),
4372 OEP_ONLY_CONST))
4373 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4374 type, arg1, arg2)));
4375 break;
4376 case NE_EXPR:
4377 break;
4378 default:
4379 gcc_unreachable ();
4382 return NULL_TREE;
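/* Added worked example (commentary only): for a < b ? a : b the
   comparison operands match the two arms, so the LT_EXPR case above
   builds MIN_EXPR (b, a), putting the operand chosen on equality
   first so the result can be converted back to a COND_EXPR lvalue in
   C++.  Similarly x < 5 ? x : 4 matches the C1 == C2 + 1 pattern and
   becomes MIN_EXPR (x, 4).  */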
4387 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4388 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4389 #endif
4391 /* EXP is some logical combination of boolean tests. See if we can
4392 merge it into some range test. Return the new tree if so. */
4394 static tree
4395 fold_range_test (tree exp)
4397 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4398 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4399 int in0_p, in1_p, in_p;
4400 tree low0, low1, low, high0, high1, high;
4401 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4402 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4403 tree tem;
4405 /* If this is an OR operation, invert both sides; we will invert
4406 again at the end. */
4407 if (or_op)
4408 in0_p = ! in0_p, in1_p = ! in1_p;
4410 /* If both expressions are the same, if we can merge the ranges, and we
4411 can build the range test, return it or it inverted. If one of the
4412 ranges is always true or always false, consider it to be the same
4413 expression as the other. */
4414 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4415 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4416 in1_p, low1, high1)
4417 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4418 lhs != 0 ? lhs
4419 : rhs != 0 ? rhs : integer_zero_node,
4420 in_p, low, high))))
4421 return or_op ? invert_truthvalue (tem) : tem;
4423 /* On machines where the branch cost is expensive, if this is a
4424 short-circuited branch and the underlying object on both sides
4425 is the same, make a non-short-circuit operation. */
4426 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4427 && lhs != 0 && rhs != 0
4428 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4429 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4430 && operand_equal_p (lhs, rhs, 0))
4432 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4433 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4434 which cases we can't do this. */
4435 if (simple_operand_p (lhs))
4436 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4437 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4438 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4439 TREE_OPERAND (exp, 1));
4441 else if (lang_hooks.decls.global_bindings_p () == 0
4442 && ! CONTAINS_PLACEHOLDER_P (lhs))
4444 tree common = save_expr (lhs);
4446 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4447 or_op ? ! in0_p : in0_p,
4448 low0, high0))
4449 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4450 or_op ? ! in1_p : in1_p,
4451 low1, high1))))
4452 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4453 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4454 TREE_TYPE (exp), lhs, rhs);
4458 return 0;
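/* Added worked example (commentary only): a >= 0 && a <= 9 produces
   two ranges over the same object, which merge to +[0, 9] and fold
   to a single range check.  For a == 1 || a == 5 the merge fails,
   but since both sides test the same simple operand the
   short-circuit TRUTH_ORIF_EXPR is rewritten as an unconditional
   TRUTH_OR_EXPR, saving a branch when BRANCH_COST >= 2.  */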
4461 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4462 bit value. Arrange things so the extra bits will be set to zero if and
4463 only if C is sign-extended to its full width. If MASK is nonzero,
4464 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4466 static tree
4467 unextend (tree c, int p, int unsignedp, tree mask)
4469 tree type = TREE_TYPE (c);
4470 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4471 tree temp;
4473 if (p == modesize || unsignedp)
4474 return c;
4476 /* We work by getting just the sign bit into the low-order bit, then
4477 into the high-order bit, then sign-extend. We then XOR that value
4478 with C. */
4479 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4480 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4482 /* We must use a signed type in order to get an arithmetic right shift.
4483 However, we must also avoid introducing accidental overflows, so that
4484 a subsequent call to integer_zerop will work. Hence we must
4485 do the type conversion here. At this point, the constant is either
4486 zero or one, and the conversion to a signed type can never overflow.
4487 We could get an overflow if this conversion is done anywhere else. */
4488 if (TYPE_UNSIGNED (type))
4489 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4491 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4492 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4493 if (mask != 0)
4494 temp = const_binop (BIT_AND_EXPR, temp,
4495 fold_convert (TREE_TYPE (c), mask), 0);
4496 /* If necessary, convert the type back to match the type of C. */
4497 if (TYPE_UNSIGNED (type))
4498 temp = fold_convert (type, temp);
4500 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
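/* Added worked example (commentary only): take an 8-bit mode and a
   4-bit signed field, so p == 4 and modesize == 8.  For c == 0xFA,
   i.e. the 4-bit value 1010 already sign-extended, temp becomes 0xF0
   and c ^ temp == 0x0A: the extra bits end up zero.  For c == 0x0A,
   not sign-extended, the same steps give 0x0A ^ 0xF0 == 0xFA, so the
   extra bits are nonzero and a later comparison against the masked
   field can be recognized as always false.  */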
4503 /* Find ways of folding logical expressions of LHS and RHS:
4504 Try to merge two comparisons to the same innermost item.
4505 Look for range tests like "ch >= '0' && ch <= '9'".
4506 Look for combinations of simple terms on machines with expensive branches
4507 and evaluate the RHS unconditionally.
4509 For example, if we have p->a == 2 && p->b == 4 and we can make an
4510 object large enough to span both A and B, we can do this with a comparison
4511 against the object ANDed with the a mask.
4513 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4514 operations to do this with one comparison.
4516 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4517 function and the one above.
4519 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4520 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4522 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4523 two operands.
4525 We return the simplified tree or 0 if no optimization is possible. */
4527 static tree
4528 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4530 /* If this is the "or" of two comparisons, we can do something if
4531 the comparisons are NE_EXPR. If this is the "and", we can do something
4532 if the comparisons are EQ_EXPR. I.e.,
4533 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4535 WANTED_CODE is this operation code. For single bit fields, we can
4536 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4537 comparison for one-bit fields. */
4539 enum tree_code wanted_code;
4540 enum tree_code lcode, rcode;
4541 tree ll_arg, lr_arg, rl_arg, rr_arg;
4542 tree ll_inner, lr_inner, rl_inner, rr_inner;
4543 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4544 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4545 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4546 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4547 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4548 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4549 enum machine_mode lnmode, rnmode;
4550 tree ll_mask, lr_mask, rl_mask, rr_mask;
4551 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4552 tree l_const, r_const;
4553 tree lntype, rntype, result;
4554 int first_bit, end_bit;
4555 int volatilep;
4557 /* Start by getting the comparison codes. Fail if anything is volatile.
4558 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4559 it were surrounded with a NE_EXPR. */
4561 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4562 return 0;
4564 lcode = TREE_CODE (lhs);
4565 rcode = TREE_CODE (rhs);
4567 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4569 lhs = build2 (NE_EXPR, truth_type, lhs,
4570 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4571 lcode = NE_EXPR;
4574 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4576 rhs = build2 (NE_EXPR, truth_type, rhs,
4577 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4578 rcode = NE_EXPR;
4581 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4582 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4583 return 0;
4585 ll_arg = TREE_OPERAND (lhs, 0);
4586 lr_arg = TREE_OPERAND (lhs, 1);
4587 rl_arg = TREE_OPERAND (rhs, 0);
4588 rr_arg = TREE_OPERAND (rhs, 1);
4590 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4591 if (simple_operand_p (ll_arg)
4592 && simple_operand_p (lr_arg))
4594 tree result;
4595 if (operand_equal_p (ll_arg, rl_arg, 0)
4596 && operand_equal_p (lr_arg, rr_arg, 0))
4598 result = combine_comparisons (code, lcode, rcode,
4599 truth_type, ll_arg, lr_arg);
4600 if (result)
4601 return result;
4603 else if (operand_equal_p (ll_arg, rr_arg, 0)
4604 && operand_equal_p (lr_arg, rl_arg, 0))
4606 result = combine_comparisons (code, lcode,
4607 swap_tree_comparison (rcode),
4608 truth_type, ll_arg, lr_arg);
4609 if (result)
4610 return result;
4614 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4615 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4617 /* If the RHS can be evaluated unconditionally and its operands are
4618 simple, it wins to evaluate the RHS unconditionally on machines
4619 with expensive branches. In this case, this isn't a comparison
4620 that can be merged. Avoid doing this if the RHS is a floating-point
4621 comparison since those can trap. */
4623 if (BRANCH_COST >= 2
4624 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4625 && simple_operand_p (rl_arg)
4626 && simple_operand_p (rr_arg))
4628 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4629 if (code == TRUTH_OR_EXPR
4630 && lcode == NE_EXPR && integer_zerop (lr_arg)
4631 && rcode == NE_EXPR && integer_zerop (rr_arg)
4632 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4633 return build2 (NE_EXPR, truth_type,
4634 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4635 ll_arg, rl_arg),
4636 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4638 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4639 if (code == TRUTH_AND_EXPR
4640 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4641 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4642 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4643 return build2 (EQ_EXPR, truth_type,
4644 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4645 ll_arg, rl_arg),
4646 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4648 return build2 (code, truth_type, lhs, rhs);
4651 /* See if the comparisons can be merged. Then get all the parameters for
4652 each side. */
4654 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4655 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4656 return 0;
4658 volatilep = 0;
4659 ll_inner = decode_field_reference (ll_arg,
4660 &ll_bitsize, &ll_bitpos, &ll_mode,
4661 &ll_unsignedp, &volatilep, &ll_mask,
4662 &ll_and_mask);
4663 lr_inner = decode_field_reference (lr_arg,
4664 &lr_bitsize, &lr_bitpos, &lr_mode,
4665 &lr_unsignedp, &volatilep, &lr_mask,
4666 &lr_and_mask);
4667 rl_inner = decode_field_reference (rl_arg,
4668 &rl_bitsize, &rl_bitpos, &rl_mode,
4669 &rl_unsignedp, &volatilep, &rl_mask,
4670 &rl_and_mask);
4671 rr_inner = decode_field_reference (rr_arg,
4672 &rr_bitsize, &rr_bitpos, &rr_mode,
4673 &rr_unsignedp, &volatilep, &rr_mask,
4674 &rr_and_mask);
4676 /* The inner operation on the lhs of each comparison must be the
4677 same if we are to be able to do anything.
4678 Then see if we have constants. If not, the same must be true for
4679 the rhs's. */
4680 if (volatilep || ll_inner == 0 || rl_inner == 0
4681 || ! operand_equal_p (ll_inner, rl_inner, 0))
4682 return 0;
4684 if (TREE_CODE (lr_arg) == INTEGER_CST
4685 && TREE_CODE (rr_arg) == INTEGER_CST)
4686 l_const = lr_arg, r_const = rr_arg;
4687 else if (lr_inner == 0 || rr_inner == 0
4688 || ! operand_equal_p (lr_inner, rr_inner, 0))
4689 return 0;
4690 else
4691 l_const = r_const = 0;
4693 /* If either comparison code is not correct for our logical operation,
4694 fail. However, we can convert a one-bit comparison against zero into
4695 the opposite comparison against that bit being set in the field. */
4697 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4698 if (lcode != wanted_code)
4700 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4702 /* Make the left operand unsigned, since we are only interested
4703 in the value of one bit. Otherwise we are doing the wrong
4704 thing below. */
4705 ll_unsignedp = 1;
4706 l_const = ll_mask;
4708 else
4709 return 0;
4712 /* This is analogous to the code for l_const above. */
4713 if (rcode != wanted_code)
4715 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4717 rl_unsignedp = 1;
4718 r_const = rl_mask;
4720 else
4721 return 0;
4724 /* After this point all optimizations will generate bit-field
4725 references, which we might not want. */
4726 if (! lang_hooks.can_use_bit_fields_p ())
4727 return 0;
4729 /* See if we can find a mode that contains both fields being compared on
4730 the left. If we can't, fail. Otherwise, update all constants and masks
4731 to be relative to a field of that size. */
4732 first_bit = MIN (ll_bitpos, rl_bitpos);
4733 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4734 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4735 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4736 volatilep);
4737 if (lnmode == VOIDmode)
4738 return 0;
4740 lnbitsize = GET_MODE_BITSIZE (lnmode);
4741 lnbitpos = first_bit & ~ (lnbitsize - 1);
4742 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4743 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4745 if (BYTES_BIG_ENDIAN)
4747 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4748 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4751 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4752 size_int (xll_bitpos), 0);
4753 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4754 size_int (xrl_bitpos), 0);
4756 if (l_const)
4758 l_const = fold_convert (lntype, l_const);
4759 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4760 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4761 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4762 fold (build1 (BIT_NOT_EXPR,
4763 lntype, ll_mask)),
4764 0)))
4766 warning ("comparison is always %d", wanted_code == NE_EXPR);
4768 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4771 if (r_const)
4773 r_const = fold_convert (lntype, r_const);
4774 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4775 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4776 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4777 fold (build1 (BIT_NOT_EXPR,
4778 lntype, rl_mask)),
4779 0)))
4781 warning ("comparison is always %d", wanted_code == NE_EXPR);
4783 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4787 /* If the right sides are not constant, do the same for it. Also,
4788 disallow this optimization if a size or signedness mismatch occurs
4789 between the left and right sides. */
4790 if (l_const == 0)
4792 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4793 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4794 /* Make sure the two fields on the right
4795 correspond to the left without being swapped. */
4796 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4797 return 0;
4799 first_bit = MIN (lr_bitpos, rr_bitpos);
4800 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4801 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4802 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4803 volatilep);
4804 if (rnmode == VOIDmode)
4805 return 0;
4807 rnbitsize = GET_MODE_BITSIZE (rnmode);
4808 rnbitpos = first_bit & ~ (rnbitsize - 1);
4809 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4810 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4812 if (BYTES_BIG_ENDIAN)
4814 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4815 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4818 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4819 size_int (xlr_bitpos), 0);
4820 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4821 size_int (xrr_bitpos), 0);
4823 /* Make a mask that corresponds to both fields being compared.
4824 Do this for both items being compared. If the operands are the
4825 same size and the bits being compared are in the same position
4826 then we can do this by masking both and comparing the masked
4827 results. */
4828 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4829 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4830 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4832 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4833 ll_unsignedp || rl_unsignedp);
4834 if (! all_ones_mask_p (ll_mask, lnbitsize))
4835 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4837 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4838 lr_unsignedp || rr_unsignedp);
4839 if (! all_ones_mask_p (lr_mask, rnbitsize))
4840 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4842 return build2 (wanted_code, truth_type, lhs, rhs);
4845 /* There is still another way we can do something: If both pairs of
4846 fields being compared are adjacent, we may be able to make a wider
4847 field containing them both.
4849 Note that we still must mask the lhs/rhs expressions. Furthermore,
4850 the mask must be shifted to account for the shift done by
4851 make_bit_field_ref. */
4852 if ((ll_bitsize + ll_bitpos == rl_bitpos
4853 && lr_bitsize + lr_bitpos == rr_bitpos)
4854 || (ll_bitpos == rl_bitpos + rl_bitsize
4855 && lr_bitpos == rr_bitpos + rr_bitsize))
4857 tree type;
4859 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4860 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4861 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4862 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4864 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4865 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4866 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4867 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4869 /* Convert to the smaller type before masking out unwanted bits. */
4870 type = lntype;
4871 if (lntype != rntype)
4873 if (lnbitsize > rnbitsize)
4875 lhs = fold_convert (rntype, lhs);
4876 ll_mask = fold_convert (rntype, ll_mask);
4877 type = rntype;
4879 else if (lnbitsize < rnbitsize)
4881 rhs = fold_convert (lntype, rhs);
4882 lr_mask = fold_convert (lntype, lr_mask);
4883 type = lntype;
4887 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4888 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4890 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4891 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4893 return build2 (wanted_code, truth_type, lhs, rhs);
4896 return 0;
4899 /* Handle the case of comparisons with constants. If there is something in
4900 common between the masks, those bits of the constants must be the same.
4901 If not, the condition is always false. Test for this to avoid generating
4902 incorrect code below. */
4903 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4904 if (! integer_zerop (result)
4905 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4906 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4908 if (wanted_code == NE_EXPR)
4910 warning ("%<or%> of unmatched not-equal tests is always 1");
4911 return constant_boolean_node (true, truth_type);
4913 else
4915 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4916 return constant_boolean_node (false, truth_type);
4920 /* Construct the expression we will return. First get the component
4921 reference we will make. Unless the mask is all ones the width of
4922 that field, perform the mask operation. Then compare with the
4923 merged constant. */
4924 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4925 ll_unsignedp || rl_unsignedp);
4927 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4928 if (! all_ones_mask_p (ll_mask, lnbitsize))
4929 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4931 return build2 (wanted_code, truth_type, result,
4932 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
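/* Added worked example (commentary only): given p->a == 2 && p->b == 4
   with a and b adjacent bit-fields that fit in one machine mode, the
   code above loads the containing word once, masks away the bits of
   any other fields, and compares against the merged constant formed
   by shifting 2 and 4 into their field positions, replacing two
   loads and two branches with a single comparison.  */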
4935 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4936 constant. */
4938 static tree
4939 optimize_minmax_comparison (tree t)
4941 tree type = TREE_TYPE (t);
4942 tree arg0 = TREE_OPERAND (t, 0);
4943 enum tree_code op_code;
4944 tree comp_const = TREE_OPERAND (t, 1);
4945 tree minmax_const;
4946 int consts_equal, consts_lt;
4947 tree inner;
4949 STRIP_SIGN_NOPS (arg0);
4951 op_code = TREE_CODE (arg0);
4952 minmax_const = TREE_OPERAND (arg0, 1);
4953 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4954 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4955 inner = TREE_OPERAND (arg0, 0);
4957 /* If something does not permit us to optimize, return the original tree. */
4958 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4959 || TREE_CODE (comp_const) != INTEGER_CST
4960 || TREE_CONSTANT_OVERFLOW (comp_const)
4961 || TREE_CODE (minmax_const) != INTEGER_CST
4962 || TREE_CONSTANT_OVERFLOW (minmax_const))
4963 return t;
4965 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4966 and GT_EXPR, doing the rest with recursive calls using logical
4967 simplifications. */
4968 switch (TREE_CODE (t))
4970 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4971 return
4972 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4974 case GE_EXPR:
4975 return
4976 fold (build2 (TRUTH_ORIF_EXPR, type,
4977 optimize_minmax_comparison
4978 (build2 (EQ_EXPR, type, arg0, comp_const)),
4979 optimize_minmax_comparison
4980 (build2 (GT_EXPR, type, arg0, comp_const))));
4982 case EQ_EXPR:
4983 if (op_code == MAX_EXPR && consts_equal)
4984 /* MAX (X, 0) == 0 -> X <= 0 */
4985 return fold (build2 (LE_EXPR, type, inner, comp_const));
4987 else if (op_code == MAX_EXPR && consts_lt)
4988 /* MAX (X, 0) == 5 -> X == 5 */
4989 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4991 else if (op_code == MAX_EXPR)
4992 /* MAX (X, 0) == -1 -> false */
4993 return omit_one_operand (type, integer_zero_node, inner);
4995 else if (consts_equal)
4996 /* MIN (X, 0) == 0 -> X >= 0 */
4997 return fold (build2 (GE_EXPR, type, inner, comp_const));
4999 else if (consts_lt)
5000 /* MIN (X, 0) == 5 -> false */
5001 return omit_one_operand (type, integer_zero_node, inner);
5003 else
5004 /* MIN (X, 0) == -1 -> X == -1 */
5005 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5007 case GT_EXPR:
5008 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5009 /* MAX (X, 0) > 0 -> X > 0
5010 MAX (X, 0) > 5 -> X > 5 */
5011 return fold (build2 (GT_EXPR, type, inner, comp_const));
5013 else if (op_code == MAX_EXPR)
5014 /* MAX (X, 0) > -1 -> true */
5015 return omit_one_operand (type, integer_one_node, inner);
5017 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5018 /* MIN (X, 0) > 0 -> false
5019 MIN (X, 0) > 5 -> false */
5020 return omit_one_operand (type, integer_zero_node, inner);
5022 else
5023 /* MIN (X, 0) > -1 -> X > -1 */
5024 return fold (build2 (GT_EXPR, type, inner, comp_const));
5026 default:
5027 return t;
5031 /* T is an integer expression that is being multiplied or divided by, or
5032 taken modulo, a constant C (CODE says which operation and what kind
5033 of divide or modulus). See if we can eliminate that operation by folding it with
5034 other operations already in T. WIDE_TYPE, if non-null, is a type that
5035 should be used for the computation if wider than our type.
5037 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5038 (X * 2) + (Y * 4). We must, however, be assured that either the original
5039 expression would not overflow or that overflow is undefined for the type
5040 in the language in question.
5042 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5043 the machine has a multiply-accumulate insn or that this is part of an
5044 addressing calculation.
5046 If we return a non-null expression, it is an equivalent form of the
5047 original computation, but need not be in the original type. */
5049 static tree
5050 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5052 /* To avoid exponential search depth, refuse to allow recursion past
5053 three levels. Beyond that (1) it's highly unlikely that we'll find
5054 something interesting and (2) we've probably processed it before
5055 when we built the inner expression. */
5057 static int depth;
5058 tree ret;
5060 if (depth > 3)
5061 return NULL;
5063 depth++;
5064 ret = extract_muldiv_1 (t, c, code, wide_type);
5065 depth--;
5067 return ret;
5070 static tree
5071 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5073 tree type = TREE_TYPE (t);
5074 enum tree_code tcode = TREE_CODE (t);
5075 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5076 > GET_MODE_SIZE (TYPE_MODE (type)))
5077 ? wide_type : type);
5078 tree t1, t2;
5079 int same_p = tcode == code;
5080 tree op0 = NULL_TREE, op1 = NULL_TREE;
5082 /* Don't deal with constants of zero here; they confuse the code below. */
5083 if (integer_zerop (c))
5084 return NULL_TREE;
5086 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5087 op0 = TREE_OPERAND (t, 0);
5089 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5090 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5092 /* Note that we need not handle conditional operations here since fold
5093 already handles those cases. So just do arithmetic here. */
5094 switch (tcode)
5096 case INTEGER_CST:
5097 /* For a constant, we can always simplify if we are a multiply
5098 or (for divide and modulus) if it is a multiple of our constant. */
5099 if (code == MULT_EXPR
5100 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5101 return const_binop (code, fold_convert (ctype, t),
5102 fold_convert (ctype, c), 0);
5103 break;
5105 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5106 /* If op0 is an expression ... */
5107 if ((COMPARISON_CLASS_P (op0)
5108 || UNARY_CLASS_P (op0)
5109 || BINARY_CLASS_P (op0)
5110 || EXPRESSION_CLASS_P (op0))
5111 /* ... and is unsigned, and its type is smaller than ctype,
5112 then we cannot pass through as widening. */
5113 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5114 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5115 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5116 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5117 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5118 /* ... or this is a truncation (t is narrower than op0),
5119 then we cannot pass through this narrowing. */
5120 || (GET_MODE_SIZE (TYPE_MODE (type))
5121 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5122 /* ... or signedness changes for division or modulus,
5123 then we cannot pass through this conversion. */
5124 || (code != MULT_EXPR
5125 && (TYPE_UNSIGNED (ctype)
5126 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5127 break;
5129 /* Pass the constant down and see if we can make a simplification. If
5130 we can, replace this expression with the inner simplification for
5131 possible later conversion to our or some other type. */
5132 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5133 && TREE_CODE (t2) == INTEGER_CST
5134 && ! TREE_CONSTANT_OVERFLOW (t2)
5135 && (0 != (t1 = extract_muldiv (op0, t2, code,
5136 code == MULT_EXPR
5137 ? ctype : NULL_TREE))))
5138 return t1;
5139 break;
5141 case NEGATE_EXPR: case ABS_EXPR:
5142 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5143 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5144 break;
5146 case MIN_EXPR: case MAX_EXPR:
5147 /* If widening the type changes the signedness, then we can't perform
5148 this optimization as that changes the result. */
5149 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5150 break;
5152 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5153 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5154 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5156 if (tree_int_cst_sgn (c) < 0)
5157 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5159 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5160 fold_convert (ctype, t2)));
5162 break;
5164 case LSHIFT_EXPR: case RSHIFT_EXPR:
5165 /* If the second operand is constant, this is a multiplication
5166 or floor division, by a power of two, so we can treat it that
5167 way unless the multiplier or divisor overflows. Signed
5168 left-shift overflow is implementation-defined rather than
5169 undefined in C90, so do not convert signed left shift into
5170 multiplication. */
5171 if (TREE_CODE (op1) == INTEGER_CST
5172 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5173 /* const_binop may not detect overflow correctly,
5174 so check for it explicitly here. */
5175 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5176 && TREE_INT_CST_HIGH (op1) == 0
5177 && 0 != (t1 = fold_convert (ctype,
5178 const_binop (LSHIFT_EXPR,
5179 size_one_node,
5180 op1, 0)))
5181 && ! TREE_OVERFLOW (t1))
5182 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5183 ? MULT_EXPR : FLOOR_DIV_EXPR,
5184 ctype, fold_convert (ctype, op0), t1),
5185 c, code, wide_type);
5186 break;
5188 case PLUS_EXPR: case MINUS_EXPR:
5189 /* See if we can eliminate the operation on both sides. If we can, we
5190 can return a new PLUS or MINUS. If we can't, the only remaining
5191 cases where we can do anything are if the second operand is a
5192 constant. */
5193 t1 = extract_muldiv (op0, c, code, wide_type);
5194 t2 = extract_muldiv (op1, c, code, wide_type);
5195 if (t1 != 0 && t2 != 0
5196 && (code == MULT_EXPR
5197 /* If not multiplication, we can only do this if both operands
5198 are divisible by c. */
5199 || (multiple_of_p (ctype, op0, c)
5200 && multiple_of_p (ctype, op1, c))))
5201 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5202 fold_convert (ctype, t2)));
5204 /* If this was a subtraction, negate OP1 and set it to be an addition.
5205 This simplifies the logic below. */
5206 if (tcode == MINUS_EXPR)
5207 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5209 if (TREE_CODE (op1) != INTEGER_CST)
5210 break;
5212 /* If either OP1 or C are negative, this optimization is not safe for
5213 some of the division and remainder types while for others we need
5214 to change the code. */
5215 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5217 if (code == CEIL_DIV_EXPR)
5218 code = FLOOR_DIV_EXPR;
5219 else if (code == FLOOR_DIV_EXPR)
5220 code = CEIL_DIV_EXPR;
5221 else if (code != MULT_EXPR
5222 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5223 break;
5226 /* If it's a multiply or a division/modulus operation of a multiple
5227 of our constant, do the operation and verify it doesn't overflow. */
5228 if (code == MULT_EXPR
5229 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5231 op1 = const_binop (code, fold_convert (ctype, op1),
5232 fold_convert (ctype, c), 0);
5233 /* We allow the constant to overflow with wrapping semantics. */
5234 if (op1 == 0
5235 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5236 break;
5238 else
5239 break;
5241 /* If we have an unsigned type that is not a sizetype, we cannot widen
5242 the operation since it will change the result if the original
5243 computation overflowed. */
5244 if (TYPE_UNSIGNED (ctype)
5245 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5246 && ctype != type)
5247 break;
5249 /* If we were able to eliminate our operation from the first side,
5250 apply our operation to the second side and reform the PLUS. */
5251 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5252 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5254 /* The last case is if we are a multiply. In that case, we can
5255 apply the distributive law to commute the multiply and addition
5256 if the multiplication of the constants doesn't overflow. */
5257 if (code == MULT_EXPR)
5258 return fold (build2 (tcode, ctype,
5259 fold (build2 (code, ctype,
5260 fold_convert (ctype, op0),
5261 fold_convert (ctype, c))),
5262 op1));
5264 break;
5266 case MULT_EXPR:
5267 /* We have a special case here if we are doing something like
5268 (C * 8) % 4 since we know that's zero. */
5269 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5270 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5271 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5272 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5273 return omit_one_operand (type, integer_zero_node, op0);
5275 /* ... fall through ... */
5277 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5278 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5279 /* If we can extract our operation from the LHS, do so and return a
5280 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5281 do something only if the second operand is a constant. */
5282 if (same_p
5283 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5284 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5285 fold_convert (ctype, op1)));
5286 else if (tcode == MULT_EXPR && code == MULT_EXPR
5287 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5288 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5289 fold_convert (ctype, t1)));
5290 else if (TREE_CODE (op1) != INTEGER_CST)
5291 return 0;
5293 /* If these are the same operation types, we can associate them
5294 assuming no overflow. */
5295 if (tcode == code
5296 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5297 fold_convert (ctype, c), 0))
5298 && ! TREE_OVERFLOW (t1))
5299 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5301 /* If these operations "cancel" each other, we have the main
5302 optimizations of this pass, which occur when either constant is a
5303 multiple of the other, in which case we replace this with either an
5304 operation of CODE or TCODE.
5306 If we have an unsigned type that is not a sizetype, we cannot do
5307 this since it will change the result if the original computation
5308 overflowed. */
5309 if ((! TYPE_UNSIGNED (ctype)
5310 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5311 && ! flag_wrapv
5312 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5313 || (tcode == MULT_EXPR
5314 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5315 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5317 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5318 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5319 fold_convert (ctype,
5320 const_binop (TRUNC_DIV_EXPR,
5321 op1, c, 0))));
5322 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5323 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5324 fold_convert (ctype,
5325 const_binop (TRUNC_DIV_EXPR,
5326 c, op1, 0))));
5328 break;
5330 default:
5331 break;
5334 return 0;
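/* Added worked example (commentary only): the TYPE_UNSIGNED (ctype)
   guards above exist because unsigned wrap-around is defined.  With
   8-bit unsigned arithmetic, (x + 200) / 4 for x == 100 computes
   (100 + 200) mod 256 == 44, then 44 / 4 == 11; the same expression
   widened to 16 bits gives 300 / 4 == 75, so widening would change
   an overflowed result and must be refused.  */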
5337 /* Return a node which has the indicated constant VALUE (either 0 or
5338 1), and is of the indicated TYPE. */
5340 tree
5341 constant_boolean_node (int value, tree type)
5343 if (type == integer_type_node)
5344 return value ? integer_one_node : integer_zero_node;
5345 else if (type == boolean_type_node)
5346 return value ? boolean_true_node : boolean_false_node;
5347 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5348 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5349 : integer_zero_node);
5350 else
5351 return build_int_cst (type, value);
5354 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5355 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5356 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5357 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5358 COND is the first argument to CODE; otherwise (as in the example
5359 given here), it is the second argument. TYPE is the type of the
5360 original expression. Return NULL_TREE if no simplification is
5361 possible. */
5363 static tree
5364 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5365 tree cond, tree arg, int cond_first_p)
5367 tree test, true_value, false_value;
5368 tree lhs = NULL_TREE;
5369 tree rhs = NULL_TREE;
5371 /* This transformation is only worthwhile if we don't have to wrap
5372 arg in a SAVE_EXPR, and the operation can be simplified on at least
5373 one of the branches once it's pushed inside the COND_EXPR. */
5374 if (!TREE_CONSTANT (arg))
5375 return NULL_TREE;
5377 if (TREE_CODE (cond) == COND_EXPR)
5379 test = TREE_OPERAND (cond, 0);
5380 true_value = TREE_OPERAND (cond, 1);
5381 false_value = TREE_OPERAND (cond, 2);
5382 /* If this operand is a void expression such as a throw, then it
5383 does not make sense to try to perform a logical or arithmetic
5384 operation involving it. */
5385 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5386 lhs = true_value;
5387 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5388 rhs = false_value;
5390 else
5392 tree testtype = TREE_TYPE (cond);
5393 test = cond;
5394 true_value = constant_boolean_node (true, testtype);
5395 false_value = constant_boolean_node (false, testtype);
5398 if (lhs == 0)
5399 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5400 : build2 (code, type, arg, true_value));
5401 if (rhs == 0)
5402 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5403 : build2 (code, type, arg, false_value));
5405 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5406 return fold_convert (type, test);
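/* Added worked example (commentary only): with ARG constant the
   distribution pays for itself: 5 + (b ? 1 : 0) folds to b ? 6 : 5,
   and 5 + (x < y) becomes (x < y) ? 6 : 5.  A non-constant ARG is
   rejected above because it would have to be wrapped in a SAVE_EXPR
   to avoid evaluating it twice.  */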
5410 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5412 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5413 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5414 ADDEND is the same as X.
5416 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5417 and finite. The problematic cases are when X is zero, and its mode
5418 has signed zeros. In the case of rounding towards -infinity,
5419 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5420 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5422 static bool
5423 fold_real_zero_addition_p (tree type, tree addend, int negate)
5425 if (!real_zerop (addend))
5426 return false;
5428 /* Don't allow the fold with -fsignaling-nans. */
5429 if (HONOR_SNANS (TYPE_MODE (type)))
5430 return false;
5432 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5433 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5434 return true;
5436 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5437 if (TREE_CODE (addend) == REAL_CST
5438 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5439 negate = !negate;
5441 /* The mode has signed zeros, and we have to honor their sign.
5442 In this situation, there is only one case we can return true for.
5443 X - 0 is the same as X unless rounding towards -infinity is
5444 supported. */
5445 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
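
/* A standalone sketch of the signed-zero hazard described above,
   assuming IEEE doubles and the default round-to-nearest mode
   (compile it as a separate test program):  */

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;

  /* -0.0 + 0.0 is +0.0, so X + 0.0 loses the sign of a negative
     zero, while X - 0.0 preserves it.  */
  printf ("signbit (x + 0.0) = %d\n", signbit (x + 0.0) != 0); /* 0 */
  printf ("signbit (x - 0.0) = %d\n", signbit (x - 0.0) != 0); /* 1 */
  return 0;
}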
5448 /* Subroutine of fold() that checks comparisons of built-in math
5449 functions against real constants.
5451 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5452 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5453 is the type of the result and ARG0 and ARG1 are the operands of the
5454 comparison. ARG1 must be a TREE_REAL_CST.
5456 The function returns the constant folded tree if a simplification
5457 can be made, and NULL_TREE otherwise. */
5459 static tree
5460 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5461 tree type, tree arg0, tree arg1)
5463 REAL_VALUE_TYPE c;
5465 if (BUILTIN_SQRT_P (fcode))
5467 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5468 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5470 c = TREE_REAL_CST (arg1);
5471 if (REAL_VALUE_NEGATIVE (c))
5473 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false, if y is negative. */
5474 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5475 return omit_one_operand (type, integer_zero_node, arg);
5477 /* sqrt(x) > y is always true, if y is negative and we
5478 don't care about NaNs, i.e. negative values of x. */
5479 if (code == NE_EXPR || !HONOR_NANS (mode))
5480 return omit_one_operand (type, integer_one_node, arg);
5482 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5483 return fold (build2 (GE_EXPR, type, arg,
5484 build_real (TREE_TYPE (arg), dconst0)));
5486 else if (code == GT_EXPR || code == GE_EXPR)
5488 REAL_VALUE_TYPE c2;
5490 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5491 real_convert (&c2, mode, &c2);
5493 if (REAL_VALUE_ISINF (c2))
5495 /* sqrt(x) > y is x == +Inf, when y is very large. */
5496 if (HONOR_INFINITIES (mode))
5497 return fold (build2 (EQ_EXPR, type, arg,
5498 build_real (TREE_TYPE (arg), c2)));
5500 /* sqrt(x) > y is always false, when y is very large
5501 and we don't care about infinities. */
5502 return omit_one_operand (type, integer_zero_node, arg);
5505 /* sqrt(x) > c is the same as x > c*c. */
5506 return fold (build2 (code, type, arg,
5507 build_real (TREE_TYPE (arg), c2)));
5509 else if (code == LT_EXPR || code == LE_EXPR)
5511 REAL_VALUE_TYPE c2;
5513 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5514 real_convert (&c2, mode, &c2);
5516 if (REAL_VALUE_ISINF (c2))
5518 /* sqrt(x) < y is always true, when y is a very large
5519 value and we don't care about NaNs or Infinities. */
5520 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5521 return omit_one_operand (type, integer_one_node, arg);
5523 /* sqrt(x) < y is x != +Inf when y is very large and we
5524 don't care about NaNs. */
5525 if (! HONOR_NANS (mode))
5526 return fold (build2 (NE_EXPR, type, arg,
5527 build_real (TREE_TYPE (arg), c2)));
5529 /* sqrt(x) < y is x >= 0 when y is very large and we
5530 don't care about Infinities. */
5531 if (! HONOR_INFINITIES (mode))
5532 return fold (build2 (GE_EXPR, type, arg,
5533 build_real (TREE_TYPE (arg), dconst0)));
5535 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5536 if (lang_hooks.decls.global_bindings_p () != 0
5537 || CONTAINS_PLACEHOLDER_P (arg))
5538 return NULL_TREE;
5540 arg = save_expr (arg);
5541 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5542 fold (build2 (GE_EXPR, type, arg,
5543 build_real (TREE_TYPE (arg),
5544 dconst0))),
5545 fold (build2 (NE_EXPR, type, arg,
5546 build_real (TREE_TYPE (arg),
5547 c2)))));
5550 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5551 if (! HONOR_NANS (mode))
5552 return fold (build2 (code, type, arg,
5553 build_real (TREE_TYPE (arg), c2)));
5555 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5556 if (lang_hooks.decls.global_bindings_p () == 0
5557 && ! CONTAINS_PLACEHOLDER_P (arg))
5559 arg = save_expr (arg);
5560 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5561 fold (build2 (GE_EXPR, type, arg,
5562 build_real (TREE_TYPE (arg),
5563 dconst0))),
5564 fold (build2 (code, type, arg,
5565 build_real (TREE_TYPE (arg),
5566 c2)))));
5571 return NULL_TREE;
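
/* A standalone brute-force check of the central identity used above:
   for a nonnegative constant c whose square is exactly representable,
   sqrt(x) > c agrees with x > c*c because sqrt is monotonic.  (The
   folder squares C in the target format via real_convert and treats
   the inexact and infinite cases separately.)  */

#include <stdio.h>
#include <math.h>

int
main (void)
{
  const double c = 2.5, c2 = c * c;  /* 6.25, exact in binary.  */

  for (double x = 0.0; x <= 16.0; x += 0.25)
    if ((sqrt (x) > c) != (x > c2))
      {
        printf ("mismatch at x = %g\n", x);
        return 1;
      }
  printf ("sqrt(x) > %g matches x > %g on the sampled range\n", c, c2);
  return 0;
}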
5574 /* Subroutine of fold() that optimizes comparisons against Infinities,
5575 either +Inf or -Inf.
5577 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5578 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5579 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5581 The function returns the constant folded tree if a simplification
5582 can be made, and NULL_TREE otherwise. */
5584 static tree
5585 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5587 enum machine_mode mode;
5588 REAL_VALUE_TYPE max;
5589 tree temp;
5590 bool neg;
5592 mode = TYPE_MODE (TREE_TYPE (arg0));
5594 /* For negative infinity swap the sense of the comparison. */
5595 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5596 if (neg)
5597 code = swap_tree_comparison (code);
5599 switch (code)
5601 case GT_EXPR:
5602 /* x > +Inf is always false, if we ignore sNaNs. */
5603 if (HONOR_SNANS (mode))
5604 return NULL_TREE;
5605 return omit_one_operand (type, integer_zero_node, arg0);
5607 case LE_EXPR:
5608 /* x <= +Inf is always true, if we don't care about NaNs. */
5609 if (! HONOR_NANS (mode))
5610 return omit_one_operand (type, integer_one_node, arg0);
5612 /* x <= +Inf is the same as x == x, i.e. x is not a NaN. */
5613 if (lang_hooks.decls.global_bindings_p () == 0
5614 && ! CONTAINS_PLACEHOLDER_P (arg0))
5616 arg0 = save_expr (arg0);
5617 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5619 break;
5621 case EQ_EXPR:
5622 case GE_EXPR:
5623 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5624 real_maxval (&max, neg, mode);
5625 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5626 arg0, build_real (TREE_TYPE (arg0), max)));
5628 case LT_EXPR:
5629 /* x < +Inf is always equal to x <= DBL_MAX. */
5630 real_maxval (&max, neg, mode);
5631 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5632 arg0, build_real (TREE_TYPE (arg0), max)));
5634 case NE_EXPR:
5635 /* x != +Inf is always equal to !(x > DBL_MAX). */
5636 real_maxval (&max, neg, mode);
5637 if (! HONOR_NANS (mode))
5638 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5639 arg0, build_real (TREE_TYPE (arg0), max)));
5641 /* The transformation below creates non-gimple code and thus is
5642 not appropriate if we are in gimple form. */
5643 if (in_gimple_form)
5644 return NULL_TREE;
5646 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5647 arg0, build_real (TREE_TYPE (arg0), max)));
5648 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5650 default:
5651 break;
5654 return NULL_TREE;
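
/* A standalone sketch of the equivalences used above, assuming IEEE
   doubles: x <= +Inf behaves like x == x (both are false only for
   NaN), and x == +Inf behaves like x > DBL_MAX.  */

#include <stdio.h>
#include <math.h>
#include <float.h>

int
main (void)
{
  double vals[] = { -INFINITY, -1.0, 0.0, DBL_MAX, INFINITY, NAN };

  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      double x = vals[i];
      printf ("x = %-8g  (x <= +Inf) = %d  (x == x) = %d  "
              "(x == +Inf) = %d  (x > DBL_MAX) = %d\n",
              x, x <= INFINITY, x == x, x == INFINITY, x > DBL_MAX);
    }
  return 0;
}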
5657 /* Subroutine of fold() that optimizes comparisons of a division by
5658 a nonzero integer constant against an integer constant, i.e.
5659 X/C1 op C2.
5661 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5662 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5663 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5665 The function returns the constant folded tree if a simplification
5666 can be made, and NULL_TREE otherwise. */
5668 static tree
5669 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5671 tree prod, tmp, hi, lo;
5672 tree arg00 = TREE_OPERAND (arg0, 0);
5673 tree arg01 = TREE_OPERAND (arg0, 1);
5674 unsigned HOST_WIDE_INT lpart;
5675 HOST_WIDE_INT hpart;
5676 int overflow;
5678 /* We have to do this the hard way to detect unsigned overflow.
5679 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5680 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5681 TREE_INT_CST_HIGH (arg01),
5682 TREE_INT_CST_LOW (arg1),
5683 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5684 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5685 prod = force_fit_type (prod, -1, overflow, false);
5687 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5689 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5690 lo = prod;
5692 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5693 overflow = add_double (TREE_INT_CST_LOW (prod),
5694 TREE_INT_CST_HIGH (prod),
5695 TREE_INT_CST_LOW (tmp),
5696 TREE_INT_CST_HIGH (tmp),
5697 &lpart, &hpart);
5698 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5699 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5700 TREE_CONSTANT_OVERFLOW (prod));
5702 else if (tree_int_cst_sgn (arg01) >= 0)
5704 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5705 switch (tree_int_cst_sgn (arg1))
5707 case -1:
5708 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5709 hi = prod;
5710 break;
5712 case 0:
5713 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5714 hi = tmp;
5715 break;
5717 case 1:
5718 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5719 lo = prod;
5720 break;
5722 default:
5723 gcc_unreachable ();
5726 else
5728 /* A negative divisor reverses the relational operators. */
5729 code = swap_tree_comparison (code);
5731 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5732 switch (tree_int_cst_sgn (arg1))
5734 case -1:
5735 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5736 lo = prod;
5737 break;
5739 case 0:
5740 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5741 lo = tmp;
5742 break;
5744 case 1:
5745 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5746 hi = prod;
5747 break;
5749 default:
5750 gcc_unreachable ();
5754 switch (code)
5756 case EQ_EXPR:
5757 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5758 return omit_one_operand (type, integer_zero_node, arg00);
5759 if (TREE_OVERFLOW (hi))
5760 return fold (build2 (GE_EXPR, type, arg00, lo));
5761 if (TREE_OVERFLOW (lo))
5762 return fold (build2 (LE_EXPR, type, arg00, hi));
5763 return build_range_check (type, arg00, 1, lo, hi);
5765 case NE_EXPR:
5766 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5767 return omit_one_operand (type, integer_one_node, arg00);
5768 if (TREE_OVERFLOW (hi))
5769 return fold (build2 (LT_EXPR, type, arg00, lo));
5770 if (TREE_OVERFLOW (lo))
5771 return fold (build2 (GT_EXPR, type, arg00, hi));
5772 return build_range_check (type, arg00, 0, lo, hi);
5774 case LT_EXPR:
5775 if (TREE_OVERFLOW (lo))
5776 return omit_one_operand (type, integer_zero_node, arg00);
5777 return fold (build2 (LT_EXPR, type, arg00, lo));
5779 case LE_EXPR:
5780 if (TREE_OVERFLOW (hi))
5781 return omit_one_operand (type, integer_one_node, arg00);
5782 return fold (build2 (LE_EXPR, type, arg00, hi));
5784 case GT_EXPR:
5785 if (TREE_OVERFLOW (hi))
5786 return omit_one_operand (type, integer_zero_node, arg00);
5787 return fold (build2 (GT_EXPR, type, arg00, hi));
5789 case GE_EXPR:
5790 if (TREE_OVERFLOW (lo))
5791 return omit_one_operand (type, integer_one_node, arg00);
5792 return fold (build2 (GE_EXPR, type, arg00, lo));
5794 default:
5795 break;
5798 return NULL_TREE;
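
/* A standalone brute-force check of the range rewrite above: with C's
   truncating division, x/3 == 4 holds exactly for x in [12, 14], and
   a negative divisor flips the bounds, so x/-3 == 4 holds exactly for
   x in [-14, -12].  */

#include <stdio.h>

int
main (void)
{
  for (int x = -30; x <= 30; x++)
    if ((x / 3 == 4) != (x >= 12 && x <= 14)
        || (x / -3 == 4) != (x >= -14 && x <= -12))
      {
        printf ("mismatch at %d\n", x);
        return 1;
      }
  printf ("x/3 == 4 is x in [12,14]; x/-3 == 4 is x in [-14,-12]\n");
  return 0;
}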
5802 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5803 equality/inequality test, then return a simplified form of
5804 the test using shifts and logical operations. Otherwise return
5805 NULL. TYPE is the desired result type. */
5807 tree
5808 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5809 tree result_type)
5811 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5812 operand 0. */
5813 if (code == TRUTH_NOT_EXPR)
5815 code = TREE_CODE (arg0);
5816 if (code != NE_EXPR && code != EQ_EXPR)
5817 return NULL_TREE;
5819 /* Extract the arguments of the EQ/NE. */
5820 arg1 = TREE_OPERAND (arg0, 1);
5821 arg0 = TREE_OPERAND (arg0, 0);
5823 /* This requires us to invert the code. */
5824 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5827 /* If this is testing a single bit, we can optimize the test. */
5828 if ((code == NE_EXPR || code == EQ_EXPR)
5829 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5830 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5832 tree inner = TREE_OPERAND (arg0, 0);
5833 tree type = TREE_TYPE (arg0);
5834 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5835 enum machine_mode operand_mode = TYPE_MODE (type);
5836 int ops_unsigned;
5837 tree signed_type, unsigned_type, intermediate_type;
5838 tree arg00;
5840 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5841 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5842 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5843 if (arg00 != NULL_TREE
5844 /* This is only a win if casting to a signed type is cheap,
5845 i.e. when arg00's type is not a partial mode. */
5846 && TYPE_PRECISION (TREE_TYPE (arg00))
5847 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5849 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5850 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5851 result_type, fold_convert (stype, arg00),
5852 fold_convert (stype, integer_zero_node)));
5855 /* Otherwise we have (A & C) != 0 where C is a single bit,
5856 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5857 Similarly for (A & C) == 0. */
5859 /* If INNER is a right shift of a constant and it plus BITNUM does
5860 not overflow, adjust BITNUM and INNER. */
5861 if (TREE_CODE (inner) == RSHIFT_EXPR
5862 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5863 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5864 && bitnum < TYPE_PRECISION (type)
5865 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5866 bitnum - TYPE_PRECISION (type)))
5868 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5869 inner = TREE_OPERAND (inner, 0);
5872 /* If we are going to be able to omit the AND below, we must do our
5873 operations as unsigned. If we must use the AND, we have a choice.
5874 Normally unsigned is faster, but for some machines signed is. */
5875 #ifdef LOAD_EXTEND_OP
5876 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5877 #else
5878 ops_unsigned = 1;
5879 #endif
5881 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5882 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5883 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5884 inner = fold_convert (intermediate_type, inner);
5886 if (bitnum != 0)
5887 inner = build2 (RSHIFT_EXPR, intermediate_type,
5888 inner, size_int (bitnum));
5890 if (code == EQ_EXPR)
5891 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5892 inner, integer_one_node));
5894 /* Put the AND last so it can combine with more things. */
5895 inner = build2 (BIT_AND_EXPR, intermediate_type,
5896 inner, integer_one_node);
5898 /* Make sure to return the proper type. */
5899 inner = fold_convert (result_type, inner);
5901 return inner;
5903 return NULL_TREE;
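
/* A standalone sketch of the two rewrites above, assuming a 32-bit
   two's complement int: testing a power-of-two bit becomes a
   shift-and-mask, and testing the sign bit becomes a signed compare
   against zero.  */

#include <stdio.h>

int
main (void)
{
  /* (a & 8) != 0 is ((a >> 3) & 1), since 8 == 1 << 3.  */
  for (unsigned a = 0; a < 32; a++)
    if (((a & 8) != 0) != ((a >> 3) & 1))
      {
        printf ("mismatch at %u\n", a);
        return 1;
      }

  /* (b & 0x80000000) != 0 on a 32-bit int is simply b < 0.  */
  for (int b = -4; b <= 4; b++)
    if ((((unsigned) b & 0x80000000u) != 0) != (b < 0))
      {
        printf ("mismatch at %d\n", b);
        return 1;
      }
  printf ("both rewrites agree on the sampled ranges\n");
  return 0;
}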
5906 /* Check whether we are allowed to reorder operands arg0 and arg1,
5907 such that the evaluation of arg1 occurs before arg0. */
5909 static bool
5910 reorder_operands_p (tree arg0, tree arg1)
5912 if (! flag_evaluation_order)
5913 return true;
5914 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5915 return true;
5916 return ! TREE_SIDE_EFFECTS (arg0)
5917 && ! TREE_SIDE_EFFECTS (arg1);
5920 /* Test whether it is preferable to swap two operands, ARG0 and
5921 ARG1, for example because ARG0 is an integer constant and ARG1
5922 isn't. If REORDER is true, only recommend swapping if we can
5923 evaluate the operands in reverse order. */
5925 bool
5926 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5928 STRIP_SIGN_NOPS (arg0);
5929 STRIP_SIGN_NOPS (arg1);
5931 if (TREE_CODE (arg1) == INTEGER_CST)
5932 return 0;
5933 if (TREE_CODE (arg0) == INTEGER_CST)
5934 return 1;
5936 if (TREE_CODE (arg1) == REAL_CST)
5937 return 0;
5938 if (TREE_CODE (arg0) == REAL_CST)
5939 return 1;
5941 if (TREE_CODE (arg1) == COMPLEX_CST)
5942 return 0;
5943 if (TREE_CODE (arg0) == COMPLEX_CST)
5944 return 1;
5946 if (TREE_CONSTANT (arg1))
5947 return 0;
5948 if (TREE_CONSTANT (arg0))
5949 return 1;
5951 if (optimize_size)
5952 return 0;
5954 if (reorder && flag_evaluation_order
5955 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5956 return 0;
5958 if (DECL_P (arg1))
5959 return 0;
5960 if (DECL_P (arg0))
5961 return 1;
5963 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
5964 for commutative and comparison operators. Ensuring a canonical
5965 form allows the optimizers to find additional redundancies without
5966 having to explicitly check for both orderings. */
5967 if (TREE_CODE (arg0) == SSA_NAME
5968 && TREE_CODE (arg1) == SSA_NAME
5969 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5970 return 1;
5972 return 0;
5975 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
5976 the step of the array. TYPE is the type of the expression. ADDR is the address.
5977 MULT is the multiplicative expression. If the function succeeds, the new
5978 address expression is returned. Otherwise NULL_TREE is returned. */
5980 static tree
5981 try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
5983 tree s, delta, step;
5984 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
5985 tree ref = TREE_OPERAND (addr, 0), pref;
5986 tree ret, pos;
5987 tree itype;
5989 STRIP_NOPS (arg0);
5990 STRIP_NOPS (arg1);
5992 if (TREE_CODE (arg0) == INTEGER_CST)
5994 s = arg0;
5995 delta = arg1;
5997 else if (TREE_CODE (arg1) == INTEGER_CST)
5999 s = arg1;
6000 delta = arg0;
6002 else
6003 return NULL_TREE;
6005 for (;; ref = TREE_OPERAND (ref, 0))
6007 if (TREE_CODE (ref) == ARRAY_REF)
6009 step = array_ref_element_size (ref);
6011 if (TREE_CODE (step) != INTEGER_CST)
6012 continue;
6014 itype = TREE_TYPE (step);
6016 /* If the type sizes do not match, we might run into problems
6017 when one of them would overflow. */
6018 if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
6019 continue;
6021 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6022 continue;
6024 delta = fold_convert (itype, delta);
6025 break;
6028 if (!handled_component_p (ref))
6029 return NULL_TREE;
6032 /* We found a suitable array reference. Copy everything up to it,
6033 and replace the index. */
6035 pref = TREE_OPERAND (addr, 0);
6036 ret = copy_node (pref);
6037 pos = ret;
6039 while (pref != ref)
6041 pref = TREE_OPERAND (pref, 0);
6042 TREE_OPERAND (pos, 0) = copy_node (pref);
6043 pos = TREE_OPERAND (pos, 0);
6046 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6047 TREE_OPERAND (pos, 1),
6048 delta));
6050 return build1 (ADDR_EXPR, type, ret);
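
/* A standalone sketch of the address identity behind this rewrite:
   stepping a byte address by sizeof-element times a count lands on
   the same place as adjusting the array index.  */

#include <stdio.h>

int
main (void)
{
  double a[16];
  int i1 = 3, i2 = 5;

  /* &a[i1] plus sizeof (double) * i2 bytes is exactly &a[i1 + i2];
     subtracting sizeof (double) * 2 bytes gives &a[i1 - 2].  */
  double *plus = (double *) ((char *) &a[i1] + sizeof (double) * i2);
  double *minus = (double *) ((char *) &a[i1] - sizeof (double) * 2);

  printf ("%d %d\n", plus == &a[i1 + i2], minus == &a[i1 - 2]);
  return 0;
}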
6053 /* Perform constant folding and related simplification of EXPR.
6054 The related simplifications include x*1 => x, x*0 => 0, etc.,
6055 and application of the associative law.
6056 NOP_EXPR conversions may be removed freely (as long as we
6057 are careful not to change the type of the overall expression).
6058 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6059 but we can constant-fold them if they have constant operands. */
6061 #ifdef ENABLE_FOLD_CHECKING
6062 # define fold(x) fold_1 (x)
6063 static tree fold_1 (tree);
6064 static
6065 #endif
6066 tree
6067 fold (tree expr)
6069 const tree t = expr;
6070 const tree type = TREE_TYPE (expr);
6071 tree t1 = NULL_TREE;
6072 tree tem;
6073 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6074 enum tree_code code = TREE_CODE (t);
6075 enum tree_code_class kind = TREE_CODE_CLASS (code);
6077 /* WINS will be nonzero when the switch is done
6078 if all operands are constant. */
6079 int wins = 1;
6081 /* Return right away if a constant. */
6082 if (kind == tcc_constant)
6083 return t;
6085 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6087 tree subop;
6089 /* Special case for conversion ops that can have fixed point args. */
6090 arg0 = TREE_OPERAND (t, 0);
6092 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6093 if (arg0 != 0)
6094 STRIP_SIGN_NOPS (arg0);
6096 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6097 subop = TREE_REALPART (arg0);
6098 else
6099 subop = arg0;
6101 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6102 && TREE_CODE (subop) != REAL_CST)
6103 /* Note that TREE_CONSTANT isn't enough:
6104 static var addresses are constant but we can't
6105 do arithmetic on them. */
6106 wins = 0;
6108 else if (IS_EXPR_CODE_CLASS (kind))
6110 int len = first_rtl_op (code);
6111 int i;
6112 for (i = 0; i < len; i++)
6114 tree op = TREE_OPERAND (t, i);
6115 tree subop;
6117 if (op == 0)
6118 continue; /* Valid for CALL_EXPR, at least. */
6120 /* Strip any conversions that don't change the mode. This is
6121 safe for every expression, except for a comparison expression
6122 because its signedness is derived from its operands. So, in
6123 the latter case, only strip conversions that don't change the
6124 signedness.
6126 Note that this is done as an internal manipulation within the
6127 constant folder, in order to find the simplest representation
6128 of the arguments so that their form can be studied. In any
6129 case, the appropriate type conversions should be put back in
6130 the tree that will get out of the constant folder. */
6131 if (kind == tcc_comparison)
6132 STRIP_SIGN_NOPS (op);
6133 else
6134 STRIP_NOPS (op);
6136 if (TREE_CODE (op) == COMPLEX_CST)
6137 subop = TREE_REALPART (op);
6138 else
6139 subop = op;
6141 if (TREE_CODE (subop) != INTEGER_CST
6142 && TREE_CODE (subop) != REAL_CST)
6143 /* Note that TREE_CONSTANT isn't enough:
6144 static var addresses are constant but we can't
6145 do arithmetic on them. */
6146 wins = 0;
6148 if (i == 0)
6149 arg0 = op;
6150 else if (i == 1)
6151 arg1 = op;
6155 /* If this is a commutative operation, and ARG0 is a constant, move it
6156 to ARG1 to reduce the number of tests below. */
6157 if (commutative_tree_code (code)
6158 && tree_swap_operands_p (arg0, arg1, true))
6159 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6160 TREE_OPERAND (t, 0)));
6162 /* Now WINS is set as described above,
6163 ARG0 is the first operand of EXPR,
6164 and ARG1 is the second operand (if it has more than one operand).
6166 First check for cases where an arithmetic operation is applied to a
6167 compound, conditional, or comparison operation. Push the arithmetic
6168 operation inside the compound or conditional to see if any folding
6169 can then be done. Convert comparison to conditional for this purpose.
6170 This also optimizes non-constant cases that used to be done in
6171 expand_expr.
6173 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
6174 where one of the operands is a truth value and the other is a truth
6175 value or a BIT_AND_EXPR with the constant 1. In that case, the
6176 code below would make the expression more complex. Change it to a
6177 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6178 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6180 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6181 || code == EQ_EXPR || code == NE_EXPR)
6182 && ((truth_value_p (TREE_CODE (arg0))
6183 && (truth_value_p (TREE_CODE (arg1))
6184 || (TREE_CODE (arg1) == BIT_AND_EXPR
6185 && integer_onep (TREE_OPERAND (arg1, 1)))))
6186 || (truth_value_p (TREE_CODE (arg1))
6187 && (truth_value_p (TREE_CODE (arg0))
6188 || (TREE_CODE (arg0) == BIT_AND_EXPR
6189 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6191 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6192 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6193 : TRUTH_XOR_EXPR,
6194 type, fold_convert (boolean_type_node, arg0),
6195 fold_convert (boolean_type_node, arg1)));
6197 if (code == EQ_EXPR)
6198 tem = invert_truthvalue (tem);
6200 return tem;
6203 if (TREE_CODE_CLASS (code) == tcc_unary)
6205 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6206 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6207 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6208 else if (TREE_CODE (arg0) == COND_EXPR)
6210 tree arg01 = TREE_OPERAND (arg0, 1);
6211 tree arg02 = TREE_OPERAND (arg0, 2);
6212 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6213 arg01 = fold (build1 (code, type, arg01));
6214 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6215 arg02 = fold (build1 (code, type, arg02));
6216 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6217 arg01, arg02));
6219 /* If this was a conversion, and all we did was to move it
6220 inside the COND_EXPR, bring it back out. But leave it if
6221 it is a conversion from integer to integer and the
6222 result precision is no wider than a word since such a
6223 conversion is cheap and may be optimized away by combine,
6224 while it couldn't if it were outside the COND_EXPR. Then return
6225 so we don't get into an infinite recursion loop taking the
6226 conversion out and then back in. */
6228 if ((code == NOP_EXPR || code == CONVERT_EXPR
6229 || code == NON_LVALUE_EXPR)
6230 && TREE_CODE (tem) == COND_EXPR
6231 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6232 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6233 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6234 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6235 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6236 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6237 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6238 && (INTEGRAL_TYPE_P
6239 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6240 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6241 tem = build1 (code, type,
6242 build3 (COND_EXPR,
6243 TREE_TYPE (TREE_OPERAND
6244 (TREE_OPERAND (tem, 1), 0)),
6245 TREE_OPERAND (tem, 0),
6246 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6247 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6248 return tem;
6250 else if (COMPARISON_CLASS_P (arg0))
6252 if (TREE_CODE (type) == BOOLEAN_TYPE)
6254 arg0 = copy_node (arg0);
6255 TREE_TYPE (arg0) = type;
6256 return arg0;
6258 else if (TREE_CODE (type) != INTEGER_TYPE)
6259 return fold (build3 (COND_EXPR, type, arg0,
6260 fold (build1 (code, type,
6261 integer_one_node)),
6262 fold (build1 (code, type,
6263 integer_zero_node))));
6266 else if (TREE_CODE_CLASS (code) == tcc_comparison
6267 && TREE_CODE (arg0) == COMPOUND_EXPR)
6268 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6269 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6270 else if (TREE_CODE_CLASS (code) == tcc_comparison
6271 && TREE_CODE (arg1) == COMPOUND_EXPR)
6272 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6273 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6274 else if (TREE_CODE_CLASS (code) == tcc_binary
6275 || TREE_CODE_CLASS (code) == tcc_comparison)
6277 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6278 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6279 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6280 arg1)));
6281 if (TREE_CODE (arg1) == COMPOUND_EXPR
6282 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6283 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6284 fold (build2 (code, type,
6285 arg0, TREE_OPERAND (arg1, 1))));
6287 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6289 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6290 /*cond_first_p=*/1);
6291 if (tem != NULL_TREE)
6292 return tem;
6295 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6297 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6298 /*cond_first_p=*/0);
6299 if (tem != NULL_TREE)
6300 return tem;
6304 switch (code)
6306 case CONST_DECL:
6307 return fold (DECL_INITIAL (t));
6309 case NOP_EXPR:
6310 case FLOAT_EXPR:
6311 case CONVERT_EXPR:
6312 case FIX_TRUNC_EXPR:
6313 case FIX_CEIL_EXPR:
6314 case FIX_FLOOR_EXPR:
6315 case FIX_ROUND_EXPR:
6316 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6317 return TREE_OPERAND (t, 0);
6319 /* Handle cases of two conversions in a row. */
6320 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6321 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6323 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6324 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6325 int inside_int = INTEGRAL_TYPE_P (inside_type);
6326 int inside_ptr = POINTER_TYPE_P (inside_type);
6327 int inside_float = FLOAT_TYPE_P (inside_type);
6328 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6329 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6330 int inter_int = INTEGRAL_TYPE_P (inter_type);
6331 int inter_ptr = POINTER_TYPE_P (inter_type);
6332 int inter_float = FLOAT_TYPE_P (inter_type);
6333 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6334 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6335 int final_int = INTEGRAL_TYPE_P (type);
6336 int final_ptr = POINTER_TYPE_P (type);
6337 int final_float = FLOAT_TYPE_P (type);
6338 unsigned int final_prec = TYPE_PRECISION (type);
6339 int final_unsignedp = TYPE_UNSIGNED (type);
6341 /* In addition to the cases of two conversions in a row
6342 handled below, if we are converting something to its own
6343 type via an object of identical or wider precision, neither
6344 conversion is needed. */
6345 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6346 && ((inter_int && final_int) || (inter_float && final_float))
6347 && inter_prec >= final_prec)
6348 return fold (build1 (code, type,
6349 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6351 /* Likewise, if the intermediate and final types are either both
6352 float or both integer, we don't need the middle conversion if
6353 it is wider than the final type and doesn't change the signedness
6354 (for integers). Avoid this if the final type is a pointer
6355 since then we sometimes need the inner conversion. Likewise if
6356 the outer has a precision not equal to the size of its mode. */
6357 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6358 || (inter_float && inside_float))
6359 && inter_prec >= inside_prec
6360 && (inter_float || inter_unsignedp == inside_unsignedp)
6361 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6362 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6363 && ! final_ptr)
6364 return fold (build1 (code, type,
6365 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6367 /* If we have a sign-extension of a zero-extended value, we can
6368 replace that by a single zero-extension. */
6369 if (inside_int && inter_int && final_int
6370 && inside_prec < inter_prec && inter_prec < final_prec
6371 && inside_unsignedp && !inter_unsignedp)
6372 return fold (build1 (code, type,
6373 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
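
/* An illustrative instance: with 8-bit chars, 16-bit shorts and
   32-bit ints, (int) (short) (unsigned char) x sign-extends a value
   known to lie in [0, 255], so it equals the single zero-extension
   (int) (unsigned char) x.  */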
6375 /* Two conversions in a row are not needed unless:
6376 - some conversion is floating-point (overstrict for now), or
6377 - the intermediate type is narrower than both initial and
6378 final, or
6379 - the intermediate type and innermost type differ in signedness,
6380 and the outermost type is wider than the intermediate, or
6381 - the initial type is a pointer type and the precisions of the
6382 intermediate and final types differ, or
6383 - the final type is a pointer type and the precisions of the
6384 initial and intermediate types differ. */
6385 if (! inside_float && ! inter_float && ! final_float
6386 && (inter_prec > inside_prec || inter_prec > final_prec)
6387 && ! (inside_int && inter_int
6388 && inter_unsignedp != inside_unsignedp
6389 && inter_prec < final_prec)
6390 && ((inter_unsignedp && inter_prec > inside_prec)
6391 == (final_unsignedp && final_prec > inter_prec))
6392 && ! (inside_ptr && inter_prec != final_prec)
6393 && ! (final_ptr && inside_prec != inter_prec)
6394 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6395 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6396 && ! final_ptr)
6397 return fold (build1 (code, type,
6398 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6401 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6402 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6403 /* Detect assigning a bitfield. */
6404 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6405 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6407 /* Don't leave an assignment inside a conversion
6408 unless assigning a bitfield. */
6409 tree prev = TREE_OPERAND (t, 0);
6410 tem = copy_node (t);
6411 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6412 /* First do the assignment, then return converted constant. */
6413 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6414 TREE_NO_WARNING (tem) = 1;
6415 TREE_USED (tem) = 1;
6416 return tem;
6419 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6420 constant (if x has signed type, the sign bit cannot be set
6421 in c). This folds extension into the BIT_AND_EXPR. */
6422 if (INTEGRAL_TYPE_P (type)
6423 && TREE_CODE (type) != BOOLEAN_TYPE
6424 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6425 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6427 tree and = TREE_OPERAND (t, 0);
6428 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6429 int change = 0;
6431 if (TYPE_UNSIGNED (TREE_TYPE (and))
6432 || (TYPE_PRECISION (type)
6433 <= TYPE_PRECISION (TREE_TYPE (and))))
6434 change = 1;
6435 else if (TYPE_PRECISION (TREE_TYPE (and1))
6436 <= HOST_BITS_PER_WIDE_INT
6437 && host_integerp (and1, 1))
6439 unsigned HOST_WIDE_INT cst;
6441 cst = tree_low_cst (and1, 1);
6442 cst &= (HOST_WIDE_INT) -1
6443 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6444 change = (cst == 0);
6445 #ifdef LOAD_EXTEND_OP
6446 if (change
6447 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6448 == ZERO_EXTEND))
6450 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6451 and0 = fold_convert (uns, and0);
6452 and1 = fold_convert (uns, and1);
6454 #endif
6456 if (change)
6457 return fold (build2 (BIT_AND_EXPR, type,
6458 fold_convert (type, and0),
6459 fold_convert (type, and1)));
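
/* An illustrative instance: on a 32-bit int i, (unsigned char) (i & 0xff)
   becomes (unsigned char) i & 0xff, folding the truncation into the
   mask; widening also works when the constant leaves the sign bit
   clear, e.g. (int) (c & 0x3f) on a signed char c becomes
   (int) c & 0x3f.  */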
6462 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6463 T2 being pointers to types of the same size. */
6464 if (POINTER_TYPE_P (TREE_TYPE (t))
6465 && BINARY_CLASS_P (arg0)
6466 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6467 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6469 tree arg00 = TREE_OPERAND (arg0, 0);
6470 tree t0 = TREE_TYPE (t);
6471 tree t1 = TREE_TYPE (arg00);
6472 tree tt0 = TREE_TYPE (t0);
6473 tree tt1 = TREE_TYPE (t1);
6474 tree s0 = TYPE_SIZE (tt0);
6475 tree s1 = TYPE_SIZE (tt1);
6477 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6478 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6479 TREE_OPERAND (arg0, 1));
6482 tem = fold_convert_const (code, type, arg0);
6483 return tem ? tem : t;
6485 case VIEW_CONVERT_EXPR:
6486 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6487 return build1 (VIEW_CONVERT_EXPR, type,
6488 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6489 return t;
6491 case COMPONENT_REF:
6492 if (TREE_CODE (arg0) == CONSTRUCTOR
6493 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6495 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6496 if (m)
6497 return TREE_VALUE (m);
6499 return t;
6501 case RANGE_EXPR:
6502 if (TREE_CONSTANT (t) != wins)
6504 tem = copy_node (t);
6505 TREE_CONSTANT (tem) = wins;
6506 TREE_INVARIANT (tem) = wins;
6507 return tem;
6509 return t;
6511 case NEGATE_EXPR:
6512 if (negate_expr_p (arg0))
6513 return fold_convert (type, negate_expr (arg0));
6514 return t;
6516 case ABS_EXPR:
6517 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6518 return fold_abs_const (arg0, type);
6519 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6520 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6521 /* Convert fabs((double)float) into (double)fabsf(float). */
6522 else if (TREE_CODE (arg0) == NOP_EXPR
6523 && TREE_CODE (type) == REAL_TYPE)
6525 tree targ0 = strip_float_extensions (arg0);
6526 if (targ0 != arg0)
6527 return fold_convert (type, fold (build1 (ABS_EXPR,
6528 TREE_TYPE (targ0),
6529 targ0)));
6531 else if (tree_expr_nonnegative_p (arg0))
6532 return arg0;
6533 return t;
6535 case CONJ_EXPR:
6536 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6537 return fold_convert (type, arg0);
6538 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6539 return build2 (COMPLEX_EXPR, type,
6540 TREE_OPERAND (arg0, 0),
6541 negate_expr (TREE_OPERAND (arg0, 1)));
6542 else if (TREE_CODE (arg0) == COMPLEX_CST)
6543 return build_complex (type, TREE_REALPART (arg0),
6544 negate_expr (TREE_IMAGPART (arg0)));
6545 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6546 return fold (build2 (TREE_CODE (arg0), type,
6547 fold (build1 (CONJ_EXPR, type,
6548 TREE_OPERAND (arg0, 0))),
6549 fold (build1 (CONJ_EXPR, type,
6550 TREE_OPERAND (arg0, 1)))));
6551 else if (TREE_CODE (arg0) == CONJ_EXPR)
6552 return TREE_OPERAND (arg0, 0);
6553 return t;
6555 case BIT_NOT_EXPR:
6556 if (TREE_CODE (arg0) == INTEGER_CST)
6557 return fold_not_const (arg0, type);
6558 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6559 return TREE_OPERAND (arg0, 0);
6560 return t;
6562 case PLUS_EXPR:
6563 /* A + (-B) -> A - B */
6564 if (TREE_CODE (arg1) == NEGATE_EXPR)
6565 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6566 /* (-A) + B -> B - A */
6567 if (TREE_CODE (arg0) == NEGATE_EXPR
6568 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6569 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6570 if (! FLOAT_TYPE_P (type))
6572 if (integer_zerop (arg1))
6573 return non_lvalue (fold_convert (type, arg0));
6575 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6576 with a constant, and the two constants have no bits in common,
6577 we should treat this as a BIT_IOR_EXPR since this may produce more
6578 simplifications. */
6579 if (TREE_CODE (arg0) == BIT_AND_EXPR
6580 && TREE_CODE (arg1) == BIT_AND_EXPR
6581 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6582 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6583 && integer_zerop (const_binop (BIT_AND_EXPR,
6584 TREE_OPERAND (arg0, 1),
6585 TREE_OPERAND (arg1, 1), 0)))
6587 code = BIT_IOR_EXPR;
6588 goto bit_ior;
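
/* An illustrative instance: in (x & 0xF0) + (y & 0x0F) the masks
   share no bits, so the addition can never carry and the sum equals
   (x & 0xF0) | (y & 0x0F).  */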
6591 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6592 (plus (plus (mult) (mult)) (foo)) so that we can
6593 take advantage of the factoring cases below. */
6594 if (((TREE_CODE (arg0) == PLUS_EXPR
6595 || TREE_CODE (arg0) == MINUS_EXPR)
6596 && TREE_CODE (arg1) == MULT_EXPR)
6597 || ((TREE_CODE (arg1) == PLUS_EXPR
6598 || TREE_CODE (arg1) == MINUS_EXPR)
6599 && TREE_CODE (arg0) == MULT_EXPR))
6601 tree parg0, parg1, parg, marg;
6602 enum tree_code pcode;
6604 if (TREE_CODE (arg1) == MULT_EXPR)
6605 parg = arg0, marg = arg1;
6606 else
6607 parg = arg1, marg = arg0;
6608 pcode = TREE_CODE (parg);
6609 parg0 = TREE_OPERAND (parg, 0);
6610 parg1 = TREE_OPERAND (parg, 1);
6611 STRIP_NOPS (parg0);
6612 STRIP_NOPS (parg1);
6614 if (TREE_CODE (parg0) == MULT_EXPR
6615 && TREE_CODE (parg1) != MULT_EXPR)
6616 return fold (build2 (pcode, type,
6617 fold (build2 (PLUS_EXPR, type,
6618 fold_convert (type, parg0),
6619 fold_convert (type, marg))),
6620 fold_convert (type, parg1)));
6621 if (TREE_CODE (parg0) != MULT_EXPR
6622 && TREE_CODE (parg1) == MULT_EXPR)
6623 return fold (build2 (PLUS_EXPR, type,
6624 fold_convert (type, parg0),
6625 fold (build2 (pcode, type,
6626 fold_convert (type, marg),
6627 fold_convert (type,
6628 parg1)))));
6631 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6633 tree arg00, arg01, arg10, arg11;
6634 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6636 /* (A * C) + (B * C) -> (A+B) * C.
6637 We are most concerned about the case where C is a constant,
6638 but other combinations show up during loop reduction. Since
6639 it is not difficult, try all four possibilities. */
6641 arg00 = TREE_OPERAND (arg0, 0);
6642 arg01 = TREE_OPERAND (arg0, 1);
6643 arg10 = TREE_OPERAND (arg1, 0);
6644 arg11 = TREE_OPERAND (arg1, 1);
6645 same = NULL_TREE;
6647 if (operand_equal_p (arg01, arg11, 0))
6648 same = arg01, alt0 = arg00, alt1 = arg10;
6649 else if (operand_equal_p (arg00, arg10, 0))
6650 same = arg00, alt0 = arg01, alt1 = arg11;
6651 else if (operand_equal_p (arg00, arg11, 0))
6652 same = arg00, alt0 = arg01, alt1 = arg10;
6653 else if (operand_equal_p (arg01, arg10, 0))
6654 same = arg01, alt0 = arg00, alt1 = arg11;
6656 /* No identical multiplicands; see if we can find a common
6657 power-of-two factor in non-power-of-two multiplies. This
6658 can help in multi-dimensional array access. */
6659 else if (TREE_CODE (arg01) == INTEGER_CST
6660 && TREE_CODE (arg11) == INTEGER_CST
6661 && TREE_INT_CST_HIGH (arg01) == 0
6662 && TREE_INT_CST_HIGH (arg11) == 0)
6664 HOST_WIDE_INT int01, int11, tmp;
6665 int01 = TREE_INT_CST_LOW (arg01);
6666 int11 = TREE_INT_CST_LOW (arg11);
6668 /* Move min of absolute values to int11. */
6669 if ((int01 >= 0 ? int01 : -int01)
6670 < (int11 >= 0 ? int11 : -int11))
6672 tmp = int01, int01 = int11, int11 = tmp;
6673 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6674 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6677 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6679 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6680 build_int_cst (NULL_TREE,
6681 int01 / int11)));
6682 alt1 = arg10;
6683 same = arg11;
6687 if (same)
6688 return fold (build2 (MULT_EXPR, type,
6689 fold (build2 (PLUS_EXPR, type,
6690 fold_convert (type, alt0),
6691 fold_convert (type, alt1))),
6692 same));
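
/* An illustrative instance: x*12 + y*4 has no identical
   multiplicand, but 4 is a power of two dividing 12, so it factors
   as (x*3 + y) * 4.  */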
6695 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
6696 of the array. The loop optimizer sometimes produces this type of
6697 expression. */
6698 if (TREE_CODE (arg0) == ADDR_EXPR
6699 && TREE_CODE (arg1) == MULT_EXPR)
6701 tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
6702 if (tem)
6703 return fold (tem);
6705 else if (TREE_CODE (arg1) == ADDR_EXPR
6706 && TREE_CODE (arg0) == MULT_EXPR)
6708 tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
6709 if (tem)
6710 return fold (tem);
6713 else
6715 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6716 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6717 return non_lvalue (fold_convert (type, arg0));
6719 /* Likewise if the operands are reversed. */
6720 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6721 return non_lvalue (fold_convert (type, arg1));
6723 /* Convert X + -C into X - C. */
6724 if (TREE_CODE (arg1) == REAL_CST
6725 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6727 tem = fold_negate_const (arg1, type);
6728 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6729 return fold (build2 (MINUS_EXPR, type,
6730 fold_convert (type, arg0),
6731 fold_convert (type, tem)));
6734 /* Convert x+x into x*2.0. */
6735 if (operand_equal_p (arg0, arg1, 0)
6736 && SCALAR_FLOAT_TYPE_P (type))
6737 return fold (build2 (MULT_EXPR, type, arg0,
6738 build_real (type, dconst2)));
6740 /* Convert x*c+x into x*(c+1). */
6741 if (flag_unsafe_math_optimizations
6742 && TREE_CODE (arg0) == MULT_EXPR
6743 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6744 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6745 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6747 REAL_VALUE_TYPE c;
6749 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6750 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6751 return fold (build2 (MULT_EXPR, type, arg1,
6752 build_real (type, c)));
6755 /* Convert x+x*c into x*(c+1). */
6756 if (flag_unsafe_math_optimizations
6757 && TREE_CODE (arg1) == MULT_EXPR
6758 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6759 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6760 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6762 REAL_VALUE_TYPE c;
6764 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6765 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6766 return fold (build2 (MULT_EXPR, type, arg0,
6767 build_real (type, c)));
6770 /* Convert x*c1+x*c2 into x*(c1+c2). */
6771 if (flag_unsafe_math_optimizations
6772 && TREE_CODE (arg0) == MULT_EXPR
6773 && TREE_CODE (arg1) == MULT_EXPR
6774 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6775 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6776 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6777 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6778 && operand_equal_p (TREE_OPERAND (arg0, 0),
6779 TREE_OPERAND (arg1, 0), 0))
6781 REAL_VALUE_TYPE c1, c2;
6783 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6784 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6785 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6786 return fold (build2 (MULT_EXPR, type,
6787 TREE_OPERAND (arg0, 0),
6788 build_real (type, c1)));
6790 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6791 if (flag_unsafe_math_optimizations
6792 && TREE_CODE (arg1) == PLUS_EXPR
6793 && TREE_CODE (arg0) != MULT_EXPR)
6795 tree tree10 = TREE_OPERAND (arg1, 0);
6796 tree tree11 = TREE_OPERAND (arg1, 1);
6797 if (TREE_CODE (tree11) == MULT_EXPR
6798 && TREE_CODE (tree10) == MULT_EXPR)
6800 tree tree0;
6801 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6802 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6805 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
6806 if (flag_unsafe_math_optimizations
6807 && TREE_CODE (arg0) == PLUS_EXPR
6808 && TREE_CODE (arg1) != MULT_EXPR)
6810 tree tree00 = TREE_OPERAND (arg0, 0);
6811 tree tree01 = TREE_OPERAND (arg0, 1);
6812 if (TREE_CODE (tree01) == MULT_EXPR
6813 && TREE_CODE (tree00) == MULT_EXPR)
6815 tree tree0;
6816 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6817 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6822 bit_rotate:
6823 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6824 is a rotate of A by C1 bits. */
6825 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6826 is a rotate of A by B bits. */
6828 enum tree_code code0, code1;
6829 code0 = TREE_CODE (arg0);
6830 code1 = TREE_CODE (arg1);
6831 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6832 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6833 && operand_equal_p (TREE_OPERAND (arg0, 0),
6834 TREE_OPERAND (arg1, 0), 0)
6835 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6837 tree tree01, tree11;
6838 enum tree_code code01, code11;
6840 tree01 = TREE_OPERAND (arg0, 1);
6841 tree11 = TREE_OPERAND (arg1, 1);
6842 STRIP_NOPS (tree01);
6843 STRIP_NOPS (tree11);
6844 code01 = TREE_CODE (tree01);
6845 code11 = TREE_CODE (tree11);
6846 if (code01 == INTEGER_CST
6847 && code11 == INTEGER_CST
6848 && TREE_INT_CST_HIGH (tree01) == 0
6849 && TREE_INT_CST_HIGH (tree11) == 0
6850 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6851 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6852 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6853 code0 == LSHIFT_EXPR ? tree01 : tree11);
6854 else if (code11 == MINUS_EXPR)
6856 tree tree110, tree111;
6857 tree110 = TREE_OPERAND (tree11, 0);
6858 tree111 = TREE_OPERAND (tree11, 1);
6859 STRIP_NOPS (tree110);
6860 STRIP_NOPS (tree111);
6861 if (TREE_CODE (tree110) == INTEGER_CST
6862 && 0 == compare_tree_int (tree110,
6863 TYPE_PRECISION
6864 (TREE_TYPE (TREE_OPERAND
6865 (arg0, 0))))
6866 && operand_equal_p (tree01, tree111, 0))
6867 return build2 ((code0 == LSHIFT_EXPR
6868 ? LROTATE_EXPR
6869 : RROTATE_EXPR),
6870 type, TREE_OPERAND (arg0, 0), tree01);
6872 else if (code01 == MINUS_EXPR)
6874 tree tree010, tree011;
6875 tree010 = TREE_OPERAND (tree01, 0);
6876 tree011 = TREE_OPERAND (tree01, 1);
6877 STRIP_NOPS (tree010);
6878 STRIP_NOPS (tree011);
6879 if (TREE_CODE (tree010) == INTEGER_CST
6880 && 0 == compare_tree_int (tree010,
6881 TYPE_PRECISION
6882 (TREE_TYPE (TREE_OPERAND
6883 (arg0, 0))))
6884 && operand_equal_p (tree11, tree011, 0))
6885 return build2 ((code0 != LSHIFT_EXPR
6886 ? LROTATE_EXPR
6887 : RROTATE_EXPR),
6888 type, TREE_OPERAND (arg0, 0), tree11);
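
/* An illustrative instance: for a 32-bit unsigned A, the two halves
   of (A << 3) + (A >> 29) occupy disjoint bits, so the addition never
   carries and the sum is A rotated left by 3; likewise
   (A << B) + (A >> (32 - B)) is A rotated left by B.  */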
6893 associate:
6894 /* In most languages, we can't associate operations on floats through
6895 parentheses. Rather than remember where the parentheses were, we
6896 don't associate floats at all, unless the user has specified
6897 -funsafe-math-optimizations. */
6899 if (! wins
6900 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6902 tree var0, con0, lit0, minus_lit0;
6903 tree var1, con1, lit1, minus_lit1;
6905 /* Split both trees into variables, constants, and literals. Then
6906 associate each group together, the constants with literals,
6907 then the result with variables. This increases the chances of
6908 literals being recombined later and of generating relocatable
6909 expressions for the sum of a constant and literal. */
6910 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6911 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6912 code == MINUS_EXPR);
6914 /* Only do something if we found more than two objects. Otherwise,
6915 nothing has changed and we risk infinite recursion. */
6916 if (2 < ((var0 != 0) + (var1 != 0)
6917 + (con0 != 0) + (con1 != 0)
6918 + (lit0 != 0) + (lit1 != 0)
6919 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6921 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6922 if (code == MINUS_EXPR)
6923 code = PLUS_EXPR;
6925 var0 = associate_trees (var0, var1, code, type);
6926 con0 = associate_trees (con0, con1, code, type);
6927 lit0 = associate_trees (lit0, lit1, code, type);
6928 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6930 /* Preserve the MINUS_EXPR if the negative part of the literal is
6931 greater than the positive part. Otherwise, the multiplicative
6932 folding code (i.e. extract_muldiv) may be fooled when unsigned
6933 constants are subtracted, as in the following
6934 example: ((X*2 + 4) - 8U)/2. */
6935 if (minus_lit0 && lit0)
6937 if (TREE_CODE (lit0) == INTEGER_CST
6938 && TREE_CODE (minus_lit0) == INTEGER_CST
6939 && tree_int_cst_lt (lit0, minus_lit0))
6941 minus_lit0 = associate_trees (minus_lit0, lit0,
6942 MINUS_EXPR, type);
6943 lit0 = 0;
6945 else
6947 lit0 = associate_trees (lit0, minus_lit0,
6948 MINUS_EXPR, type);
6949 minus_lit0 = 0;
6952 if (minus_lit0)
6954 if (con0 == 0)
6955 return fold_convert (type,
6956 associate_trees (var0, minus_lit0,
6957 MINUS_EXPR, type));
6958 else
6960 con0 = associate_trees (con0, minus_lit0,
6961 MINUS_EXPR, type);
6962 return fold_convert (type,
6963 associate_trees (var0, con0,
6964 PLUS_EXPR, type));
6968 con0 = associate_trees (con0, lit0, code, type);
6969 return fold_convert (type, associate_trees (var0, con0,
6970 code, type));
6974 binary:
6975 if (wins)
6976 t1 = const_binop (code, arg0, arg1, 0);
6977 if (t1 != NULL_TREE)
6979 /* The return value should always have
6980 the same type as the original expression. */
6981 if (TREE_TYPE (t1) != type)
6982 t1 = fold_convert (type, t1);
6984 return t1;
6986 return t;
6988 case MINUS_EXPR:
6989 /* A - (-B) -> A + B */
6990 if (TREE_CODE (arg1) == NEGATE_EXPR)
6991 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6992 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6993 if (TREE_CODE (arg0) == NEGATE_EXPR
6994 && (FLOAT_TYPE_P (type)
6995 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6996 && negate_expr_p (arg1)
6997 && reorder_operands_p (arg0, arg1))
6998 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6999 TREE_OPERAND (arg0, 0)));
7001 if (! FLOAT_TYPE_P (type))
7003 if (! wins && integer_zerop (arg0))
7004 return negate_expr (fold_convert (type, arg1));
7005 if (integer_zerop (arg1))
7006 return non_lvalue (fold_convert (type, arg0));
7008 /* Fold A - (A & B) into ~B & A. */
7009 if (!TREE_SIDE_EFFECTS (arg0)
7010 && TREE_CODE (arg1) == BIT_AND_EXPR)
7012 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7013 return fold (build2 (BIT_AND_EXPR, type,
7014 fold (build1 (BIT_NOT_EXPR, type,
7015 TREE_OPERAND (arg1, 0))),
7016 arg0));
7017 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7018 return fold (build2 (BIT_AND_EXPR, type,
7019 fold (build1 (BIT_NOT_EXPR, type,
7020 TREE_OPERAND (arg1, 1))),
7021 arg0));
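
/* An illustrative instance: A = 12 (1100b), B = 10 (1010b) gives
   A & B = 8, so A - (A & B) = 4, and ~B & A = 0101b & 1100b = 4 as
   well; the subtraction only clears bits that are already set in A,
   so no borrow can occur.  */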
7024 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7025 any power of 2 minus 1. */
7026 if (TREE_CODE (arg0) == BIT_AND_EXPR
7027 && TREE_CODE (arg1) == BIT_AND_EXPR
7028 && operand_equal_p (TREE_OPERAND (arg0, 0),
7029 TREE_OPERAND (arg1, 0), 0))
7031 tree mask0 = TREE_OPERAND (arg0, 1);
7032 tree mask1 = TREE_OPERAND (arg1, 1);
7033 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7035 if (operand_equal_p (tem, mask1, 0))
7037 tem = fold (build2 (BIT_XOR_EXPR, type,
7038 TREE_OPERAND (arg0, 0), mask1));
7039 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7044 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7045 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7046 return non_lvalue (fold_convert (type, arg0));
7048 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7049 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7050 (-ARG1 + ARG0) reduces to -ARG1. */
7051 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7052 return negate_expr (fold_convert (type, arg1));
7054 /* Fold &x - &x. This can happen from &x.foo - &x.
7055 This is unsafe for certain floats even in non-IEEE formats.
7056 In IEEE, it is unsafe because it does wrong for NaNs.
7057 Also note that operand_equal_p is always false if an operand
7058 is volatile. */
7060 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7061 && operand_equal_p (arg0, arg1, 0))
7062 return fold_convert (type, integer_zero_node);
7064 /* A - B -> A + (-B) if B is easily negatable. */
7065 if (!wins && negate_expr_p (arg1)
7066 && ((FLOAT_TYPE_P (type)
7067 /* Avoid this transformation if B is a positive REAL_CST. */
7068 && (TREE_CODE (arg1) != REAL_CST
7069 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7070 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7071 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7073 /* Try folding difference of addresses. */
7075 HOST_WIDE_INT diff;
7077 if (TREE_CODE (arg0) == ADDR_EXPR
7078 && TREE_CODE (arg1) == ADDR_EXPR
7079 && ptr_difference_const (TREE_OPERAND (arg0, 0),
7080 TREE_OPERAND (arg1, 0),
7081 &diff))
7082 return build_int_cst_type (type, diff);
7085 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7086 of the array. The loop optimizer sometimes produces this type of
7087 expression. */
7088 if (TREE_CODE (arg0) == ADDR_EXPR
7089 && TREE_CODE (arg1) == MULT_EXPR)
7091 tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
7092 if (tem)
7093 return fold (tem);
7096 if (TREE_CODE (arg0) == MULT_EXPR
7097 && TREE_CODE (arg1) == MULT_EXPR
7098 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7100 /* (A * C) - (B * C) -> (A-B) * C. */
7101 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7102 TREE_OPERAND (arg1, 1), 0))
7103 return fold (build2 (MULT_EXPR, type,
7104 fold (build2 (MINUS_EXPR, type,
7105 TREE_OPERAND (arg0, 0),
7106 TREE_OPERAND (arg1, 0))),
7107 TREE_OPERAND (arg0, 1)));
7108 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7109 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7110 TREE_OPERAND (arg1, 0), 0))
7111 return fold (build2 (MULT_EXPR, type,
7112 TREE_OPERAND (arg0, 0),
7113 fold (build2 (MINUS_EXPR, type,
7114 TREE_OPERAND (arg0, 1),
7115 TREE_OPERAND (arg1, 1)))));
7118 goto associate;
7120 case MULT_EXPR:
7121 /* (-A) * (-B) -> A * B */
7122 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7123 return fold (build2 (MULT_EXPR, type,
7124 TREE_OPERAND (arg0, 0),
7125 negate_expr (arg1)));
7126 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7127 return fold (build2 (MULT_EXPR, type,
7128 negate_expr (arg0),
7129 TREE_OPERAND (arg1, 0)));
7131 if (! FLOAT_TYPE_P (type))
7133 if (integer_zerop (arg1))
7134 return omit_one_operand (type, arg1, arg0);
7135 if (integer_onep (arg1))
7136 return non_lvalue (fold_convert (type, arg0));
7138 /* (a * (1 << b)) is (a << b) */
7139 if (TREE_CODE (arg1) == LSHIFT_EXPR
7140 && integer_onep (TREE_OPERAND (arg1, 0)))
7141 return fold (build2 (LSHIFT_EXPR, type, arg0,
7142 TREE_OPERAND (arg1, 1)));
7143 if (TREE_CODE (arg0) == LSHIFT_EXPR
7144 && integer_onep (TREE_OPERAND (arg0, 0)))
7145 return fold (build2 (LSHIFT_EXPR, type, arg1,
7146 TREE_OPERAND (arg0, 1)));
7148 if (TREE_CODE (arg1) == INTEGER_CST
7149 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7150 fold_convert (type, arg1),
7151 code, NULL_TREE)))
7152 return fold_convert (type, tem);
7155 else
7157 /* Maybe fold x * 0 to 0. The expressions aren't the same
7158 when x is NaN, since x * 0 is also NaN. Nor are they the
7159 same in modes with signed zeros, since multiplying a
7160 negative value by 0 gives -0, not +0. */
7161 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7163 && real_zerop (arg1))
7164 return omit_one_operand (type, arg1, arg0);
7165 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7166 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7167 && real_onep (arg1))
7168 return non_lvalue (fold_convert (type, arg0));
7170 /* Transform x * -1.0 into -x. */
7171 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7172 && real_minus_onep (arg1))
7173 return fold_convert (type, negate_expr (arg0));
7175 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7176 if (flag_unsafe_math_optimizations
7177 && TREE_CODE (arg0) == RDIV_EXPR
7178 && TREE_CODE (arg1) == REAL_CST
7179 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7181 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7182 arg1, 0);
7183 if (tem)
7184 return fold (build2 (RDIV_EXPR, type, tem,
7185 TREE_OPERAND (arg0, 1)));
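/* e.g. (2.0/x) * 3.0 folds to 6.0/x; the rounding of the two forms
   can differ, hence the flag_unsafe_math_optimizations guard.  */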
7188 if (flag_unsafe_math_optimizations)
7190 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7191 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7193 /* Optimizations of root(...)*root(...). */
7194 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7196 tree rootfn, arg, arglist;
7197 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7198 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7200 /* Optimize sqrt(x)*sqrt(x) as x. */
7201 if (BUILTIN_SQRT_P (fcode0)
7202 && operand_equal_p (arg00, arg10, 0)
7203 && ! HONOR_SNANS (TYPE_MODE (type)))
7204 return arg00;
7206 /* Optimize root(x)*root(y) as root(x*y). */
7207 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7208 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7209 arglist = build_tree_list (NULL_TREE, arg);
7210 return build_function_call_expr (rootfn, arglist);
7213 /* Optimize expN(x)*expN(y) as expN(x+y). */
7214 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7216 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7217 tree arg = build2 (PLUS_EXPR, type,
7218 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7219 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7220 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7221 return build_function_call_expr (expfn, arglist);
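/* e.g. exp(x) * exp(y) becomes exp(x + y).  */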
7224 /* Optimizations of pow(...)*pow(...). */
7225 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7226 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7227 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7229 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7230 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7231 1)));
7232 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7233 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7234 1)));
7236 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7237 if (operand_equal_p (arg01, arg11, 0))
7239 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7240 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7241 tree arglist = tree_cons (NULL_TREE, fold (arg),
7242 build_tree_list (NULL_TREE,
7243 arg01));
7244 return build_function_call_expr (powfn, arglist);
7247 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7248 if (operand_equal_p (arg00, arg10, 0))
7250 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7251 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7252 tree arglist = tree_cons (NULL_TREE, arg00,
7253 build_tree_list (NULL_TREE,
7254 arg));
7255 return build_function_call_expr (powfn, arglist);
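/* e.g. pow(x,3.0) * pow(y,3.0) becomes pow(x*y,3.0), and
   pow(x,2.0) * pow(x,3.0) becomes pow(x,5.0).  */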
7259 /* Optimize tan(x)*cos(x) as sin(x). */
7260 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7261 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7262 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7263 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7264 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7265 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7266 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7267 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7269 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7271 if (sinfn != NULL_TREE)
7272 return build_function_call_expr (sinfn,
7273 TREE_OPERAND (arg0, 1));
7276 /* Optimize x*pow(x,c) as pow(x,c+1). */
7277 if (fcode1 == BUILT_IN_POW
7278 || fcode1 == BUILT_IN_POWF
7279 || fcode1 == BUILT_IN_POWL)
7281 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7282 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7283 1)));
7284 if (TREE_CODE (arg11) == REAL_CST
7285 && ! TREE_CONSTANT_OVERFLOW (arg11)
7286 && operand_equal_p (arg0, arg10, 0))
7288 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7289 REAL_VALUE_TYPE c;
7290 tree arg, arglist;
7292 c = TREE_REAL_CST (arg11);
7293 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7294 arg = build_real (type, c);
7295 arglist = build_tree_list (NULL_TREE, arg);
7296 arglist = tree_cons (NULL_TREE, arg0, arglist);
7297 return build_function_call_expr (powfn, arglist);
7301 /* Optimize pow(x,c)*x as pow(x,c+1). */
7302 if (fcode0 == BUILT_IN_POW
7303 || fcode0 == BUILT_IN_POWF
7304 || fcode0 == BUILT_IN_POWL)
7306 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7307 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7308 1)));
7309 if (TREE_CODE (arg01) == REAL_CST
7310 && ! TREE_CONSTANT_OVERFLOW (arg01)
7311 && operand_equal_p (arg1, arg00, 0))
7313 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7314 REAL_VALUE_TYPE c;
7315 tree arg, arglist;
7317 c = TREE_REAL_CST (arg01);
7318 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7319 arg = build_real (type, c);
7320 arglist = build_tree_list (NULL_TREE, arg);
7321 arglist = tree_cons (NULL_TREE, arg1, arglist);
7322 return build_function_call_expr (powfn, arglist);
7326 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7327 if (! optimize_size
7328 && operand_equal_p (arg0, arg1, 0))
7330 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7332 if (powfn)
7334 tree arg = build_real (type, dconst2);
7335 tree arglist = build_tree_list (NULL_TREE, arg);
7336 arglist = tree_cons (NULL_TREE, arg0, arglist);
7337 return build_function_call_expr (powfn, arglist);
7342 goto associate;
7344 case BIT_IOR_EXPR:
7345 bit_ior:
7346 if (integer_all_onesp (arg1))
7347 return omit_one_operand (type, arg1, arg0);
7348 if (integer_zerop (arg1))
7349 return non_lvalue (fold_convert (type, arg0));
7350 if (operand_equal_p (arg0, arg1, 0))
7351 return non_lvalue (fold_convert (type, arg0));
7353 /* ~X | X is -1. */
7354 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7355 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7357 t1 = build_int_cst (type, -1);
7358 t1 = force_fit_type (t1, 0, false, false);
7359 return omit_one_operand (type, t1, arg1);
7362 /* X | ~X is -1. */
7363 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7364 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7366 t1 = build_int_cst (type, -1);
7367 t1 = force_fit_type (t1, 0, false, false);
7368 return omit_one_operand (type, t1, arg0);
7371 t1 = distribute_bit_expr (code, type, arg0, arg1);
7372 if (t1 != NULL_TREE)
7373 return t1;
7375 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7377 This results in more efficient code for machines without a NAND
7378 instruction. Combine will canonicalize to the first form
7379 which will allow use of NAND instructions provided by the
7380 backend if they exist. */
7381 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7382 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7384 return fold (build1 (BIT_NOT_EXPR, type,
7385 build2 (BIT_AND_EXPR, type,
7386 TREE_OPERAND (arg0, 0),
7387 TREE_OPERAND (arg1, 0))));
7390 /* See if this can be simplified into a rotate first. If that
7391 is unsuccessful continue in the association code. */
7392 goto bit_rotate;
7394 case BIT_XOR_EXPR:
7395 if (integer_zerop (arg1))
7396 return non_lvalue (fold_convert (type, arg0));
7397 if (integer_all_onesp (arg1))
7398 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7399 if (operand_equal_p (arg0, arg1, 0))
7400 return omit_one_operand (type, integer_zero_node, arg0);
7402 /* ~X ^ X is -1. */
7403 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7404 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7406 t1 = build_int_cst (type, -1);
7407 t1 = force_fit_type (t1, 0, false, false);
7408 return omit_one_operand (type, t1, arg1);
7411 /* X ^ ~X is -1. */
7412 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7413 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7415 t1 = build_int_cst (type, -1);
7416 t1 = force_fit_type (t1, 0, false, false);
7417 return omit_one_operand (type, t1, arg0);
7420 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7421 with a constant, and the two constants have no bits in common,
7422 we should treat this as a BIT_IOR_EXPR since this may produce more
7423 simplifications. */
7424 if (TREE_CODE (arg0) == BIT_AND_EXPR
7425 && TREE_CODE (arg1) == BIT_AND_EXPR
7426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7427 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7428 && integer_zerop (const_binop (BIT_AND_EXPR,
7429 TREE_OPERAND (arg0, 1),
7430 TREE_OPERAND (arg1, 1), 0)))
7432 code = BIT_IOR_EXPR;
7433 goto bit_ior;
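/* e.g. (x & 0xf0) ^ (y & 0x0f) is handled as (x & 0xf0) | (y & 0x0f),
   since the two masks share no bits.  */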
7436 /* See if this can be simplified into a rotate first. If that
7437 is unsuccessful continue in the association code. */
7438 goto bit_rotate;
7440 case BIT_AND_EXPR:
7441 if (integer_all_onesp (arg1))
7442 return non_lvalue (fold_convert (type, arg0));
7443 if (integer_zerop (arg1))
7444 return omit_one_operand (type, arg1, arg0);
7445 if (operand_equal_p (arg0, arg1, 0))
7446 return non_lvalue (fold_convert (type, arg0));
7448 /* ~X & X is always zero. */
7449 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7450 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7451 return omit_one_operand (type, integer_zero_node, arg1);
7453 /* X & ~X is always zero. */
7454 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7455 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7456 return omit_one_operand (type, integer_zero_node, arg0);
7458 t1 = distribute_bit_expr (code, type, arg0, arg1);
7459 if (t1 != NULL_TREE)
7460 return t1;
7461 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7462 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7463 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7465 unsigned int prec
7466 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7468 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7469 && (~TREE_INT_CST_LOW (arg1)
7470 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7471 return fold_convert (type, TREE_OPERAND (arg0, 0));
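/* e.g. for an 8-bit unsigned char c, ((int) c & 0xff) keeps every
   bit of c, so the mask is dropped.  */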
7474 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7476 This results in more efficient code for machines without a NOR
7477 instruction. Combine will canonicalize to the first form
7478 which will allow use of NOR instructions provided by the
7479 backend if they exist. */
7480 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7481 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7483 return fold (build1 (BIT_NOT_EXPR, type,
7484 build2 (BIT_IOR_EXPR, type,
7485 TREE_OPERAND (arg0, 0),
7486 TREE_OPERAND (arg1, 0))));
7489 goto associate;
7491 case RDIV_EXPR:
7492 /* Don't touch a floating-point divide by zero unless the mode
7493 of the constant can represent infinity. */
7494 if (TREE_CODE (arg1) == REAL_CST
7495 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7496 && real_zerop (arg1))
7497 return t;
7499 /* (-A) / (-B) -> A / B */
7500 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7501 return fold (build2 (RDIV_EXPR, type,
7502 TREE_OPERAND (arg0, 0),
7503 negate_expr (arg1)));
7504 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7505 return fold (build2 (RDIV_EXPR, type,
7506 negate_expr (arg0),
7507 TREE_OPERAND (arg1, 0)));
7509 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7510 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7511 && real_onep (arg1))
7512 return non_lvalue (fold_convert (type, arg0));
7514 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7515 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7516 && real_minus_onep (arg1))
7517 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7519 /* If ARG1 is a constant, we can convert this to a multiply by the
7520 reciprocal. This does not have the same rounding properties,
7521 so only do this if -funsafe-math-optimizations. We can actually
7522 always safely do it if ARG1 is a power of two, but it's hard to
7523 tell if it is or not in a portable manner. */
7524 if (TREE_CODE (arg1) == REAL_CST)
7526 if (flag_unsafe_math_optimizations
7527 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7528 arg1, 0)))
7529 return fold (build2 (MULT_EXPR, type, arg0, tem));
7530 /* Find the reciprocal if optimizing and the result is exact. */
7531 if (optimize)
7533 REAL_VALUE_TYPE r;
7534 r = TREE_REAL_CST (arg1);
7535 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
7537 tem = build_real (type, r);
7538 return fold (build2 (MULT_EXPR, type, arg0, tem));
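/* e.g. x / 4.0 becomes x * 0.25 here, because 0.25 is exactly
   representable; x / 3.0 is only rewritten under the
   flag_unsafe_math_optimizations branch above.  */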
7542 /* Convert A/B/C to A/(B*C). */
7543 if (flag_unsafe_math_optimizations
7544 && TREE_CODE (arg0) == RDIV_EXPR)
7545 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7546 fold (build2 (MULT_EXPR, type,
7547 TREE_OPERAND (arg0, 1), arg1))));
7549 /* Convert A/(B/C) to (A/B)*C. */
7550 if (flag_unsafe_math_optimizations
7551 && TREE_CODE (arg1) == RDIV_EXPR)
7552 return fold (build2 (MULT_EXPR, type,
7553 fold (build2 (RDIV_EXPR, type, arg0,
7554 TREE_OPERAND (arg1, 0))),
7555 TREE_OPERAND (arg1, 1)));
7557 /* Convert C1/(X*C2) into (C1/C2)/X. */
7558 if (flag_unsafe_math_optimizations
7559 && TREE_CODE (arg1) == MULT_EXPR
7560 && TREE_CODE (arg0) == REAL_CST
7561 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7563 tree tem = const_binop (RDIV_EXPR, arg0,
7564 TREE_OPERAND (arg1, 1), 0);
7565 if (tem)
7566 return fold (build2 (RDIV_EXPR, type, tem,
7567 TREE_OPERAND (arg1, 0)));
7570 if (flag_unsafe_math_optimizations)
7572 enum built_in_function fcode = builtin_mathfn_code (arg1);
7573 /* Optimize x/expN(y) into x*expN(-y). */
7574 if (BUILTIN_EXPONENT_P (fcode))
7576 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7577 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7578 tree arglist = build_tree_list (NULL_TREE,
7579 fold_convert (type, arg));
7580 arg1 = build_function_call_expr (expfn, arglist);
7581 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7584 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7585 if (fcode == BUILT_IN_POW
7586 || fcode == BUILT_IN_POWF
7587 || fcode == BUILT_IN_POWL)
7589 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7590 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7591 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7592 tree neg11 = fold_convert (type, negate_expr (arg11));
7593 tree arglist = tree_cons (NULL_TREE, arg10,
7594 build_tree_list (NULL_TREE, neg11));
7595 arg1 = build_function_call_expr (powfn, arglist);
7596 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7600 if (flag_unsafe_math_optimizations)
7602 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7603 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7605 /* Optimize sin(x)/cos(x) as tan(x). */
7606 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7607 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7608 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7609 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7610 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7612 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7614 if (tanfn != NULL_TREE)
7615 return build_function_call_expr (tanfn,
7616 TREE_OPERAND (arg0, 1));
7619 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7620 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7621 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7622 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7623 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7624 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7626 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7628 if (tanfn != NULL_TREE)
7630 tree tmp = TREE_OPERAND (arg0, 1);
7631 tmp = build_function_call_expr (tanfn, tmp);
7632 return fold (build2 (RDIV_EXPR, type,
7633 build_real (type, dconst1), tmp));
7637 /* Optimize pow(x,c)/x as pow(x,c-1). */
7638 if (fcode0 == BUILT_IN_POW
7639 || fcode0 == BUILT_IN_POWF
7640 || fcode0 == BUILT_IN_POWL)
7642 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7643 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7644 if (TREE_CODE (arg01) == REAL_CST
7645 && ! TREE_CONSTANT_OVERFLOW (arg01)
7646 && operand_equal_p (arg1, arg00, 0))
7648 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7649 REAL_VALUE_TYPE c;
7650 tree arg, arglist;
7652 c = TREE_REAL_CST (arg01);
7653 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7654 arg = build_real (type, c);
7655 arglist = build_tree_list (NULL_TREE, arg);
7656 arglist = tree_cons (NULL_TREE, arg1, arglist);
7657 return build_function_call_expr (powfn, arglist);
7661 goto binary;
7663 case TRUNC_DIV_EXPR:
7664 case ROUND_DIV_EXPR:
7665 case FLOOR_DIV_EXPR:
7666 case CEIL_DIV_EXPR:
7667 case EXACT_DIV_EXPR:
7668 if (integer_onep (arg1))
7669 return non_lvalue (fold_convert (type, arg0));
7670 if (integer_zerop (arg1))
7671 return t;
7672 /* X / -1 is -X. */
7673 if (!TYPE_UNSIGNED (type)
7674 && TREE_CODE (arg1) == INTEGER_CST
7675 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7676 && TREE_INT_CST_HIGH (arg1) == -1)
7677 return fold_convert (type, negate_expr (arg0));
7679 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7680 operation, EXACT_DIV_EXPR.
7682 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7683 At one time the others generated faster code, but it's not clear whether
7684 they still do after the last round of changes to the DIV code in expmed.c. */
7685 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7686 && multiple_of_p (type, arg0, arg1))
7687 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
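/* e.g. a FLOOR_DIV_EXPR of x*8 by 8 is known to be exact, so it
   becomes an EXACT_DIV_EXPR, which expands more cheaply.  */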
7689 if (TREE_CODE (arg1) == INTEGER_CST
7690 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7691 code, NULL_TREE)))
7692 return fold_convert (type, tem);
7694 goto binary;
7696 case CEIL_MOD_EXPR:
7697 case FLOOR_MOD_EXPR:
7698 case ROUND_MOD_EXPR:
7699 case TRUNC_MOD_EXPR:
7700 if (integer_onep (arg1))
7701 return omit_one_operand (type, integer_zero_node, arg0);
7702 if (integer_zerop (arg1))
7703 return t;
7705 /* X % -1 is zero. */
7706 if (!TYPE_UNSIGNED (type)
7707 && TREE_CODE (arg1) == INTEGER_CST
7708 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7709 && TREE_INT_CST_HIGH (arg1) == -1)
7710 return omit_one_operand (type, integer_zero_node, arg0);
7712 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7713 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7714 if (code == TRUNC_MOD_EXPR
7715 && TYPE_UNSIGNED (type)
7716 && integer_pow2p (arg1))
7718 unsigned HOST_WIDE_INT high, low;
7719 tree mask;
7720 int l;
7722 l = tree_log2 (arg1);
7723 if (l >= HOST_BITS_PER_WIDE_INT)
7725 high = ((unsigned HOST_WIDE_INT) 1
7726 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7727 low = -1;
7729 else
7731 high = 0;
7732 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7735 mask = build_int_cst_wide (type, low, high);
7736 return fold (build2 (BIT_AND_EXPR, type,
7737 fold_convert (type, arg0), mask));
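/* e.g. for unsigned x, x % 8 becomes x & 7.  */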
7740 /* X % -C is the same as X % C. */
7741 if (code == TRUNC_MOD_EXPR
7742 && !TYPE_UNSIGNED (type)
7743 && TREE_CODE (arg1) == INTEGER_CST
7744 && TREE_INT_CST_HIGH (arg1) < 0
7745 && !flag_trapv
7746 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7747 && !sign_bit_p (arg1, arg1))
7748 return fold (build2 (code, type, fold_convert (type, arg0),
7749 fold_convert (type, negate_expr (arg1))));
7751 /* X % -Y is the same as X % Y. */
7752 if (code == TRUNC_MOD_EXPR
7753 && !TYPE_UNSIGNED (type)
7754 && TREE_CODE (arg1) == NEGATE_EXPR
7755 && !flag_trapv)
7756 return fold (build2 (code, type, fold_convert (type, arg0),
7757 fold_convert (type, TREE_OPERAND (arg1, 0))));
7759 if (TREE_CODE (arg1) == INTEGER_CST
7760 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7761 code, NULL_TREE)))
7762 return fold_convert (type, tem);
7764 goto binary;
7766 case LROTATE_EXPR:
7767 case RROTATE_EXPR:
7768 if (integer_all_onesp (arg0))
7769 return omit_one_operand (type, arg0, arg1);
7770 goto shift;
7772 case RSHIFT_EXPR:
7773 /* Optimize -1 >> x for arithmetic right shifts. */
7774 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7775 return omit_one_operand (type, arg0, arg1);
7776 /* ... fall through ... */
7778 case LSHIFT_EXPR:
7779 shift:
7780 if (integer_zerop (arg1))
7781 return non_lvalue (fold_convert (type, arg0));
7782 if (integer_zerop (arg0))
7783 return omit_one_operand (type, arg0, arg1);
7785 /* Since a negative shift count is not well-defined,
7786 don't try to compute it in the compiler. */
7787 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7788 return t;
7789 /* Rewrite an LROTATE_EXPR by a constant into an
7790 RROTATE_EXPR by a new constant. */
7791 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7793 tree tem = build_int_cst (NULL_TREE,
7794 GET_MODE_BITSIZE (TYPE_MODE (type)));
7795 tem = fold_convert (TREE_TYPE (arg1), tem);
7796 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7797 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
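/* e.g. for a 32-bit type, a left rotate by 5 becomes a right
   rotate by 27.  */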
7800 /* If we have a rotate of a bit operation with the rotate count and
7801 the second operand of the bit operation both constant,
7802 permute the two operations. */
7803 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7804 && (TREE_CODE (arg0) == BIT_AND_EXPR
7805 || TREE_CODE (arg0) == BIT_IOR_EXPR
7806 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7807 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7808 return fold (build2 (TREE_CODE (arg0), type,
7809 fold (build2 (code, type,
7810 TREE_OPERAND (arg0, 0), arg1)),
7811 fold (build2 (code, type,
7812 TREE_OPERAND (arg0, 1), arg1))));
7814 /* Two consecutive rotates adding up to the width of the mode can
7815 be ignored. */
7816 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7817 && TREE_CODE (arg0) == RROTATE_EXPR
7818 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7819 && TREE_INT_CST_HIGH (arg1) == 0
7820 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7821 && ((TREE_INT_CST_LOW (arg1)
7822 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7823 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7824 return TREE_OPERAND (arg0, 0);
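/* e.g. for a 32-bit type, rotating x right by 10 and then right
   by 22 yields x again.  */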
7826 goto binary;
7828 case MIN_EXPR:
7829 if (operand_equal_p (arg0, arg1, 0))
7830 return omit_one_operand (type, arg0, arg1);
7831 if (INTEGRAL_TYPE_P (type)
7832 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7833 return omit_one_operand (type, arg1, arg0);
7834 goto associate;
7836 case MAX_EXPR:
7837 if (operand_equal_p (arg0, arg1, 0))
7838 return omit_one_operand (type, arg0, arg1);
7839 if (INTEGRAL_TYPE_P (type)
7840 && TYPE_MAX_VALUE (type)
7841 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7842 return omit_one_operand (type, arg1, arg0);
7843 goto associate;
7845 case TRUTH_NOT_EXPR:
7846 /* The argument to invert_truthvalue must have Boolean type. */
7847 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7848 arg0 = fold_convert (boolean_type_node, arg0);
7850 /* Note that the operand of this must be an int
7851 and its values must be 0 or 1.
7852 ("true" is a fixed value perhaps depending on the language,
7853 but we don't handle values other than 1 correctly yet.) */
7854 tem = invert_truthvalue (arg0);
7855 /* Avoid infinite recursion. */
7856 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7858 tem = fold_single_bit_test (code, arg0, arg1, type);
7859 if (tem)
7860 return tem;
7861 return t;
7863 return fold_convert (type, tem);
7865 case TRUTH_ANDIF_EXPR:
7866 /* Note that the operands of this must be ints
7867 and their values must be 0 or 1.
7868 ("true" is a fixed value perhaps depending on the language.) */
7869 /* If first arg is constant zero, return it. */
7870 if (integer_zerop (arg0))
7871 return fold_convert (type, arg0);
7872 case TRUTH_AND_EXPR:
7873 /* If either arg is constant true, drop it. */
7874 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7875 return non_lvalue (fold_convert (type, arg1));
7876 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7877 /* Preserve sequence points. */
7878 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7879 return non_lvalue (fold_convert (type, arg0));
7880 /* If second arg is constant zero, result is zero, but first arg
7881 must be evaluated. */
7882 if (integer_zerop (arg1))
7883 return omit_one_operand (type, arg1, arg0);
7884 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7885 case will be handled here. */
7886 if (integer_zerop (arg0))
7887 return omit_one_operand (type, arg0, arg1);
7889 /* !X && X is always false. */
7890 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7891 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7892 return omit_one_operand (type, integer_zero_node, arg1);
7893 /* X && !X is always false. */
7894 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7895 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7896 return omit_one_operand (type, integer_zero_node, arg0);
7898 truth_andor:
7899 /* We only do these simplifications if we are optimizing. */
7900 if (!optimize)
7901 return t;
7903 /* Check for things like (A || B) && (A || C). We can convert this
7904 to A || (B && C). Note that either operator can be any of the four
7905 truth and/or operations and the transformation will still be
7906 valid. Also note that we only care about order for the
7907 ANDIF and ORIF operators. If B contains side effects, this
7908 might change the truth-value of A. */
7909 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7910 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7911 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7912 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7913 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7914 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7916 tree a00 = TREE_OPERAND (arg0, 0);
7917 tree a01 = TREE_OPERAND (arg0, 1);
7918 tree a10 = TREE_OPERAND (arg1, 0);
7919 tree a11 = TREE_OPERAND (arg1, 1);
7920 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7921 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7922 && (code == TRUTH_AND_EXPR
7923 || code == TRUTH_OR_EXPR));
7925 if (operand_equal_p (a00, a10, 0))
7926 return fold (build2 (TREE_CODE (arg0), type, a00,
7927 fold (build2 (code, type, a01, a11))));
7928 else if (commutative && operand_equal_p (a00, a11, 0))
7929 return fold (build2 (TREE_CODE (arg0), type, a00,
7930 fold (build2 (code, type, a01, a10))));
7931 else if (commutative && operand_equal_p (a01, a10, 0))
7932 return fold (build2 (TREE_CODE (arg0), type, a01,
7933 fold (build2 (code, type, a00, a11))));
7935 /* This case is tricky because we must either have commutative
7936 operators or else A10 must not have side-effects. */
7938 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7939 && operand_equal_p (a01, a11, 0))
7940 return fold (build2 (TREE_CODE (arg0), type,
7941 fold (build2 (code, type, a00, a10)),
7942 a01));
7945 /* See if we can build a range comparison. */
7946 if (0 != (tem = fold_range_test (t)))
7947 return tem;
7949 /* Check for the possibility of merging component references. If our
7950 lhs is another similar operation, try to merge its rhs with our
7951 rhs. Then try to merge our lhs and rhs. */
7952 if (TREE_CODE (arg0) == code
7953 && 0 != (tem = fold_truthop (code, type,
7954 TREE_OPERAND (arg0, 1), arg1)))
7955 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7957 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7958 return tem;
7960 return t;
7962 case TRUTH_ORIF_EXPR:
7963 /* Note that the operands of this must be ints
7964 and their values must be 0 or true.
7965 ("true" is a fixed value perhaps depending on the language.) */
7966 /* If first arg is constant true, return it. */
7967 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7968 return fold_convert (type, arg0);
7969 case TRUTH_OR_EXPR:
7970 /* If either arg is constant zero, drop it. */
7971 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7972 return non_lvalue (fold_convert (type, arg1));
7973 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7974 /* Preserve sequence points. */
7975 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7976 return non_lvalue (fold_convert (type, arg0));
7977 /* If second arg is constant true, result is true, but we must
7978 evaluate first arg. */
7979 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7980 return omit_one_operand (type, arg1, arg0);
7981 /* Likewise for first arg, but note this only occurs here for
7982 TRUTH_OR_EXPR. */
7983 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7984 return omit_one_operand (type, arg0, arg1);
7986 /* !X || X is always true. */
7987 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7988 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7989 return omit_one_operand (type, integer_one_node, arg1);
7990 /* X || !X is always true. */
7991 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7992 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7993 return omit_one_operand (type, integer_one_node, arg0);
7995 goto truth_andor;
7997 case TRUTH_XOR_EXPR:
7998 /* If the second arg is constant zero, drop it. */
7999 if (integer_zerop (arg1))
8000 return non_lvalue (fold_convert (type, arg0));
8001 /* If the second arg is constant true, this is a logical inversion. */
8002 if (integer_onep (arg1))
8003 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8004 /* Identical arguments cancel to zero. */
8005 if (operand_equal_p (arg0, arg1, 0))
8006 return omit_one_operand (type, integer_zero_node, arg0);
8008 /* !X ^ X is always true. */
8009 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8010 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8011 return omit_one_operand (type, integer_one_node, arg1);
8013 /* X ^ !X is always true. */
8014 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8015 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8016 return omit_one_operand (type, integer_one_node, arg0);
8018 return t;
8020 case EQ_EXPR:
8021 case NE_EXPR:
8022 case LT_EXPR:
8023 case GT_EXPR:
8024 case LE_EXPR:
8025 case GE_EXPR:
8026 /* If one arg is a real or integer constant, put it last. */
8027 if (tree_swap_operands_p (arg0, arg1, true))
8028 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8030 /* If this is an equality comparison of the address of a non-weak
8031 object against zero, then we know the result. */
8032 if ((code == EQ_EXPR || code == NE_EXPR)
8033 && TREE_CODE (arg0) == ADDR_EXPR
8034 && DECL_P (TREE_OPERAND (arg0, 0))
8035 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8036 && integer_zerop (arg1))
8037 return constant_boolean_node (code != EQ_EXPR, type);
8039 /* If this is an equality comparison of the address of two non-weak,
8040 unaliased symbols, neither of which is extern (since we do not
8041 have access to attributes for externs), then we know the result. */
8042 if ((code == EQ_EXPR || code == NE_EXPR)
8043 && TREE_CODE (arg0) == ADDR_EXPR
8044 && DECL_P (TREE_OPERAND (arg0, 0))
8045 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8046 && ! lookup_attribute ("alias",
8047 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8048 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8049 && TREE_CODE (arg1) == ADDR_EXPR
8050 && DECL_P (TREE_OPERAND (arg1, 0))
8051 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8052 && ! lookup_attribute ("alias",
8053 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8054 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8055 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8056 ? code == EQ_EXPR : code != EQ_EXPR,
8057 type);
8059 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8061 tree targ0 = strip_float_extensions (arg0);
8062 tree targ1 = strip_float_extensions (arg1);
8063 tree newtype = TREE_TYPE (targ0);
8065 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8066 newtype = TREE_TYPE (targ1);
8068 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8069 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8070 return fold (build2 (code, type, fold_convert (newtype, targ0),
8071 fold_convert (newtype, targ1)));
8073 /* (-a) CMP (-b) -> b CMP a */
8074 if (TREE_CODE (arg0) == NEGATE_EXPR
8075 && TREE_CODE (arg1) == NEGATE_EXPR)
8076 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8077 TREE_OPERAND (arg0, 0)));
8079 if (TREE_CODE (arg1) == REAL_CST)
8081 REAL_VALUE_TYPE cst;
8082 cst = TREE_REAL_CST (arg1);
8084 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8085 if (TREE_CODE (arg0) == NEGATE_EXPR)
8086 return
8087 fold (build2 (swap_tree_comparison (code), type,
8088 TREE_OPERAND (arg0, 0),
8089 build_real (TREE_TYPE (arg1),
8090 REAL_VALUE_NEGATE (cst))));
8092 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8093 /* a CMP (-0) -> a CMP 0 */
8094 if (REAL_VALUE_MINUS_ZERO (cst))
8095 return fold (build2 (code, type, arg0,
8096 build_real (TREE_TYPE (arg1), dconst0)));
8098 /* x != NaN is always true, other ops are always false. */
8099 if (REAL_VALUE_ISNAN (cst)
8100 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8102 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8103 return omit_one_operand (type, tem, arg0);
8106 /* Fold comparisons against infinity. */
8107 if (REAL_VALUE_ISINF (cst))
8109 tem = fold_inf_compare (code, type, arg0, arg1);
8110 if (tem != NULL_TREE)
8111 return tem;
8115 /* If this is a comparison of a real constant with a PLUS_EXPR
8116 or a MINUS_EXPR of a real constant, we can convert it into a
8117 comparison with a revised real constant as long as no overflow
8118 occurs when unsafe_math_optimizations are enabled. */
8119 if (flag_unsafe_math_optimizations
8120 && TREE_CODE (arg1) == REAL_CST
8121 && (TREE_CODE (arg0) == PLUS_EXPR
8122 || TREE_CODE (arg0) == MINUS_EXPR)
8123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8124 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8125 ? MINUS_EXPR : PLUS_EXPR,
8126 arg1, TREE_OPERAND (arg0, 1), 0))
8127 && ! TREE_CONSTANT_OVERFLOW (tem))
8128 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8130 /* Likewise, we can simplify a comparison of a real constant with
8131 a MINUS_EXPR whose first operand is also a real constant, i.e.
8132 (c1 - x) < c2 becomes x > c1-c2. */
8133 if (flag_unsafe_math_optimizations
8134 && TREE_CODE (arg1) == REAL_CST
8135 && TREE_CODE (arg0) == MINUS_EXPR
8136 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8137 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8138 arg1, 0))
8139 && ! TREE_CONSTANT_OVERFLOW (tem))
8140 return fold (build2 (swap_tree_comparison (code), type,
8141 TREE_OPERAND (arg0, 1), tem));
8143 /* Fold comparisons against built-in math functions. */
8144 if (TREE_CODE (arg1) == REAL_CST
8145 && flag_unsafe_math_optimizations
8146 && ! flag_errno_math)
8148 enum built_in_function fcode = builtin_mathfn_code (arg0);
8150 if (fcode != END_BUILTINS)
8152 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8153 if (tem != NULL_TREE)
8154 return tem;
8159 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8160 if (TREE_CONSTANT (arg1)
8161 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8162 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8163 /* This optimization is invalid for ordered comparisons
8164 if CONST+INCR overflows or if foo+incr might overflow.
8165 This optimization is invalid for floating point due to rounding.
8166 For pointer types we assume overflow doesn't happen. */
8167 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8168 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8169 && (code == EQ_EXPR || code == NE_EXPR))))
8171 tree varop, newconst;
8173 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8175 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8176 arg1, TREE_OPERAND (arg0, 1)));
8177 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8178 TREE_OPERAND (arg0, 0),
8179 TREE_OPERAND (arg0, 1));
8181 else
8183 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8184 arg1, TREE_OPERAND (arg0, 1)));
8185 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8186 TREE_OPERAND (arg0, 0),
8187 TREE_OPERAND (arg0, 1));
8191 /* If VAROP is a reference to a bitfield, we must mask
8192 the constant by the width of the field. */
8193 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8194 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8195 && host_integerp (DECL_SIZE (TREE_OPERAND
8196 (TREE_OPERAND (varop, 0), 1)), 1))
8198 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8199 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8200 tree folded_compare, shift;
8202 /* First check whether the comparison would come out
8203 always the same. If we didn't do that, the masking
8204 would change the meaning. */
8205 folded_compare = fold (build2 (code, type,
8206 TREE_OPERAND (varop, 0), arg1));
8207 if (integer_zerop (folded_compare)
8208 || integer_onep (folded_compare))
8209 return omit_one_operand (type, folded_compare, varop);
8211 shift = build_int_cst (NULL_TREE,
8212 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8213 shift = fold_convert (TREE_TYPE (varop), shift);
8214 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8215 newconst, shift));
8216 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8217 newconst, shift));
8220 return fold (build2 (code, type, varop, newconst));
8223 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8224 This transformation affects the cases which are handled in later
8225 optimizations involving comparisons with non-negative constants. */
8226 if (TREE_CODE (arg1) == INTEGER_CST
8227 && TREE_CODE (arg0) != INTEGER_CST
8228 && tree_int_cst_sgn (arg1) > 0)
8230 switch (code)
8232 case GE_EXPR:
8233 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8234 return fold (build2 (GT_EXPR, type, arg0, arg1));
8236 case LT_EXPR:
8237 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8238 return fold (build2 (LE_EXPR, type, arg0, arg1));
8240 default:
8241 break;
8245 /* Comparisons with the highest or lowest possible integer of
8246 the specified size will have known values.
8248 This is quite similar to fold_relational_hi_lo; however, my
8249 attempts to share the code have been nothing but trouble.
8250 I give up for now. */
8252 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8254 if (TREE_CODE (arg1) == INTEGER_CST
8255 && ! TREE_CONSTANT_OVERFLOW (arg1)
8256 && width <= HOST_BITS_PER_WIDE_INT
8257 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8258 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8260 unsigned HOST_WIDE_INT signed_max;
8261 unsigned HOST_WIDE_INT max, min;
8263 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8265 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8267 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8268 min = 0;
8270 else
8272 max = signed_max;
8273 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8276 if (TREE_INT_CST_HIGH (arg1) == 0
8277 && TREE_INT_CST_LOW (arg1) == max)
8278 switch (code)
8280 case GT_EXPR:
8281 return omit_one_operand (type, integer_zero_node, arg0);
8283 case GE_EXPR:
8284 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8286 case LE_EXPR:
8287 return omit_one_operand (type, integer_one_node, arg0);
8289 case LT_EXPR:
8290 return fold (build2 (NE_EXPR, type, arg0, arg1));
8292 /* The GE_EXPR and LT_EXPR cases above are not normally
8293 reached because of previous transformations. */
8295 default:
8296 break;
8298 else if (TREE_INT_CST_HIGH (arg1) == 0
8299 && TREE_INT_CST_LOW (arg1) == max - 1)
8300 switch (code)
8302 case GT_EXPR:
8303 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8304 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8305 case LE_EXPR:
8306 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8307 return fold (build2 (NE_EXPR, type, arg0, arg1));
8308 default:
8309 break;
8311 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8312 && TREE_INT_CST_LOW (arg1) == min)
8313 switch (code)
8315 case LT_EXPR:
8316 return omit_one_operand (type, integer_zero_node, arg0);
8318 case LE_EXPR:
8319 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8321 case GE_EXPR:
8322 return omit_one_operand (type, integer_one_node, arg0);
8324 case GT_EXPR:
8325 return fold (build2 (NE_EXPR, type, arg0, arg1));
8327 default:
8328 break;
8330 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8331 && TREE_INT_CST_LOW (arg1) == min + 1)
8332 switch (code)
8334 case GE_EXPR:
8335 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8336 return fold (build2 (NE_EXPR, type, arg0, arg1));
8337 case LT_EXPR:
8338 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8339 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8340 default:
8341 break;
8344 else if (!in_gimple_form
8345 && TREE_INT_CST_HIGH (arg1) == 0
8346 && TREE_INT_CST_LOW (arg1) == signed_max
8347 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8348 /* signed_type does not work on pointer types. */
8349 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8351 /* The following case also applies to X < signed_max+1
8352 and X >= signed_max+1 because of previous transformations. */
8353 if (code == LE_EXPR || code == GT_EXPR)
8355 tree st0, st1;
8356 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8357 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8358 return fold
8359 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8360 type, fold_convert (st0, arg0),
8361 fold_convert (st1, integer_zero_node)));
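/* Illustration (assuming a 32-bit int): for unsigned x,
   x > 0x7fffffff becomes (int) x < 0, and x <= 0x7fffffff
   becomes (int) x >= 0.  */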
8367 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8368 a MINUS_EXPR of a constant, we can convert it into a comparison with
8369 a revised constant as long as no overflow occurs. */
8370 if ((code == EQ_EXPR || code == NE_EXPR)
8371 && TREE_CODE (arg1) == INTEGER_CST
8372 && (TREE_CODE (arg0) == PLUS_EXPR
8373 || TREE_CODE (arg0) == MINUS_EXPR)
8374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8375 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8376 ? MINUS_EXPR : PLUS_EXPR,
8377 arg1, TREE_OPERAND (arg0, 1), 0))
8378 && ! TREE_CONSTANT_OVERFLOW (tem))
8379 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8381 /* Similarly for a NEGATE_EXPR. */
8382 else if ((code == EQ_EXPR || code == NE_EXPR)
8383 && TREE_CODE (arg0) == NEGATE_EXPR
8384 && TREE_CODE (arg1) == INTEGER_CST
8385 && 0 != (tem = negate_expr (arg1))
8386 && TREE_CODE (tem) == INTEGER_CST
8387 && ! TREE_CONSTANT_OVERFLOW (tem))
8388 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8390 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8391 for !=. Don't do this for ordered comparisons due to overflow. */
8392 else if ((code == NE_EXPR || code == EQ_EXPR)
8393 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8394 return fold (build2 (code, type,
8395 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8397 /* If we are widening one operand of an integer comparison,
8398 see if the other operand is similarly being widened. Perhaps we
8399 can do the comparison in the narrower type. */
8400 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8401 && TREE_CODE (arg0) == NOP_EXPR
8402 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8403 && (code == EQ_EXPR || code == NE_EXPR
8404 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8405 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8406 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8407 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8408 || (TREE_CODE (t1) == INTEGER_CST
8409 && TREE_CODE (TREE_TYPE (tem)) == INTEGER_TYPE
8410 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8411 return fold (build2 (code, type, tem,
8412 fold_convert (TREE_TYPE (tem), t1)));
8414 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8415 constant, we can simplify it. */
8416 else if (TREE_CODE (arg1) == INTEGER_CST
8417 && (TREE_CODE (arg0) == MIN_EXPR
8418 || TREE_CODE (arg0) == MAX_EXPR)
8419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8420 return optimize_minmax_comparison (t);
8422 /* If we are comparing an ABS_EXPR with a constant, we can
8423 convert all the cases into explicit comparisons, but they may
8424 well not be faster than doing the ABS and one comparison.
8425 But ABS (X) <= C is a range comparison, which becomes a subtraction
8426 and a comparison, and is probably faster. */
8427 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8428 && TREE_CODE (arg0) == ABS_EXPR
8429 && ! TREE_SIDE_EFFECTS (arg0)
8430 && (0 != (tem = negate_expr (arg1)))
8431 && TREE_CODE (tem) == INTEGER_CST
8432 && ! TREE_CONSTANT_OVERFLOW (tem))
8433 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8434 build2 (GE_EXPR, type,
8435 TREE_OPERAND (arg0, 0), tem),
8436 build2 (LE_EXPR, type,
8437 TREE_OPERAND (arg0, 0), arg1)));
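/* e.g. abs(x) <= 5 becomes x >= -5 && x <= 5, built as a
   TRUTH_ANDIF_EXPR of the two comparisons.  */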
8439 /* If this is an EQ or NE comparison with zero and ARG0 is
8440 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8441 two operations, but the latter can be done in one less insn
8442 on machines that have only two-operand insns or on which a
8443 constant cannot be the first operand. */
8444 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8445 && TREE_CODE (arg0) == BIT_AND_EXPR)
8447 tree arg00 = TREE_OPERAND (arg0, 0);
8448 tree arg01 = TREE_OPERAND (arg0, 1);
8449 if (TREE_CODE (arg00) == LSHIFT_EXPR
8450 && integer_onep (TREE_OPERAND (arg00, 0)))
8451 return
8452 fold (build2 (code, type,
8453 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8454 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8455 arg01, TREE_OPERAND (arg00, 1)),
8456 fold_convert (TREE_TYPE (arg0),
8457 integer_one_node)),
8458 arg1));
8459 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8460 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8461 return
8462 fold (build2 (code, type,
8463 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8464 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8465 arg00, TREE_OPERAND (arg01, 1)),
8466 fold_convert (TREE_TYPE (arg0),
8467 integer_one_node)),
8468 arg1));
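/* e.g. ((1 << n) & x) == 0 becomes ((x >> n) & 1) == 0.  */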
8471 /* If this is an NE or EQ comparison of zero against the result of a
8472 signed MOD operation whose second operand is a power of 2, make
8473 the MOD operation unsigned since it is simpler and equivalent. */
8474 if ((code == NE_EXPR || code == EQ_EXPR)
8475 && integer_zerop (arg1)
8476 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8477 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8478 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8479 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8480 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8481 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8483 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8484 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8485 fold_convert (newtype,
8486 TREE_OPERAND (arg0, 0)),
8487 fold_convert (newtype,
8488 TREE_OPERAND (arg0, 1))));
8490 return fold (build2 (code, type, newmod,
8491 fold_convert (newtype, arg1)));
8494 /* If this is an NE comparison of zero with an AND of one, remove the
8495 comparison since the AND will give the correct value. */
8496 if (code == NE_EXPR && integer_zerop (arg1)
8497 && TREE_CODE (arg0) == BIT_AND_EXPR
8498 && integer_onep (TREE_OPERAND (arg0, 1)))
8499 return fold_convert (type, arg0);
8501 /* If we have (A & C) == C where C is a power of 2, convert this into
8502 (A & C) != 0. Similarly for NE_EXPR. */
8503 if ((code == EQ_EXPR || code == NE_EXPR)
8504 && TREE_CODE (arg0) == BIT_AND_EXPR
8505 && integer_pow2p (TREE_OPERAND (arg0, 1))
8506 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8507 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8508 arg0, fold_convert (TREE_TYPE (arg0),
8509 integer_zero_node)));
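/* e.g. (x & 4) == 4 becomes (x & 4) != 0.  */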
8511 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8512 2, then fold the expression into shifts and logical operations. */
8513 tem = fold_single_bit_test (code, arg0, arg1, type);
8514 if (tem)
8515 return tem;
8517 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8518 Similarly for NE_EXPR. */
8519 if ((code == EQ_EXPR || code == NE_EXPR)
8520 && TREE_CODE (arg0) == BIT_AND_EXPR
8521 && TREE_CODE (arg1) == INTEGER_CST
8522 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8524 tree notc = fold (build1 (BIT_NOT_EXPR,
8525 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8526 TREE_OPERAND (arg0, 1)));
8527 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8528 arg1, notc));
8529 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8530 if (integer_nonzerop (dandnotc))
8531 return omit_one_operand (type, rslt, arg0);
8534 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8535 Similarly for NE_EXPR. */
8536 if ((code == EQ_EXPR || code == NE_EXPR)
8537 && TREE_CODE (arg0) == BIT_IOR_EXPR
8538 && TREE_CODE (arg1) == INTEGER_CST
8539 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8541 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8542 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8543 TREE_OPERAND (arg0, 1), notd));
8544 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8545 if (integer_nonzerop (candnotd))
8546 return omit_one_operand (type, rslt, arg0);
8549 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8550 and similarly for >= into !=. */
8551 if ((code == LT_EXPR || code == GE_EXPR)
8552 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8553 && TREE_CODE (arg1) == LSHIFT_EXPR
8554 && integer_onep (TREE_OPERAND (arg1, 0)))
8555 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8556 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8557 TREE_OPERAND (arg1, 1)),
8558 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8560 else if ((code == LT_EXPR || code == GE_EXPR)
8561 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8562 && (TREE_CODE (arg1) == NOP_EXPR
8563 || TREE_CODE (arg1) == CONVERT_EXPR)
8564 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8565 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8566 return
8567 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8568 fold_convert (TREE_TYPE (arg0),
8569 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8570 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8571 1))),
8572 fold_convert (TREE_TYPE (arg0), integer_zero_node));
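/* e.g. for unsigned x, x < (1u << y) becomes (x >> y) == 0, and
   x >= (1u << y) becomes (x >> y) != 0.  */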
8574 /* Simplify comparison of something with itself. (For IEEE
8575 floating-point, we can only do some of these simplifications.) */
8576 if (operand_equal_p (arg0, arg1, 0))
8578 switch (code)
8580 case EQ_EXPR:
8581 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8582 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8583 return constant_boolean_node (1, type);
8584 break;
8586 case GE_EXPR:
8587 case LE_EXPR:
8588 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8589 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8590 return constant_boolean_node (1, type);
8591 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8593 case NE_EXPR:
8594 /* For NE, we can only do this simplification if integer
8595 or we don't honor IEEE floating point NaNs. */
8596 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8597 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8598 break;
8599 /* ... fall through ... */
8600 case GT_EXPR:
8601 case LT_EXPR:
8602 return constant_boolean_node (0, type);
8603 default:
8604 gcc_unreachable ();
8608 /* If we are comparing an expression that just has comparisons
8609 of two integer values, arithmetic expressions of those comparisons,
8610 and constants, we can simplify it. There are only three cases
8611 to check: the two values can either be equal, the first can be
8612 greater, or the second can be greater. Fold the expression for
8613 those three values. Since each value must be 0 or 1, we have
8614 eight possibilities, each of which corresponds to the constant 0
8615 or 1 or one of the six possible comparisons.
8617 This handles common cases like (a > b) == 0 but also handles
8618 expressions like ((x > y) - (y > x)) > 0, which supposedly
8619 occur in macroized code. */
8621 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8623 tree cval1 = 0, cval2 = 0;
8624 int save_p = 0;
8626 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8627 /* Don't handle degenerate cases here; they should already
8628 have been handled anyway. */
8629 && cval1 != 0 && cval2 != 0
8630 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8631 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8632 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8633 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8634 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8635 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8636 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8638 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8639 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8641 /* We can't just pass T to eval_subst in case cval1 or cval2
8642 was the same as ARG1. */
8644 tree high_result
8645 = fold (build2 (code, type,
8646 eval_subst (arg0, cval1, maxval,
8647 cval2, minval),
8648 arg1));
8649 tree equal_result
8650 = fold (build2 (code, type,
8651 eval_subst (arg0, cval1, maxval,
8652 cval2, maxval),
8653 arg1));
8654 tree low_result
8655 = fold (build2 (code, type,
8656 eval_subst (arg0, cval1, minval,
8657 cval2, maxval),
8658 arg1));
8660 /* All three of these results should be 0 or 1. Confirm they
8661 are. Then use those values to select the proper code
8662 to use. */
8664 if ((integer_zerop (high_result)
8665 || integer_onep (high_result))
8666 && (integer_zerop (equal_result)
8667 || integer_onep (equal_result))
8668 && (integer_zerop (low_result)
8669 || integer_onep (low_result)))
8671 /* Make a 3-bit mask with the high-order bit being the
8672 value for `>', the next for `=', and the low for `<'. */
8673 switch ((integer_onep (high_result) * 4)
8674 + (integer_onep (equal_result) * 2)
8675 + integer_onep (low_result))
8677 case 0:
8678 /* Always false. */
8679 return omit_one_operand (type, integer_zero_node, arg0);
8680 case 1:
8681 code = LT_EXPR;
8682 break;
8683 case 2:
8684 code = EQ_EXPR;
8685 break;
8686 case 3:
8687 code = LE_EXPR;
8688 break;
8689 case 4:
8690 code = GT_EXPR;
8691 break;
8692 case 5:
8693 code = NE_EXPR;
8694 break;
8695 case 6:
8696 code = GE_EXPR;
8697 break;
8698 case 7:
8699 /* Always true. */
8700 return omit_one_operand (type, integer_one_node, arg0);
8703 tem = build2 (code, type, cval1, cval2);
8704 if (save_p)
8705 return save_expr (tem);
8706 else
8707 return fold (tem);
8712 /* If this is a comparison of a field, we may be able to simplify it. */
8713 if (((TREE_CODE (arg0) == COMPONENT_REF
8714 && lang_hooks.can_use_bit_fields_p ())
8715 || TREE_CODE (arg0) == BIT_FIELD_REF)
8716 && (code == EQ_EXPR || code == NE_EXPR)
8717 /* Handle the constant case even without -O
8718 to make sure the warnings are given. */
8719 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8721 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8722 if (t1)
8723 return t1;
8726 /* If this is a comparison of complex values and either or both sides
8727 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8728 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8729 This may prevent needless evaluations. */
8730 if ((code == EQ_EXPR || code == NE_EXPR)
8731 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8732 && (TREE_CODE (arg0) == COMPLEX_EXPR
8733 || TREE_CODE (arg1) == COMPLEX_EXPR
8734 || TREE_CODE (arg0) == COMPLEX_CST
8735 || TREE_CODE (arg1) == COMPLEX_CST))
8737 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8738 tree real0, imag0, real1, imag1;
8740 arg0 = save_expr (arg0);
8741 arg1 = save_expr (arg1);
8742 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8743 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8744 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8745 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8747 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8748 : TRUTH_ORIF_EXPR),
8749 type,
8750 fold (build2 (code, type, real0, real1)),
8751 fold (build2 (code, type, imag0, imag1))));
8754 /* Optimize comparisons of strlen vs zero to a compare of the
8755 first character of the string vs zero. To wit,
8756 strlen(ptr) == 0 => *ptr == 0
8757 strlen(ptr) != 0 => *ptr != 0
8758 Other cases should reduce to one of these two (or a constant)
8759 due to the return value of strlen being unsigned. */
8760 if ((code == EQ_EXPR || code == NE_EXPR)
8761 && integer_zerop (arg1)
8762 && TREE_CODE (arg0) == CALL_EXPR)
8764 tree fndecl = get_callee_fndecl (arg0);
8765 tree arglist;
8767 if (fndecl
8768 && DECL_BUILT_IN (fndecl)
8769 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8770 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8771 && (arglist = TREE_OPERAND (arg0, 1))
8772 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8773 && ! TREE_CHAIN (arglist))
8774 return fold (build2 (code, type,
8775 build1 (INDIRECT_REF, char_type_node,
8776 TREE_VALUE (arglist)),
8777 fold_convert (char_type_node,
8778 integer_zero_node)));
8781 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8782 into a single range test. */
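/* For instance, with truncating division, x/4 == 2 holds exactly
when 8 <= x <= 11, so the division can be replaced by a single
range test on x. */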
8783 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8784 && TREE_CODE (arg1) == INTEGER_CST
8785 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8786 && !integer_zerop (TREE_OPERAND (arg0, 1))
8787 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8788 && !TREE_OVERFLOW (arg1))
8790 t1 = fold_div_compare (code, type, arg0, arg1);
8791 if (t1 != NULL_TREE)
8792 return t1;
8795 if ((code == EQ_EXPR || code == NE_EXPR)
8796 && !TREE_SIDE_EFFECTS (arg0)
8797 && integer_zerop (arg1)
8798 && tree_expr_nonzero_p (arg0))
8799 return constant_boolean_node (code == NE_EXPR, type);
8801 t1 = fold_relational_const (code, type, arg0, arg1);
8802 return t1 == NULL_TREE ? t : t1;
8804 case UNORDERED_EXPR:
8805 case ORDERED_EXPR:
8806 case UNLT_EXPR:
8807 case UNLE_EXPR:
8808 case UNGT_EXPR:
8809 case UNGE_EXPR:
8810 case UNEQ_EXPR:
8811 case LTGT_EXPR:
8812 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8814 t1 = fold_relational_const (code, type, arg0, arg1);
8815 if (t1 != NULL_TREE)
8816 return t1;
8819 /* If the first operand is NaN, the result is constant. */
8820 if (TREE_CODE (arg0) == REAL_CST
8821 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8822 && (code != LTGT_EXPR || ! flag_trapping_math))
8824 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8825 ? integer_zero_node
8826 : integer_one_node;
8827 return omit_one_operand (type, t1, arg1);
8830 /* If the second operand is NaN, the result is constant. */
8831 if (TREE_CODE (arg1) == REAL_CST
8832 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8833 && (code != LTGT_EXPR || ! flag_trapping_math))
8835 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8836 ? integer_zero_node
8837 : integer_one_node;
8838 return omit_one_operand (type, t1, arg0);
8841 /* Simplify unordered comparison of something with itself. */
8842 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8843 && operand_equal_p (arg0, arg1, 0))
8844 return constant_boolean_node (1, type);
8846 if (code == LTGT_EXPR
8847 && !flag_trapping_math
8848 && operand_equal_p (arg0, arg1, 0))
8849 return constant_boolean_node (0, type);
8851 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8853 tree targ0 = strip_float_extensions (arg0);
8854 tree targ1 = strip_float_extensions (arg1);
8855 tree newtype = TREE_TYPE (targ0);
8857 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8858 newtype = TREE_TYPE (targ1);
8860 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8861 return fold (build2 (code, type, fold_convert (newtype, targ0),
8862 fold_convert (newtype, targ1)));
8865 return t;
8867 case COND_EXPR:
8868 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8869 so all simple results must be passed through pedantic_non_lvalue. */
8870 if (TREE_CODE (arg0) == INTEGER_CST)
8872 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8873 /* Only optimize constant conditions when the selected branch
8874 has the same type as the COND_EXPR. This avoids optimizing
8875 away "c ? x : throw", where the throw has a void type. */
8876 if (! VOID_TYPE_P (TREE_TYPE (tem))
8877 || VOID_TYPE_P (type))
8878 return pedantic_non_lvalue (tem);
8879 return t;
8881 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8882 return pedantic_omit_one_operand (type, arg1, arg0);
8884 /* If we have A op B ? A : C, we may be able to convert this to a
8885 simpler expression, depending on the operation and the values
8886 of B and C. Signed zeros prevent all of these transformations,
8887 for reasons given above each one.
8889 Also try swapping the arguments and inverting the conditional. */
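/* For example, a >= b ? a : b can become MAX_EXPR <a, b>, and
x < 0 ? -x : x can become ABS_EXPR <x>, provided signed zeros do
not have to be honored. */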
8890 if (COMPARISON_CLASS_P (arg0)
8891 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8892 arg1, TREE_OPERAND (arg0, 1))
8893 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8895 tem = fold_cond_expr_with_comparison (type, arg0,
8896 TREE_OPERAND (t, 1),
8897 TREE_OPERAND (t, 2));
8898 if (tem)
8899 return tem;
8902 if (COMPARISON_CLASS_P (arg0)
8903 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8904 TREE_OPERAND (t, 2),
8905 TREE_OPERAND (arg0, 1))
8906 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8908 tem = invert_truthvalue (arg0);
8909 if (COMPARISON_CLASS_P (tem))
8911 tem = fold_cond_expr_with_comparison (type, tem,
8912 TREE_OPERAND (t, 2),
8913 TREE_OPERAND (t, 1));
8914 if (tem)
8915 return tem;
8919 /* If the second operand is simpler than the third, swap them
8920 since that produces better jump optimization results. */
8921 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8922 TREE_OPERAND (t, 2), false))
8924 /* See if this can be inverted. If it can't, possibly because
8925 it was a floating-point inequality comparison, don't do
8926 anything. */
8927 tem = invert_truthvalue (arg0);
8929 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8930 return fold (build3 (code, type, tem,
8931 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8934 /* Convert A ? 1 : 0 to simply A. */
8935 if (integer_onep (TREE_OPERAND (t, 1))
8936 && integer_zerop (TREE_OPERAND (t, 2))
8937 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8938 call to fold will try to move the conversion inside
8939 a COND, which will recurse. In that case, the COND_EXPR
8940 is probably the best choice, so leave it alone. */
8941 && type == TREE_TYPE (arg0))
8942 return pedantic_non_lvalue (arg0);
8944 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8945 over COND_EXPR in cases such as floating point comparisons. */
8946 if (integer_zerop (TREE_OPERAND (t, 1))
8947 && integer_onep (TREE_OPERAND (t, 2))
8948 && truth_value_p (TREE_CODE (arg0)))
8949 return pedantic_non_lvalue (fold_convert (type,
8950 invert_truthvalue (arg0)));
8952 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8953 if (TREE_CODE (arg0) == LT_EXPR
8954 && integer_zerop (TREE_OPERAND (arg0, 1))
8955 && integer_zerop (TREE_OPERAND (t, 2))
8956 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8957 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8958 TREE_TYPE (tem), tem, arg1)));
8960 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8961 already handled above. */
8962 if (TREE_CODE (arg0) == BIT_AND_EXPR
8963 && integer_onep (TREE_OPERAND (arg0, 1))
8964 && integer_zerop (TREE_OPERAND (t, 2))
8965 && integer_pow2p (arg1))
8967 tree tem = TREE_OPERAND (arg0, 0);
8968 STRIP_NOPS (tem);
8969 if (TREE_CODE (tem) == RSHIFT_EXPR
8970 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
8971 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8972 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8973 return fold (build2 (BIT_AND_EXPR, type,
8974 TREE_OPERAND (tem, 0), arg1));
8977 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8978 is probably obsolete because the first operand should be a
8979 truth value (that's why we have the two cases above), but let's
8980 leave it in until we can confirm this for all front-ends. */
8981 if (integer_zerop (TREE_OPERAND (t, 2))
8982 && TREE_CODE (arg0) == NE_EXPR
8983 && integer_zerop (TREE_OPERAND (arg0, 1))
8984 && integer_pow2p (arg1)
8985 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8986 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8987 arg1, OEP_ONLY_CONST))
8988 return pedantic_non_lvalue (fold_convert (type,
8989 TREE_OPERAND (arg0, 0)));
8991 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8992 if (integer_zerop (TREE_OPERAND (t, 2))
8993 && truth_value_p (TREE_CODE (arg0))
8994 && truth_value_p (TREE_CODE (arg1)))
8995 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8997 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8998 if (integer_onep (TREE_OPERAND (t, 2))
8999 && truth_value_p (TREE_CODE (arg0))
9000 && truth_value_p (TREE_CODE (arg1)))
9003 /* Only perform the transformation if ARG0 is easily inverted. */
9003 tem = invert_truthvalue (arg0);
9004 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9005 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9008 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9009 if (integer_zerop (arg1)
9010 && truth_value_p (TREE_CODE (arg0))
9011 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9013 /* Only perform the transformation if ARG0 is easily inverted. */
9014 tem = invert_truthvalue (arg0);
9015 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9016 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9017 TREE_OPERAND (t, 2)));
9020 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9021 if (integer_onep (arg1)
9022 && truth_value_p (TREE_CODE (arg0))
9023 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9024 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9025 TREE_OPERAND (t, 2)));
9027 return t;
9029 case COMPOUND_EXPR:
9030 /* When pedantic, a compound expression can be neither an lvalue
9031 nor an integer constant expression. */
9032 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9033 return t;
9034 /* Don't let (0, 0) be a null pointer constant. */
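/* E.g., when (x, 0) is folded, the zero is wrapped in a NOP_EXPR so
that the result no longer looks like an integer constant expression
and therefore cannot act as a null pointer constant. */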
9035 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9036 : fold_convert (type, arg1);
9037 return pedantic_non_lvalue (tem);
9039 case COMPLEX_EXPR:
9040 if (wins)
9041 return build_complex (type, arg0, arg1);
9042 return t;
9044 case REALPART_EXPR:
9045 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9046 return t;
9047 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9048 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9049 TREE_OPERAND (arg0, 1));
9050 else if (TREE_CODE (arg0) == COMPLEX_CST)
9051 return TREE_REALPART (arg0);
9052 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9053 return fold (build2 (TREE_CODE (arg0), type,
9054 fold (build1 (REALPART_EXPR, type,
9055 TREE_OPERAND (arg0, 0))),
9056 fold (build1 (REALPART_EXPR, type,
9057 TREE_OPERAND (arg0, 1)))));
9058 return t;
9060 case IMAGPART_EXPR:
9061 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9062 return fold_convert (type, integer_zero_node);
9063 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9064 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9065 TREE_OPERAND (arg0, 0));
9066 else if (TREE_CODE (arg0) == COMPLEX_CST)
9067 return TREE_IMAGPART (arg0);
9068 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9069 return fold (build2 (TREE_CODE (arg0), type,
9070 fold (build1 (IMAGPART_EXPR, type,
9071 TREE_OPERAND (arg0, 0))),
9072 fold (build1 (IMAGPART_EXPR, type,
9073 TREE_OPERAND (arg0, 1)))));
9074 return t;
9076 case CALL_EXPR:
9077 /* Check for a built-in function. */
9078 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9079 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9080 == FUNCTION_DECL)
9081 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9083 tree tmp = fold_builtin (t, false);
9084 if (tmp)
9085 return tmp;
9087 return t;
9089 default:
9090 return t;
9091 } /* switch (code) */
9094 #ifdef ENABLE_FOLD_CHECKING
9095 #undef fold
9097 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9098 static void fold_check_failed (tree, tree);
9099 void print_fold_checksum (tree);
9101 /* When --enable-checking=fold is in effect, compute a digest of EXPR
9102 before and after the actual fold call to verify that fold did not
9103 accidentally change the original expression. */
9105 tree
9106 fold (tree expr)
9108 tree ret;
9109 struct md5_ctx ctx;
9110 unsigned char checksum_before[16], checksum_after[16];
9111 htab_t ht;
9113 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9114 md5_init_ctx (&ctx);
9115 fold_checksum_tree (expr, &ctx, ht);
9116 md5_finish_ctx (&ctx, checksum_before);
9117 htab_empty (ht);
9119 ret = fold_1 (expr);
9121 md5_init_ctx (&ctx);
9122 fold_checksum_tree (expr, &ctx, ht);
9123 md5_finish_ctx (&ctx, checksum_after);
9124 htab_delete (ht);
9126 if (memcmp (checksum_before, checksum_after, 16))
9127 fold_check_failed (expr, ret);
9129 return ret;
9132 void
9133 print_fold_checksum (tree expr)
9135 struct md5_ctx ctx;
9136 unsigned char checksum[16], cnt;
9137 htab_t ht;
9139 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9140 md5_init_ctx (&ctx);
9141 fold_checksum_tree (expr, &ctx, ht);
9142 md5_finish_ctx (&ctx, checksum);
9143 htab_delete (ht);
9144 for (cnt = 0; cnt < 16; ++cnt)
9145 fprintf (stderr, "%02x", checksum[cnt]);
9146 putc ('\n', stderr);
9149 static void
9150 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9152 internal_error ("fold check: original tree changed by fold");
9155 static void
9156 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9158 void **slot;
9159 enum tree_code code;
9160 char buf[sizeof (struct tree_decl)];
9161 int i, len;
9163 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9164 <= sizeof (struct tree_decl))
9165 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9166 if (expr == NULL)
9167 return;
9168 slot = htab_find_slot (ht, expr, INSERT);
9169 if (*slot != NULL)
9170 return;
9171 *slot = expr;
9172 code = TREE_CODE (expr);
9173 if (TREE_CODE_CLASS (code) == tcc_declaration
9174 && DECL_ASSEMBLER_NAME_SET_P (expr))
9176 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9177 memcpy (buf, expr, tree_size (expr));
9178 expr = (tree) buf;
9179 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9181 else if (TREE_CODE_CLASS (code) == tcc_type
9182 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9183 || TYPE_CACHED_VALUES_P (expr)))
9185 /* Allow these fields to be modified. */
9186 memcpy (buf, expr, tree_size (expr));
9187 expr = (tree) buf;
9188 TYPE_POINTER_TO (expr) = NULL;
9189 TYPE_REFERENCE_TO (expr) = NULL;
9190 TYPE_CACHED_VALUES_P (expr) = 0;
9191 TYPE_CACHED_VALUES (expr) = NULL;
9193 md5_process_bytes (expr, tree_size (expr), ctx);
9194 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9195 if (TREE_CODE_CLASS (code) != tcc_type
9196 && TREE_CODE_CLASS (code) != tcc_declaration)
9197 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9198 switch (TREE_CODE_CLASS (code))
9200 case tcc_constant:
9201 switch (code)
9203 case STRING_CST:
9204 md5_process_bytes (TREE_STRING_POINTER (expr),
9205 TREE_STRING_LENGTH (expr), ctx);
9206 break;
9207 case COMPLEX_CST:
9208 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9209 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9210 break;
9211 case VECTOR_CST:
9212 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9213 break;
9214 default:
9215 break;
9217 break;
9218 case tcc_exceptional:
9219 switch (code)
9221 case TREE_LIST:
9222 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9223 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9224 break;
9225 case TREE_VEC:
9226 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9227 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9228 break;
9229 default:
9230 break;
9232 break;
9233 case tcc_expression:
9234 case tcc_reference:
9235 case tcc_comparison:
9236 case tcc_unary:
9237 case tcc_binary:
9238 case tcc_statement:
9239 len = first_rtl_op (code);
9240 for (i = 0; i < len; ++i)
9241 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9242 break;
9243 case tcc_declaration:
9244 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9245 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9246 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9247 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9248 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9249 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9250 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9251 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9252 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9253 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9254 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9255 break;
9256 case tcc_type:
9257 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9258 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9259 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9260 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9261 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9262 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9263 if (INTEGRAL_TYPE_P (expr)
9264 || SCALAR_FLOAT_TYPE_P (expr))
9266 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9267 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9269 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9270 if (TREE_CODE (expr) == RECORD_TYPE
9271 || TREE_CODE (expr) == UNION_TYPE
9272 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9273 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9274 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9275 break;
9276 default:
9277 break;
9281 #endif
9283 /* Perform constant folding and related simplification of initializer
9284 expression EXPR. This behaves identically to "fold" but ignores
9285 potential run-time traps and exceptions that fold must preserve. */
9287 tree
9288 fold_initializer (tree expr)
9290 int saved_signaling_nans = flag_signaling_nans;
9291 int saved_trapping_math = flag_trapping_math;
9292 int saved_trapv = flag_trapv;
9293 tree result;
9295 flag_signaling_nans = 0;
9296 flag_trapping_math = 0;
9297 flag_trapv = 0;
9299 result = fold (expr);
9301 flag_signaling_nans = saved_signaling_nans;
9302 flag_trapping_math = saved_trapping_math;
9303 flag_trapv = saved_trapv;
9305 return result;
9308 /* Determine if the first argument is a multiple of the second. Return 0 if
9309 it is not, or if we cannot easily determine it to be.
9311 An example of the sort of thing we care about (at this point; this routine
9312 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9313 fold cases do now) is discovering that
9315 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9317 is a multiple of
9319 SAVE_EXPR (J * 8)
9321 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9323 This code also handles discovering that
9325 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9327 is a multiple of 8 so we don't have to worry about dealing with a
9328 possible remainder.
9330 Note that we *look* inside a SAVE_EXPR only to determine how it was
9331 calculated; it is not safe for fold to do much of anything else with the
9332 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9333 at run time. For example, the latter example above *cannot* be implemented
9334 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9335 evaluation time of the original SAVE_EXPR is not necessarily the same at
9336 the time the new expression is evaluated. The only optimization of this
9337 sort that would be valid is changing
9339 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9341 divided by 8 to
9343 SAVE_EXPR (I) * SAVE_EXPR (J)
9345 (where the same SAVE_EXPR (J) is used in the original and the
9346 transformed version). */
9348 static int
9349 multiple_of_p (tree type, tree top, tree bottom)
9351 if (operand_equal_p (top, bottom, 0))
9352 return 1;
9354 if (TREE_CODE (type) != INTEGER_TYPE)
9355 return 0;
9357 switch (TREE_CODE (top))
9359 case MULT_EXPR:
9360 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9361 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9363 case PLUS_EXPR:
9364 case MINUS_EXPR:
9365 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9366 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9368 case LSHIFT_EXPR:
9369 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9371 tree op1, t1;
9373 op1 = TREE_OPERAND (top, 1);
9374 /* const_binop may not detect overflow correctly,
9375 so check for it explicitly here. */
9376 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9377 > TREE_INT_CST_LOW (op1)
9378 && TREE_INT_CST_HIGH (op1) == 0
9379 && 0 != (t1 = fold_convert (type,
9380 const_binop (LSHIFT_EXPR,
9381 size_one_node,
9382 op1, 0)))
9383 && ! TREE_OVERFLOW (t1))
9384 return multiple_of_p (type, t1, bottom);
9386 return 0;
9388 case NOP_EXPR:
9389 /* Can't handle conversions from non-integral or wider integral types. */
9390 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9391 || (TYPE_PRECISION (type)
9392 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9393 return 0;
9395 /* ... fall through ... */
9397 case SAVE_EXPR:
9398 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9400 case INTEGER_CST:
9401 if (TREE_CODE (bottom) != INTEGER_CST
9402 || (TYPE_UNSIGNED (type)
9403 && (tree_int_cst_sgn (top) < 0
9404 || tree_int_cst_sgn (bottom) < 0)))
9405 return 0;
9406 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9407 top, bottom, 0));
9409 default:
9410 return 0;
9414 /* Return true if `t' is known to be non-negative. */
9416 int
9417 tree_expr_nonnegative_p (tree t)
9419 switch (TREE_CODE (t))
9421 case ABS_EXPR:
9422 return 1;
9424 case INTEGER_CST:
9425 return tree_int_cst_sgn (t) >= 0;
9427 case REAL_CST:
9428 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9430 case PLUS_EXPR:
9431 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9432 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9433 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9435 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9436 both unsigned and at least 2 bits shorter than the result. */
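/* E.g., two zero-extended 8-bit values added as 32-bit ints give
prec = MAX (8, 8) + 1 = 9 < 32; indeed the largest possible sum,
255 + 255 = 510, stays well below the sign bit. */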
9437 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9438 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9439 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9441 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9442 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9443 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9444 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9446 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9447 TYPE_PRECISION (inner2)) + 1;
9448 return prec < TYPE_PRECISION (TREE_TYPE (t));
9451 break;
9453 case MULT_EXPR:
9454 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9456 /* x * x for floating point x is always non-negative. */
9457 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9458 return 1;
9459 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9460 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9463 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9464 both unsigned and the sum of their widths is less than that of the result. */
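/* E.g., an 8-bit by 8-bit multiply widened to 32 bits has
8 + 8 = 16 < 32, and the largest product, 255 * 255 = 65025, is
below 2^16 and so cannot reach the sign bit. */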
9465 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9466 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9467 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9469 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9470 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9471 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9472 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9473 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9474 < TYPE_PRECISION (TREE_TYPE (t));
9476 return 0;
9478 case TRUNC_DIV_EXPR:
9479 case CEIL_DIV_EXPR:
9480 case FLOOR_DIV_EXPR:
9481 case ROUND_DIV_EXPR:
9482 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9483 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9485 case TRUNC_MOD_EXPR:
9486 case CEIL_MOD_EXPR:
9487 case FLOOR_MOD_EXPR:
9488 case ROUND_MOD_EXPR:
9489 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9491 case RDIV_EXPR:
9492 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9493 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9495 case BIT_AND_EXPR:
9496 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9497 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9498 case BIT_IOR_EXPR:
9499 case BIT_XOR_EXPR:
9500 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9501 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9503 case NOP_EXPR:
9505 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9506 tree outer_type = TREE_TYPE (t);
9508 if (TREE_CODE (outer_type) == REAL_TYPE)
9510 if (TREE_CODE (inner_type) == REAL_TYPE)
9511 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9512 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9514 if (TYPE_UNSIGNED (inner_type))
9515 return 1;
9516 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9519 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9521 if (TREE_CODE (inner_type) == REAL_TYPE)
9522 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9523 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9524 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9525 && TYPE_UNSIGNED (inner_type);
9528 break;
9530 case COND_EXPR:
9531 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9532 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9533 case COMPOUND_EXPR:
9534 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9535 case MIN_EXPR:
9536 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9537 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9538 case MAX_EXPR:
9539 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9540 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9541 case MODIFY_EXPR:
9542 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9543 case BIND_EXPR:
9544 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9545 case SAVE_EXPR:
9546 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9547 case NON_LVALUE_EXPR:
9548 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9549 case FLOAT_EXPR:
9550 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9552 case TARGET_EXPR:
9554 tree temp = TARGET_EXPR_SLOT (t);
9555 t = TARGET_EXPR_INITIAL (t);
9557 /* If the initializer is non-void, then it's a normal expression
9558 that will be assigned to the slot. */
9559 if (!VOID_TYPE_P (t))
9560 return tree_expr_nonnegative_p (t);
9562 /* Otherwise, the initializer sets the slot in some way. One common
9563 way is an assignment statement at the end of the initializer. */
9564 while (1)
9566 if (TREE_CODE (t) == BIND_EXPR)
9567 t = expr_last (BIND_EXPR_BODY (t));
9568 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9569 || TREE_CODE (t) == TRY_CATCH_EXPR)
9570 t = expr_last (TREE_OPERAND (t, 0));
9571 else if (TREE_CODE (t) == STATEMENT_LIST)
9572 t = expr_last (t);
9573 else
9574 break;
9576 if (TREE_CODE (t) == MODIFY_EXPR
9577 && TREE_OPERAND (t, 0) == temp)
9578 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9580 return 0;
9583 case CALL_EXPR:
9585 tree fndecl = get_callee_fndecl (t);
9586 tree arglist = TREE_OPERAND (t, 1);
9587 if (fndecl
9588 && DECL_BUILT_IN (fndecl)
9589 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9590 switch (DECL_FUNCTION_CODE (fndecl))
9592 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9593 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9594 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9595 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9597 CASE_BUILTIN_F (BUILT_IN_ACOS)
9598 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9599 CASE_BUILTIN_F (BUILT_IN_CABS)
9600 CASE_BUILTIN_F (BUILT_IN_COSH)
9601 CASE_BUILTIN_F (BUILT_IN_ERFC)
9602 CASE_BUILTIN_F (BUILT_IN_EXP)
9603 CASE_BUILTIN_F (BUILT_IN_EXP10)
9604 CASE_BUILTIN_F (BUILT_IN_EXP2)
9605 CASE_BUILTIN_F (BUILT_IN_FABS)
9606 CASE_BUILTIN_F (BUILT_IN_FDIM)
9607 CASE_BUILTIN_F (BUILT_IN_FREXP)
9608 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9609 CASE_BUILTIN_F (BUILT_IN_POW10)
9610 CASE_BUILTIN_I (BUILT_IN_FFS)
9611 CASE_BUILTIN_I (BUILT_IN_PARITY)
9612 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9613 /* Always true. */
9614 return 1;
9616 CASE_BUILTIN_F (BUILT_IN_SQRT)
9617 /* sqrt(-0.0) is -0.0. */
9618 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9619 return 1;
9620 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9622 CASE_BUILTIN_F (BUILT_IN_ASINH)
9623 CASE_BUILTIN_F (BUILT_IN_ATAN)
9624 CASE_BUILTIN_F (BUILT_IN_ATANH)
9625 CASE_BUILTIN_F (BUILT_IN_CBRT)
9626 CASE_BUILTIN_F (BUILT_IN_CEIL)
9627 CASE_BUILTIN_F (BUILT_IN_ERF)
9628 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9629 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9630 CASE_BUILTIN_F (BUILT_IN_FMOD)
9631 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9632 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9633 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9634 CASE_BUILTIN_F (BUILT_IN_LRINT)
9635 CASE_BUILTIN_F (BUILT_IN_LROUND)
9636 CASE_BUILTIN_F (BUILT_IN_MODF)
9637 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9638 CASE_BUILTIN_F (BUILT_IN_POW)
9639 CASE_BUILTIN_F (BUILT_IN_RINT)
9640 CASE_BUILTIN_F (BUILT_IN_ROUND)
9641 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9642 CASE_BUILTIN_F (BUILT_IN_SINH)
9643 CASE_BUILTIN_F (BUILT_IN_TANH)
9644 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9645 /* True if the 1st argument is nonnegative. */
9646 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9648 CASE_BUILTIN_F (BUILT_IN_FMAX)
9649 /* True if the 1st OR 2nd arguments are nonnegative. */
9650 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9651 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9653 CASE_BUILTIN_F (BUILT_IN_FMIN)
9654 /* True if the 1st AND 2nd arguments are nonnegative. */
9655 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9656 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9658 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9659 /* True if the 2nd argument is nonnegative. */
9660 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9662 default:
9663 break;
9664 #undef CASE_BUILTIN_F
9665 #undef CASE_BUILTIN_I
9669 /* ... fall through ... */
9671 default:
9672 if (truth_value_p (TREE_CODE (t)))
9673 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9674 return 1;
9677 /* We don't know the sign of `t', so be conservative and return false. */
9678 return 0;
9681 /* Return true when T is an address and is known to be nonzero.
9682 For floating point we further ensure that T is not denormal.
9683 Similar logic is present in nonzero_address in rtlanal.c. */
9685 static bool
9686 tree_expr_nonzero_p (tree t)
9688 tree type = TREE_TYPE (t);
9690 /* Doing something useful for floating point would need more work. */
9691 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9692 return false;
9694 switch (TREE_CODE (t))
9696 case ABS_EXPR:
9697 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9698 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9699 break;
9700 case INTEGER_CST:
9701 /* We used to test for !integer_zerop here. This does not work correctly
9702 if TREE_CONSTANT_OVERFLOW (t). */
9703 return (TREE_INT_CST_LOW (t) != 0
9704 || TREE_INT_CST_HIGH (t) != 0);
9706 case PLUS_EXPR:
9707 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9709 /* In the presence of negative values it is hard
9710 to say anything definite. */
9711 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9712 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9713 return false;
9714 /* One of the operands must be positive and the other non-negative. */
9715 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9716 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9718 break;
9720 case MULT_EXPR:
9721 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9723 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9724 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9726 break;
9728 case NOP_EXPR:
9730 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9731 tree outer_type = TREE_TYPE (t);
9733 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9734 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9736 break;
9738 case ADDR_EXPR:
9740 tree base = get_base_address (TREE_OPERAND (t, 0));
9742 if (!base)
9743 return false;
9745 /* Weak declarations may link to NULL. */
9746 if (DECL_P (base))
9747 return !DECL_WEAK (base);
9749 /* Constants are never weak. */
9750 if (CONSTANT_CLASS_P (base))
9751 return true;
9753 return false;
9756 case COND_EXPR:
9757 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9758 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9760 case MIN_EXPR:
9761 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9762 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9764 case MAX_EXPR:
9765 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9767 /* When both operands are nonzero, then MAX must be too. */
9768 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9769 return true;
9771 /* MAX where operand 0 is positive is positive. */
9772 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9774 /* MAX where operand 1 is positive is positive. */
9775 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9776 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9777 return true;
9778 break;
9780 case COMPOUND_EXPR:
9781 case MODIFY_EXPR:
9782 case BIND_EXPR:
9783 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9785 case SAVE_EXPR:
9786 case NON_LVALUE_EXPR:
9787 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9789 case BIT_IOR_EXPR:
9790 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9791 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9793 default:
9794 break;
9796 return false;
9799 /* See if we are applying CODE, a relational operator, to the highest or
9800 lowest possible integer of TYPE. If so, then the result is a compile
9801 time constant. */
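/* For instance, if OP1 is the maximum value of its type, then
X > OP1 is always false and X <= OP1 is always true, while
X >= OP1 reduces to X == OP1. */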
9803 static tree
9804 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9805 tree *op1_p)
9807 tree op0 = *op0_p;
9808 tree op1 = *op1_p;
9809 enum tree_code code = *code_p;
9810 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9812 if (TREE_CODE (op1) == INTEGER_CST
9813 && ! TREE_CONSTANT_OVERFLOW (op1)
9814 && width <= HOST_BITS_PER_WIDE_INT
9815 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9816 || POINTER_TYPE_P (TREE_TYPE (op1))))
9818 unsigned HOST_WIDE_INT signed_max;
9819 unsigned HOST_WIDE_INT max, min;
9821 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9823 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9825 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9826 min = 0;
9828 else
9830 max = signed_max;
9831 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9834 if (TREE_INT_CST_HIGH (op1) == 0
9835 && TREE_INT_CST_LOW (op1) == max)
9836 switch (code)
9838 case GT_EXPR:
9839 return omit_one_operand (type, integer_zero_node, op0);
9841 case GE_EXPR:
9842 *code_p = EQ_EXPR;
9843 break;
9844 case LE_EXPR:
9845 return omit_one_operand (type, integer_one_node, op0);
9847 case LT_EXPR:
9848 *code_p = NE_EXPR;
9849 break;
9851 /* The GE_EXPR and LT_EXPR cases above are not normally
9852 reached because of previous transformations. */
9854 default:
9855 break;
9857 else if (TREE_INT_CST_HIGH (op1) == 0
9858 && TREE_INT_CST_LOW (op1) == max - 1)
9859 switch (code)
9861 case GT_EXPR:
9862 *code_p = EQ_EXPR;
9863 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9864 break;
9865 case LE_EXPR:
9866 *code_p = NE_EXPR;
9867 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9868 break;
9869 default:
9870 break;
9872 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9873 && TREE_INT_CST_LOW (op1) == min)
9874 switch (code)
9876 case LT_EXPR:
9877 return omit_one_operand (type, integer_zero_node, op0);
9879 case LE_EXPR:
9880 *code_p = EQ_EXPR;
9881 break;
9883 case GE_EXPR:
9884 return omit_one_operand (type, integer_one_node, op0);
9886 case GT_EXPR:
9887 *code_p = NE_EXPR;
9888 break;
9890 default:
9891 break;
9893 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9894 && TREE_INT_CST_LOW (op1) == min + 1)
9895 switch (code)
9897 case GE_EXPR:
9898 *code_p = NE_EXPR;
9899 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9900 break;
9901 case LT_EXPR:
9902 *code_p = EQ_EXPR;
9903 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9904 break;
9905 default:
9906 break;
9909 else if (TREE_INT_CST_HIGH (op1) == 0
9910 && TREE_INT_CST_LOW (op1) == signed_max
9911 && TYPE_UNSIGNED (TREE_TYPE (op1))
9912 /* signed_type does not work on pointer types. */
9913 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9915 /* The following case also applies to X < signed_max+1
9916 and X >= signed_max+1 because of previous transformations. */
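/* E.g., for 32-bit unsigned X, the test X <= 0x7fffffff is
equivalent to (int) X >= 0, which may then fold further. */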
9917 if (code == LE_EXPR || code == GT_EXPR)
9919 tree st0, st1, exp, retval;
9920 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9921 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9923 exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
9924 type,
9925 fold_convert (st0, op0),
9926 fold_convert (st1, integer_zero_node));
9928 retval
9929 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9930 TREE_TYPE (exp),
9931 TREE_OPERAND (exp, 0),
9932 TREE_OPERAND (exp, 1));
9934 /* If we are in gimple form, then returning EXP would create
9935 non-gimple expressions. Clearing it is safe and ensures
9936 we do not allow a non-gimple expression to escape. */
9937 if (in_gimple_form)
9938 exp = NULL;
9940 return (retval ? retval : exp);
9945 return NULL_TREE;
9949 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9950 attempt to fold the expression to a constant without modifying TYPE,
9951 OP0 or OP1.
9953 If the expression could be simplified to a constant, then return
9954 the constant. If the expression would not be simplified to a
9955 constant, then return NULL_TREE.
9957 Note this is primarily designed to be called after gimplification
9958 of the tree structures and when at least one operand is a constant.
9959 As a result of those simplifying assumptions this routine is far
9960 simpler than the generic fold routine. */
9962 tree
9963 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9964 tree op0, tree op1)
9966 int wins = 1;
9967 tree subop0;
9968 tree subop1;
9969 tree tem;
9971 /* If this is a commutative operation, and ARG0 is a constant, move it
9972 to ARG1 to reduce the number of tests below. */
9973 if (commutative_tree_code (code)
9974 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9976 tem = op0;
9977 op0 = op1;
9978 op1 = tem;
9981 /* If either operand is a complex constant, extract its real component. */
9982 if (TREE_CODE (op0) == COMPLEX_CST)
9983 subop0 = TREE_REALPART (op0);
9984 else
9985 subop0 = op0;
9987 if (TREE_CODE (op1) == COMPLEX_CST)
9988 subop1 = TREE_REALPART (op1);
9989 else
9990 subop1 = op1;
9992 /* Note if either argument is not a real or integer constant.
9993 With a few exceptions, simplification is limited to cases
9994 where both arguments are constants. */
9995 if ((TREE_CODE (subop0) != INTEGER_CST
9996 && TREE_CODE (subop0) != REAL_CST)
9997 || (TREE_CODE (subop1) != INTEGER_CST
9998 && TREE_CODE (subop1) != REAL_CST))
9999 wins = 0;
10001 switch (code)
10003 case PLUS_EXPR:
10004 /* (plus (address) (const_int)) is a constant. */
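/* E.g., (&x + 4) + 8 folds to &x + 12 by merging the two integer
offsets with const_binop. */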
10005 if (TREE_CODE (op0) == PLUS_EXPR
10006 && TREE_CODE (op1) == INTEGER_CST
10007 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10008 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10009 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10010 == ADDR_EXPR)))
10011 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10013 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10014 const_binop (PLUS_EXPR, op1,
10015 TREE_OPERAND (op0, 1), 0));
10017 case BIT_XOR_EXPR:
10019 binary:
10020 if (!wins)
10021 return NULL_TREE;
10023 /* Both arguments are constants. Simplify. */
10024 tem = const_binop (code, op0, op1, 0);
10025 if (tem != NULL_TREE)
10027 /* The return value should always have the same type as
10028 the original expression. */
10029 if (TREE_TYPE (tem) != type)
10030 tem = fold_convert (type, tem);
10032 return tem;
10034 return NULL_TREE;
10036 case MINUS_EXPR:
10037 /* Fold &x - &x. This can happen from &x.foo - &x.
10038 This is unsafe for certain floats even in non-IEEE formats.
10039 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10040 Also note that operand_equal_p is always false if an
10041 operand is volatile. */
10042 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10043 return fold_convert (type, integer_zero_node);
10045 goto binary;
10047 case MULT_EXPR:
10048 case BIT_AND_EXPR:
10049 /* Special case multiplication or bitwise AND where one argument
10050 is zero. */
10051 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10052 return omit_one_operand (type, op1, op0);
10053 else
10054 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10055 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10056 && real_zerop (op1))
10057 return omit_one_operand (type, op1, op0);
10059 goto binary;
10061 case BIT_IOR_EXPR:
10062 /* Special case when we know the result will be all ones. */
10063 if (integer_all_onesp (op1))
10064 return omit_one_operand (type, op1, op0);
10066 goto binary;
10068 case TRUNC_DIV_EXPR:
10069 case ROUND_DIV_EXPR:
10070 case FLOOR_DIV_EXPR:
10071 case CEIL_DIV_EXPR:
10072 case EXACT_DIV_EXPR:
10073 case TRUNC_MOD_EXPR:
10074 case ROUND_MOD_EXPR:
10075 case FLOOR_MOD_EXPR:
10076 case CEIL_MOD_EXPR:
10077 case RDIV_EXPR:
10078 /* Division by zero is undefined. */
10079 if (integer_zerop (op1))
10080 return NULL_TREE;
10082 if (TREE_CODE (op1) == REAL_CST
10083 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10084 && real_zerop (op1))
10085 return NULL_TREE;
10087 goto binary;
10089 case MIN_EXPR:
10090 if (INTEGRAL_TYPE_P (type)
10091 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10092 return omit_one_operand (type, op1, op0);
10094 goto binary;
10096 case MAX_EXPR:
10097 if (INTEGRAL_TYPE_P (type)
10098 && TYPE_MAX_VALUE (type)
10099 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10100 return omit_one_operand (type, op1, op0);
10102 goto binary;
10104 case RSHIFT_EXPR:
10105 /* Optimize -1 >> x for arithmetic right shifts. */
10106 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10107 return omit_one_operand (type, op0, op1);
10108 /* ... fall through ... */
10110 case LSHIFT_EXPR:
10111 if (integer_zerop (op0))
10112 return omit_one_operand (type, op0, op1);
10114 /* Since a negative shift count is not well-defined, don't
10115 try to compute it in the compiler. */
10116 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10117 return NULL_TREE;
10119 goto binary;
10121 case LROTATE_EXPR:
10122 case RROTATE_EXPR:
10123 /* -1 rotated either direction by any amount is still -1. */
10124 if (integer_all_onesp (op0))
10125 return omit_one_operand (type, op0, op1);
10127 /* 0 rotated either direction by any amount is still zero. */
10128 if (integer_zerop (op0))
10129 return omit_one_operand (type, op0, op1);
10131 goto binary;
10133 case COMPLEX_EXPR:
10134 if (wins)
10135 return build_complex (type, op0, op1);
10136 return NULL_TREE;
10138 case LT_EXPR:
10139 case LE_EXPR:
10140 case GT_EXPR:
10141 case GE_EXPR:
10142 case EQ_EXPR:
10143 case NE_EXPR:
10144 /* If one arg is a real or integer constant, put it last. */
10145 if ((TREE_CODE (op0) == INTEGER_CST
10146 && TREE_CODE (op1) != INTEGER_CST)
10147 || (TREE_CODE (op0) == REAL_CST
10148 && TREE_CODE (op1) != REAL_CST))
10150 tree temp;
10152 temp = op0;
10153 op0 = op1;
10154 op1 = temp;
10155 code = swap_tree_comparison (code);
10158 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10159 This transformation affects the cases which are handled in later
10160 optimizations involving comparisons with non-negative constants. */
10161 if (TREE_CODE (op1) == INTEGER_CST
10162 && TREE_CODE (op0) != INTEGER_CST
10163 && tree_int_cst_sgn (op1) > 0)
10165 switch (code)
10167 case GE_EXPR:
10168 code = GT_EXPR;
10169 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10170 break;
10172 case LT_EXPR:
10173 code = LE_EXPR;
10174 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10175 break;
10177 default:
10178 break;
10182 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10183 if (tem)
10184 return tem;
10186 /* Fall through. */
10188 case ORDERED_EXPR:
10189 case UNORDERED_EXPR:
10190 case UNLT_EXPR:
10191 case UNLE_EXPR:
10192 case UNGT_EXPR:
10193 case UNGE_EXPR:
10194 case UNEQ_EXPR:
10195 case LTGT_EXPR:
10196 if (!wins)
10197 return NULL_TREE;
10199 return fold_relational_const (code, type, op0, op1);
10201 case RANGE_EXPR:
10202 /* This could probably be handled. */
10203 return NULL_TREE;
10205 case TRUTH_AND_EXPR:
10206 /* If second arg is constant zero, result is zero, but first arg
10207 must be evaluated. */
10208 if (integer_zerop (op1))
10209 return omit_one_operand (type, op1, op0);
10210 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10211 case will be handled here. */
10212 if (integer_zerop (op0))
10213 return omit_one_operand (type, op0, op1);
10214 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10215 return constant_boolean_node (true, type);
10216 return NULL_TREE;
10218 case TRUTH_OR_EXPR:
10219 /* If second arg is constant true, result is true, but we must
10220 evaluate first arg. */
10221 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10222 return omit_one_operand (type, op1, op0);
10223 /* Likewise for first arg, but note this only occurs here for
10224 TRUTH_OR_EXPR. */
10225 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10226 return omit_one_operand (type, op0, op1);
10227 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10228 return constant_boolean_node (false, type);
10229 return NULL_TREE;
10231 case TRUTH_XOR_EXPR:
10232 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10234 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10235 return constant_boolean_node (x, type);
10237 return NULL_TREE;
10239 default:
10240 return NULL_TREE;
10244 /* Given the components of a unary expression CODE, TYPE and OP0,
10245 attempt to fold the expression to a constant without modifying
10246 TYPE or OP0.
10248 If the expression could be simplified to a constant, then return
10249 the constant. If the expression would not be simplified to a
10250 constant, then return NULL_TREE.
10252 Note this is primarily designed to be called after gimplification
10253 of the tree structures and when op0 is a constant. As a result
10254 of those simplifying assumptions this routine is far simpler than
10255 the generic fold routine. */
10257 tree
10258 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10259 tree op0)
10261 /* Make sure we have a suitable constant argument. */
10262 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10264 tree subop;
10266 if (TREE_CODE (op0) == COMPLEX_CST)
10267 subop = TREE_REALPART (op0);
10268 else
10269 subop = op0;
10271 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10272 return NULL_TREE;
10275 switch (code)
10277 case NOP_EXPR:
10278 case FLOAT_EXPR:
10279 case CONVERT_EXPR:
10280 case FIX_TRUNC_EXPR:
10281 case FIX_FLOOR_EXPR:
10282 case FIX_CEIL_EXPR:
10283 return fold_convert_const (code, type, op0);
10285 case NEGATE_EXPR:
10286 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10287 return fold_negate_const (op0, type);
10288 else
10289 return NULL_TREE;
10291 case ABS_EXPR:
10292 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10293 return fold_abs_const (op0, type);
10294 else
10295 return NULL_TREE;
10297 case BIT_NOT_EXPR:
10298 if (TREE_CODE (op0) == INTEGER_CST)
10299 return fold_not_const (op0, type);
10300 else
10301 return NULL_TREE;
10303 case REALPART_EXPR:
10304 if (TREE_CODE (op0) == COMPLEX_CST)
10305 return TREE_REALPART (op0);
10306 else
10307 return NULL_TREE;
10309 case IMAGPART_EXPR:
10310 if (TREE_CODE (op0) == COMPLEX_CST)
10311 return TREE_IMAGPART (op0);
10312 else
10313 return NULL_TREE;
10315 case CONJ_EXPR:
10316 if (TREE_CODE (op0) == COMPLEX_CST
10317 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10318 return build_complex (type, TREE_REALPART (op0),
10319 negate_expr (TREE_IMAGPART (op0)));
10320 return NULL_TREE;
10322 default:
10323 return NULL_TREE;
10327 /* If EXP represents referencing an element in a constant string
10328 (either via pointer arithmetic or array indexing), return the
10329 tree representing the value accessed, otherwise return NULL. */
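/* E.g., "abc"[1] and *("abc" + 2) fold to the character constants
'b' and 'c' respectively. */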
10331 tree
10332 fold_read_from_constant_string (tree exp)
10334 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10336 tree exp1 = TREE_OPERAND (exp, 0);
10337 tree index;
10338 tree string;
10340 if (TREE_CODE (exp) == INDIRECT_REF)
10341 string = string_constant (exp1, &index);
10342 else
10344 tree low_bound = array_ref_low_bound (exp);
10345 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10347 /* Optimize the special case of a zero lower bound.
10349 We convert the low_bound to sizetype to avoid some problems
10350 with constant folding. (E.g. suppose the lower bound is 1,
10351 and its mode is QI. Without the conversion, (ARRAY
10352 + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (-(unsigned char) 1))
10353 + INDEX), which becomes (ARRAY + 255 + INDEX). Oops!) */
10354 if (! integer_zerop (low_bound))
10355 index = size_diffop (index, fold_convert (sizetype, low_bound));
10357 string = exp1;
10360 if (string
10361 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10362 && TREE_CODE (string) == STRING_CST
10363 && TREE_CODE (index) == INTEGER_CST
10364 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10365 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10366 == MODE_INT)
10367 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10368 return fold_convert (TREE_TYPE (exp),
10369 build_int_cst (NULL_TREE,
10370 (TREE_STRING_POINTER (string)
10371 [TREE_INT_CST_LOW (index)])));
10373 return NULL;
10376 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10377 an integer constant or real constant.
10379 TYPE is the type of the result. */
10381 static tree
10382 fold_negate_const (tree arg0, tree type)
10384 tree t = NULL_TREE;
10386 switch (TREE_CODE (arg0))
10388 case INTEGER_CST:
10390 unsigned HOST_WIDE_INT low;
10391 HOST_WIDE_INT high;
10392 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10393 TREE_INT_CST_HIGH (arg0),
10394 &low, &high);
10395 t = build_int_cst_wide (type, low, high);
10396 t = force_fit_type (t, 1,
10397 (overflow | TREE_OVERFLOW (arg0))
10398 && !TYPE_UNSIGNED (type),
10399 TREE_CONSTANT_OVERFLOW (arg0));
10400 break;
10403 case REAL_CST:
10404 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10405 break;
10407 default:
10408 gcc_unreachable ();
10411 return t;
10414 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10415 an integer constant or real constant.
10417 TYPE is the type of the result. */
10419 tree
10420 fold_abs_const (tree arg0, tree type)
10422 tree t = NULL_TREE;
10424 switch (TREE_CODE (arg0))
10426 case INTEGER_CST:
10427 /* If the value is unsigned, then the absolute value is
10428 the same as the ordinary value. */
10429 if (TYPE_UNSIGNED (type))
10430 t = arg0;
10431 /* Similarly, if the value is non-negative. */
10432 else if (INT_CST_LT (integer_minus_one_node, arg0))
10433 t = arg0;
10434 /* If the value is negative, then the absolute value is
10435 its negation. */
10436 else
10438 unsigned HOST_WIDE_INT low;
10439 HOST_WIDE_INT high;
10440 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10441 TREE_INT_CST_HIGH (arg0),
10442 &low, &high);
10443 t = build_int_cst_wide (type, low, high);
10444 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10445 TREE_CONSTANT_OVERFLOW (arg0));
10447 break;
10449 case REAL_CST:
10450 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10451 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10452 else
10453 t = arg0;
10454 break;
10456 default:
10457 gcc_unreachable ();
10460 return t;
10463 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10464 constant. TYPE is the type of the result. */
10466 static tree
10467 fold_not_const (tree arg0, tree type)
10469 tree t = NULL_TREE;
10471 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
10473 t = build_int_cst_wide (type,
10474 ~ TREE_INT_CST_LOW (arg0),
10475 ~ TREE_INT_CST_HIGH (arg0));
10476 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10477 TREE_CONSTANT_OVERFLOW (arg0));
10479 return t;
10482 /* Given CODE, a relational operator, the target type, TYPE and two
10483 constant operands OP0 and OP1, return the result of the
10484 relational operation. If the result is not a compile time
10485 constant, then return NULL_TREE. */
10487 static tree
10488 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10490 int result, invert;
10492 /* From here on, the only cases we handle are when the result is
10493 known to be a constant. */
10495 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10497 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10498 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10500 /* Handle the cases where either operand is a NaN. */
10501 if (real_isnan (c0) || real_isnan (c1))
10503 switch (code)
10505 case EQ_EXPR:
10506 case ORDERED_EXPR:
10507 result = 0;
10508 break;
10510 case NE_EXPR:
10511 case UNORDERED_EXPR:
10512 case UNLT_EXPR:
10513 case UNLE_EXPR:
10514 case UNGT_EXPR:
10515 case UNGE_EXPR:
10516 case UNEQ_EXPR:
10517 result = 1;
10518 break;
10520 case LT_EXPR:
10521 case LE_EXPR:
10522 case GT_EXPR:
10523 case GE_EXPR:
10524 case LTGT_EXPR:
10525 if (flag_trapping_math)
10526 return NULL_TREE;
10527 result = 0;
10528 break;
10530 default:
10531 gcc_unreachable ();
10534 return constant_boolean_node (result, type);
10537 return constant_boolean_node (real_compare (code, c0, c1), type);
10540 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10542 To compute GT, swap the arguments and do LT.
10543 To compute GE, do LT and invert the result.
10544 To compute LE, swap the arguments, do LT and invert the result.
10545 To compute NE, do EQ and invert the result.
10547 Therefore, the code below must handle only EQ and LT. */
10549 if (code == LE_EXPR || code == GT_EXPR)
10551 tree tem = op0;
10552 op0 = op1;
10553 op1 = tem;
10554 code = swap_tree_comparison (code);
10557 /* Note that it is safe to invert for real values here because we
10558 have already handled the one case where it matters. */
10560 invert = 0;
10561 if (code == NE_EXPR || code == GE_EXPR)
10563 invert = 1;
10564 code = invert_tree_comparison (code, false);
10567 /* Compute a result for LT or EQ if the arguments permit;
10568 otherwise return NULL_TREE. */
10569 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10571 if (code == EQ_EXPR)
10572 result = tree_int_cst_equal (op0, op1);
10573 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10574 result = INT_CST_LT_UNSIGNED (op0, op1);
10575 else
10576 result = INT_CST_LT (op0, op1);
10578 else
10579 return NULL_TREE;
10581 if (invert)
10582 result ^= 1;
10583 return constant_boolean_node (result, type);
10586 /* Build an expression for a cleanup point containing EXPR with type TYPE.
10587 Don't build a CLEANUP_POINT_EXPR for an EXPR which doesn't have side
10588 effects. */
10590 tree
10591 fold_build_cleanup_point_expr (tree type, tree expr)
10593 /* If the expression does not have side effects then we don't have to wrap
10594 it with a cleanup point expression. */
10595 if (!TREE_SIDE_EFFECTS (expr))
10596 return expr;
10598 return build1 (CLEANUP_POINT_EXPR, type, expr);
10601 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10602 avoid confusing the gimplify process. */
10604 tree
10605 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10607 /* The size of the object is not relevant when talking about its address. */
10608 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10609 t = TREE_OPERAND (t, 0);
10611 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
10612 if (TREE_CODE (t) == INDIRECT_REF
10613 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
10615 t = TREE_OPERAND (t, 0);
10616 if (TREE_TYPE (t) != ptrtype)
10617 t = build1 (NOP_EXPR, ptrtype, t);
10619 else
10621 tree base = t;
10623 while (handled_component_p (base)
10624 || TREE_CODE (base) == REALPART_EXPR
10625 || TREE_CODE (base) == IMAGPART_EXPR)
10626 base = TREE_OPERAND (base, 0);
10627 if (DECL_P (base))
10628 TREE_ADDRESSABLE (base) = 1;
10630 t = build1 (ADDR_EXPR, ptrtype, t);
10633 return t;
10636 tree
10637 build_fold_addr_expr (tree t)
10639 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
        return build4 (ARRAY_REF, type, op, size_zero_node,
                       NULL_TREE, NULL_TREE);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build4 (ARRAY_REF, type, sub, size_zero_node,
                     NULL_TREE, NULL_TREE);
    }

  return build1 (INDIRECT_REF, type, t);
}
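
/* Worked example (editorial sketch; arrptr is hypothetical): if arrptr
   has type int (*)[4], folding *(int *) arrptr matches the final
   transformation above: STRIP_NOPS removes the cast, the recursive
   call builds *arrptr of type int[4], and the result is the ARRAY_REF
   (*arrptr)[0].  */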

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
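
/* Example (editorial sketch; x, f and g are hypothetical): with the
   result ignored, x + f () folds to f (), because only the second
   operand has side effects; f () + g () is returned unchanged; and a
   side-effect-free expression such as x * 2 folds directly to
   integer_zero_node.  */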

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant this check is more expensive than simply
     performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
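
/* Worked example (editorial sketch): on the power-of-two path,
   round_up (13, 8) computes (13 + 7) & -8 == 20 & ~7 == 16.  On the
   general path, round_up (13, 6) computes CEIL_DIV (13, 6) * 6
   == 3 * 6 == 18.  */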

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant this check is more expensive than simply
     performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
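
/* Worked example (editorial sketch): round_down (13, 8) computes
   13 & -8 == 8 on the power-of-two path, and round_down (13, 6)
   computes FLOOR_DIV (13, 6) * 6 == 2 * 6 == 12 on the general
   path.  */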

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, &E1 - &E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitsize1, bitsize2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;
  enum machine_mode mode1, mode2;
  int unsignedp1, unsignedp2, volatilep1, volatilep2;

  core1 = get_inner_reference (e1, &bitsize1, &bitpos1, &toffset1, &mode1,
                               &unsignedp1, &volatilep1);
  core2 = get_inner_reference (e2, &bitsize2, &bitpos2, &toffset2, &mode2,
                               &unsignedp2, &volatilep2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
      if (!host_integerp (tdiff, 0))
        return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
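
/* Example (editorial sketch; a and i are hypothetical): for int a[10],
   calling ptr_difference_const (a[3], a[1], &diff) finds the common
   core a; the byte positions differ by 2 * sizeof (int), so 8 is
   stored in *diff (assuming 4-byte int) and true is returned.  For
   a[i] versus a[1] it returns false, since only one offset is
   constant.  */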