gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
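
/* Each value above is the inclusive OR of four primitive bits:
   1 (less than), 2 (equal), 4 (greater than) and 8 (unordered).
   For example, COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ, and the
   logical inverse of a comparison is its XOR with COMPCODE_TRUE,
   so COMPCODE_LT (1) inverts to COMPCODE_UNGE (14).  */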
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static tree build_zero_vector (tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
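
/* For example, in 8-bit arithmetic 100 + 100 wraps to -56: the operands
   agree in sign, so ~(a ^ b) has the sign bit set, a ^ sum also has the
   sign bit set, and the whole expression is negative, making the macro
   nonzero.  For 100 + (-50) the operand signs differ, ~(a ^ b) clears
   the sign bit, and no overflow is reported.  */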
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
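
/* For instance, with a 32-bit HOST_WIDE_INT, BASE is 0x10000, and for
   x == 0x12345678 we get LOWPART (x) == 0x5678 and HIGHPART (x) == 0x1234,
   so x == LOWPART (x) + HIGHPART (x) * BASE.  encode and decode below
   round-trip a (low, hi) pair through this 4-word representation.  */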
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);	/* (l < l1) is the carry out of the low word.  */

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
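
/* For example, with a 32-bit HOST_WIDE_INT, adding (l1, h1) = (0xFFFFFFFF, 0)
   to (l2, h2) = (1, 0) gives l = 0 with a carry, so h = 1 and the result is
   2^32; OVERFLOW_SUM_SIGN (0, 0, 1) is zero because the doubleword still
   fits in a signed 64-bit value.  */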
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
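
/* As a concrete case with a 32-bit HOST_WIDE_INT: multiplying 2^32
   (l1 = 0, h1 = 1) by 2^31 (l2 = 0x80000000, h2 = 0) produces a low
   doubleword of 0x8000000000000000 with a zero top half.  Since *hv is
   then negative, the top half would have to be all ones for the signed
   result to fit, so overflow is reported, as expected for 2^63.  */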
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step right shift of L1 avoids an undefined shift by
         the full word width when COUNT is zero.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      /* Again shift in two steps so a zero COUNT cannot produce an
         undefined shift by the full word width.  */
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
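
/* Both rotates use the identity rot (x, n) == (x >> n) | (x << (prec - n)).
   For example, rotating the 16-bit value 0x1234 right by 4 with PREC == 16
   yields 0x4123: the logical right shift contributes 0x0123 and the left
   shift by PREC - COUNT contributes 0x4000.  */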
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
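
/* To illustrate the rounding codes: dividing -8 by 3 gives a trial
   (truncating) quotient of -2 with remainder -2.  TRUNC_DIV_EXPR keeps -2,
   FLOOR_DIV_EXPR adjusts to -3, CEIL_DIV_EXPR keeps -2, and ROUND_DIV_EXPR
   picks -3 because twice the remainder's magnitude (4) exceeds the
   divisor's magnitude (3).  The matching *_MOD_EXPR remainders then follow
   from rem = num - quo * den.  */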
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
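
/* Among signed constants the only value rejected is the most negative
   one: for an 8-bit type that is -128, whose low 8 bits equal 1 << 7.
   Negating it in 2's complement arithmetic would wrap back to itself.  */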
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   which goes to *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
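
/* For example, splitting IN = x - 3 with CODE == PLUS_EXPR treats the
   subtraction as x + (-3): the variable part returned is x, *MINUS_LITP
   is set to 3, and *CONP and *LITP stay null.  */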
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
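
/* For example, with both constants of the unsigned sizetype,
   size_diffop (size_int (4), size_int (7)) computes 7 - 4 == 3 in the
   unsigned type, where it cannot overflow, then converts to ssizetype
   and negates, yielding -3.  */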
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1));

          t = force_fit_type (t,
                              /* Don't set the overflow when
                                 converting a pointer.  */
                              !POINTER_TYPE_P (TREE_TYPE (arg1)),
                              (TREE_INT_CST_HIGH (arg1) < 0
                               && (TYPE_UNSIGNED (type)
                                   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                              | TREE_OVERFLOW (arg1),
                              TREE_CONSTANT_OVERFLOW (arg1));
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            case FIX_ROUND_EXPR:
              real_round (&r, VOIDmode, &x);
              break;

            default:
              gcc_unreachable ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_cst_wide (type, low, high);

          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                              TREE_CONSTANT_OVERFLOW (arg1));
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
1849 /* Convert expression ARG to type TYPE. Used by the middle-end for
1850 simple conversions in preference to calling the front-end's convert. */
1852 tree
1853 fold_convert (tree type, tree arg)
1855 tree orig = TREE_TYPE (arg);
1856 tree tem;
1858 if (type == orig)
1859 return arg;
1861 if (TREE_CODE (arg) == ERROR_MARK
1862 || TREE_CODE (type) == ERROR_MARK
1863 || TREE_CODE (orig) == ERROR_MARK)
1864 return error_mark_node;
1866 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1867 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1868 TYPE_MAIN_VARIANT (orig)))
1869 return fold (build1 (NOP_EXPR, type, arg));
1871 switch (TREE_CODE (type))
1873 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1874 case POINTER_TYPE: case REFERENCE_TYPE:
1875 case OFFSET_TYPE:
1876 if (TREE_CODE (arg) == INTEGER_CST)
1878 tem = fold_convert_const (NOP_EXPR, type, arg);
1879 if (tem != NULL_TREE)
1880 return tem;
1882 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1883 || TREE_CODE (orig) == OFFSET_TYPE)
1884 return fold (build1 (NOP_EXPR, type, arg));
1885 if (TREE_CODE (orig) == COMPLEX_TYPE)
1887 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1888 return fold_convert (type, tem);
1890 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1891 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1892 return fold (build1 (NOP_EXPR, type, arg));
1894 case REAL_TYPE:
1895 if (TREE_CODE (arg) == INTEGER_CST)
1897 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1898 if (tem != NULL_TREE)
1899 return tem;
1901 else if (TREE_CODE (arg) == REAL_CST)
1903 tem = fold_convert_const (NOP_EXPR, type, arg);
1904 if (tem != NULL_TREE)
1905 return tem;
1908 switch (TREE_CODE (orig))
1910 case INTEGER_TYPE: case CHAR_TYPE:
1911 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1912 case POINTER_TYPE: case REFERENCE_TYPE:
1913 return fold (build1 (FLOAT_EXPR, type, arg));
1915 case REAL_TYPE:
1916 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1917 type, arg));
1919 case COMPLEX_TYPE:
1920 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1921 return fold_convert (type, tem);
1923 default:
1924 gcc_unreachable ();
1927 case COMPLEX_TYPE:
1928 switch (TREE_CODE (orig))
1930 case INTEGER_TYPE: case CHAR_TYPE:
1931 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case REAL_TYPE:
1934 return build2 (COMPLEX_EXPR, type,
1935 fold_convert (TREE_TYPE (type), arg),
1936 fold_convert (TREE_TYPE (type), integer_zero_node));
1937 case COMPLEX_TYPE:
1939 tree rpart, ipart;
1941 if (TREE_CODE (arg) == COMPLEX_EXPR)
1943 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1944 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1945 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1948 arg = save_expr (arg);
1949 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1950 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1951 rpart = fold_convert (TREE_TYPE (type), rpart);
1952 ipart = fold_convert (TREE_TYPE (type), ipart);
1953 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1956 default:
1957 gcc_unreachable ();
1960 case VECTOR_TYPE:
1961 if (integer_zerop (arg))
1962 return build_zero_vector (type);
1963 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1964 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1965 || TREE_CODE (orig) == VECTOR_TYPE);
1966 return fold (build1 (NOP_EXPR, type, arg));
1968 case VOID_TYPE:
1969 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1971 default:
1972 gcc_unreachable ();
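/* Illustrative sketch, not part of the original file: a middle-end
   caller uses fold_convert directly instead of the front-end's
   convert.  Converting an INTEGER_CST to REAL_TYPE goes through the
   FLOAT_EXPR arm above and folds to a REAL_CST at compile time.  */
#if 0
static tree
example_fold_convert (void)
{
  tree forty_two = build_int_cst (integer_type_node, 42);
  return fold_convert (double_type_node, forty_two);  /* 42.0 */
}
#endif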
1976 /* Return an expr equal to X but certainly not valid as an lvalue. */
1978 tree
1979 non_lvalue (tree x)
1981 /* We only need to wrap lvalue tree codes. */
1982 switch (TREE_CODE (x))
1984 case VAR_DECL:
1985 case PARM_DECL:
1986 case RESULT_DECL:
1987 case LABEL_DECL:
1988 case FUNCTION_DECL:
1989 case SSA_NAME:
1991 case COMPONENT_REF:
1992 case INDIRECT_REF:
1993 case ARRAY_REF:
1994 case ARRAY_RANGE_REF:
1995 case BIT_FIELD_REF:
1996 case OBJ_TYPE_REF:
1998 case REALPART_EXPR:
1999 case IMAGPART_EXPR:
2000 case PREINCREMENT_EXPR:
2001 case PREDECREMENT_EXPR:
2002 case SAVE_EXPR:
2003 case TRY_CATCH_EXPR:
2004 case WITH_CLEANUP_EXPR:
2005 case COMPOUND_EXPR:
2006 case MODIFY_EXPR:
2007 case TARGET_EXPR:
2008 case COND_EXPR:
2009 case BIND_EXPR:
2010 case MIN_EXPR:
2011 case MAX_EXPR:
2012 break;
2014 default:
2015 /* Assume the worst for front-end tree codes. */
2016 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2017 break;
2018 return x;
2020 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2023 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2024 Zero means allow extended lvalues. */
2026 int pedantic_lvalues;
2028 /* When pedantic, return an expr equal to X but certainly not valid as a
2029 pedantic lvalue. Otherwise, return X. */
2031 tree
2032 pedantic_non_lvalue (tree x)
2034 if (pedantic_lvalues)
2035 return non_lvalue (x);
2036 else
2037 return x;
2040 /* Given a tree comparison code, return the code that is the logical inverse
2041 of the given code. It is not safe to do this for floating-point
2042 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS as
2043 well: if reversing the comparison is unsafe, return ERROR_MARK. */
2045 static enum tree_code
2046 invert_tree_comparison (enum tree_code code, bool honor_nans)
2048 if (honor_nans && flag_trapping_math)
2049 return ERROR_MARK;
2051 switch (code)
2053 case EQ_EXPR:
2054 return NE_EXPR;
2055 case NE_EXPR:
2056 return EQ_EXPR;
2057 case GT_EXPR:
2058 return honor_nans ? UNLE_EXPR : LE_EXPR;
2059 case GE_EXPR:
2060 return honor_nans ? UNLT_EXPR : LT_EXPR;
2061 case LT_EXPR:
2062 return honor_nans ? UNGE_EXPR : GE_EXPR;
2063 case LE_EXPR:
2064 return honor_nans ? UNGT_EXPR : GT_EXPR;
2065 case LTGT_EXPR:
2066 return UNEQ_EXPR;
2067 case UNEQ_EXPR:
2068 return LTGT_EXPR;
2069 case UNGT_EXPR:
2070 return LE_EXPR;
2071 case UNGE_EXPR:
2072 return LT_EXPR;
2073 case UNLT_EXPR:
2074 return GE_EXPR;
2075 case UNLE_EXPR:
2076 return GT_EXPR;
2077 case ORDERED_EXPR:
2078 return UNORDERED_EXPR;
2079 case UNORDERED_EXPR:
2080 return ORDERED_EXPR;
2081 default:
2082 gcc_unreachable ();
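/* A minimal sketch (illustrative only, assuming -fno-trapping-math so
   ERROR_MARK is not returned): with NaNs honored, the inverse of an
   ordered comparison must accept unordered operands, so LT inverts to
   UNGE rather than GE.  */
#if 0
static void
example_invert_comparison (void)
{
  /* !(a < b) in the presence of NaNs is "a >= b or unordered".  */
  gcc_assert (invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR);
  /* Without NaNs the familiar inverse applies.  */
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
}
#endif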
2086 /* Similar, but return the comparison that results if the operands are
2087 swapped. This is safe for floating-point. */
2089 enum tree_code
2090 swap_tree_comparison (enum tree_code code)
2092 switch (code)
2094 case EQ_EXPR:
2095 case NE_EXPR:
2096 return code;
2097 case GT_EXPR:
2098 return LT_EXPR;
2099 case GE_EXPR:
2100 return LE_EXPR;
2101 case LT_EXPR:
2102 return GT_EXPR;
2103 case LE_EXPR:
2104 return GE_EXPR;
2105 default:
2106 gcc_unreachable ();
2111 /* Convert a comparison tree code from an enum tree_code representation
2112 into a compcode bit-based encoding. This function is the inverse of
2113 compcode_to_comparison. */
2115 static enum comparison_code
2116 comparison_to_compcode (enum tree_code code)
2118 switch (code)
2120 case LT_EXPR:
2121 return COMPCODE_LT;
2122 case EQ_EXPR:
2123 return COMPCODE_EQ;
2124 case LE_EXPR:
2125 return COMPCODE_LE;
2126 case GT_EXPR:
2127 return COMPCODE_GT;
2128 case NE_EXPR:
2129 return COMPCODE_NE;
2130 case GE_EXPR:
2131 return COMPCODE_GE;
2132 case ORDERED_EXPR:
2133 return COMPCODE_ORD;
2134 case UNORDERED_EXPR:
2135 return COMPCODE_UNORD;
2136 case UNLT_EXPR:
2137 return COMPCODE_UNLT;
2138 case UNEQ_EXPR:
2139 return COMPCODE_UNEQ;
2140 case UNLE_EXPR:
2141 return COMPCODE_UNLE;
2142 case UNGT_EXPR:
2143 return COMPCODE_UNGT;
2144 case LTGT_EXPR:
2145 return COMPCODE_LTGT;
2146 case UNGE_EXPR:
2147 return COMPCODE_UNGE;
2148 default:
2149 gcc_unreachable ();
2153 /* Convert a compcode bit-based encoding of a comparison operator back
2154 to GCC's enum tree_code representation. This function is the
2155 inverse of comparison_to_compcode. */
2157 static enum tree_code
2158 compcode_to_comparison (enum comparison_code code)
2160 switch (code)
2162 case COMPCODE_LT:
2163 return LT_EXPR;
2164 case COMPCODE_EQ:
2165 return EQ_EXPR;
2166 case COMPCODE_LE:
2167 return LE_EXPR;
2168 case COMPCODE_GT:
2169 return GT_EXPR;
2170 case COMPCODE_NE:
2171 return NE_EXPR;
2172 case COMPCODE_GE:
2173 return GE_EXPR;
2174 case COMPCODE_ORD:
2175 return ORDERED_EXPR;
2176 case COMPCODE_UNORD:
2177 return UNORDERED_EXPR;
2178 case COMPCODE_UNLT:
2179 return UNLT_EXPR;
2180 case COMPCODE_UNEQ:
2181 return UNEQ_EXPR;
2182 case COMPCODE_UNLE:
2183 return UNLE_EXPR;
2184 case COMPCODE_UNGT:
2185 return UNGT_EXPR;
2186 case COMPCODE_LTGT:
2187 return LTGT_EXPR;
2188 case COMPCODE_UNGE:
2189 return UNGE_EXPR;
2190 default:
2191 gcc_unreachable ();
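/* Worked example of the bit-based encoding (illustrative only): each
   compcode is a four-bit mask of LT, EQ, GT and UNORD, so ANDing or
   ORing two codes combines the comparisons they represent.  */
#if 0
static void
example_compcode_bits (void)
{
  /* "x < y or x == y" is exactly "x <= y".  */
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  /* "x < y and x == y" can never hold.  */
  gcc_assert ((COMPCODE_LT & COMPCODE_EQ) == COMPCODE_FALSE);
  /* NE is every outcome except EQ.  */
  gcc_assert ((COMPCODE_TRUE & ~COMPCODE_EQ) == COMPCODE_NE);
}
#endif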
2195 /* Return a tree for the comparison which is the combination of
2196 doing the AND or OR (depending on CODE) of the two operations LCODE
2197 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2198 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2199 if this makes the transformation invalid. */
2201 tree
2202 combine_comparisons (enum tree_code code, enum tree_code lcode,
2203 enum tree_code rcode, tree truth_type,
2204 tree ll_arg, tree lr_arg)
2206 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2207 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2208 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2209 enum comparison_code compcode;
2211 switch (code)
2213 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2214 compcode = lcompcode & rcompcode;
2215 break;
2217 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2218 compcode = lcompcode | rcompcode;
2219 break;
2221 default:
2222 return NULL_TREE;
2225 if (!honor_nans)
2227 /* Eliminate unordered comparisons, as well as LTGT and ORD
2228 which are not used unless the mode has NaNs. */
2229 compcode &= ~COMPCODE_UNORD;
2230 if (compcode == COMPCODE_LTGT)
2231 compcode = COMPCODE_NE;
2232 else if (compcode == COMPCODE_ORD)
2233 compcode = COMPCODE_TRUE;
2235 else if (flag_trapping_math)
2237 /* Check that the original operation and the optimized ones will trap
2238 under the same condition. */
2239 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2240 && (lcompcode != COMPCODE_EQ)
2241 && (lcompcode != COMPCODE_ORD);
2242 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2243 && (rcompcode != COMPCODE_EQ)
2244 && (rcompcode != COMPCODE_ORD);
2245 bool trap = (compcode & COMPCODE_UNORD) == 0
2246 && (compcode != COMPCODE_EQ)
2247 && (compcode != COMPCODE_ORD);
2249 /* In a short-circuited boolean expression the LHS might be
2250 such that the RHS, if evaluated, will never trap. For
2251 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2252 if neither x nor y is NaN. (This is a mixed blessing: for
2253 example, the expression above will never trap, hence
2254 optimizing it to x < y would be invalid). */
2255 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2256 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2257 rtrap = false;
2259 /* If the comparison was short-circuited, and only the RHS
2260 trapped, we may now generate a spurious trap. */
2261 if (rtrap && !ltrap
2262 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2263 return NULL_TREE;
2265 /* If we changed the conditions that cause a trap, we lose. */
2266 if ((ltrap || rtrap) != trap)
2267 return NULL_TREE;
2270 if (compcode == COMPCODE_TRUE)
2271 return constant_boolean_node (true, truth_type);
2272 else if (compcode == COMPCODE_FALSE)
2273 return constant_boolean_node (false, truth_type);
2274 else
2275 return fold (build2 (compcode_to_comparison (compcode),
2276 truth_type, ll_arg, lr_arg));
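/* Usage sketch (illustrative only; X and Y stand for existing operand
   trees): folding "x < y || x == y" down to a single comparison.  */
#if 0
static tree
example_combine_comparisons (tree x, tree y)
{
  /* lcompcode | rcompcode == COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
     so this returns the tree for "x <= y".  */
  return combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type_node, x, y);
}
#endif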
2279 /* Return nonzero if CODE is a tree code that represents a truth value. */
2281 static int
2282 truth_value_p (enum tree_code code)
2284 return (TREE_CODE_CLASS (code) == tcc_comparison
2285 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2286 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2287 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2290 /* Return nonzero if two operands (typically of the same tree node)
2291 are necessarily equal. If either argument has side-effects this
2292 function returns zero. FLAGS modifies behavior as follows:
2294 If OEP_ONLY_CONST is set, only return nonzero for constants.
2295 This function tests whether the operands are indistinguishable;
2296 it does not test whether they are equal using C's == operation.
2297 The distinction is important for IEEE floating point, because
2298 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2299 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2301 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2302 even though it may hold multiple values during a function.
2303 This is because a GCC tree node guarantees that nothing else is
2304 executed between the evaluation of its "operands" (which may often
2305 be evaluated in arbitrary order). Hence if the operands themselves
2306 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2307 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2308 unset means assuming isochronic (or instantaneous) tree equivalence.
2309 Unless comparing arbitrary expression trees, such as from different
2310 statements, this flag can usually be left unset.
2312 If OEP_PURE_SAME is set, then pure functions with identical arguments
2313 are considered the same. It is used when the caller has other ways
2314 to ensure that global memory is unchanged in between. */
2316 int
2317 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2319 /* If one is specified and the other isn't, they aren't equal, and if
2320 neither is specified, they are.
2322 ??? This is temporary and is meant only to handle the cases of the
2323 optional operands for COMPONENT_REF and ARRAY_REF. */
2324 if ((arg0 && !arg1) || (!arg0 && arg1))
2325 return 0;
2326 else if (!arg0 && !arg1)
2327 return 1;
2328 /* If either is ERROR_MARK, they aren't equal. */
2329 else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2330 return 0;
2332 /* If the two types differ in signedness, then we can't consider
2333 them equal. We must check this before the STRIP_NOPS calls
2334 because they may change the signedness of the arguments. */
2335 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2336 return 0;
2338 STRIP_NOPS (arg0);
2339 STRIP_NOPS (arg1);
2341 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2342 /* This is needed for conversions and for COMPONENT_REF.
2343 Might as well play it safe and always test this. */
2344 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2345 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2346 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2347 return 0;
2349 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2350 We don't care about side effects in that case because the SAVE_EXPR
2351 takes care of that for us. In all other cases, two expressions are
2352 equal if they have no side effects. If we have two identical
2353 expressions with side effects that should be treated the same due
2354 to the only side effects being identical SAVE_EXPR's, that will
2355 be detected in the recursive calls below. */
2356 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2357 && (TREE_CODE (arg0) == SAVE_EXPR
2358 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2359 return 1;
2361 /* Next handle constant cases, those for which we can return 1 even
2362 if ONLY_CONST is set. */
2363 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2364 switch (TREE_CODE (arg0))
2366 case INTEGER_CST:
2367 return (! TREE_CONSTANT_OVERFLOW (arg0)
2368 && ! TREE_CONSTANT_OVERFLOW (arg1)
2369 && tree_int_cst_equal (arg0, arg1));
2371 case REAL_CST:
2372 return (! TREE_CONSTANT_OVERFLOW (arg0)
2373 && ! TREE_CONSTANT_OVERFLOW (arg1)
2374 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2375 TREE_REAL_CST (arg1)));
2377 case VECTOR_CST:
2379 tree v1, v2;
2381 if (TREE_CONSTANT_OVERFLOW (arg0)
2382 || TREE_CONSTANT_OVERFLOW (arg1))
2383 return 0;
2385 v1 = TREE_VECTOR_CST_ELTS (arg0);
2386 v2 = TREE_VECTOR_CST_ELTS (arg1);
2387 while (v1 && v2)
2389 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2390 flags))
2391 return 0;
2392 v1 = TREE_CHAIN (v1);
2393 v2 = TREE_CHAIN (v2);
2396 return 1;
2399 case COMPLEX_CST:
2400 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2401 flags)
2402 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2403 flags));
2405 case STRING_CST:
2406 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2407 && ! memcmp (TREE_STRING_POINTER (arg0),
2408 TREE_STRING_POINTER (arg1),
2409 TREE_STRING_LENGTH (arg0)));
2411 case ADDR_EXPR:
2412 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2413 0);
2414 default:
2415 break;
2418 if (flags & OEP_ONLY_CONST)
2419 return 0;
2421 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2423 case tcc_unary:
2424 /* Two conversions are equal only if signedness and modes match. */
2425 switch (TREE_CODE (arg0))
2427 case NOP_EXPR:
2428 case CONVERT_EXPR:
2429 case FIX_CEIL_EXPR:
2430 case FIX_TRUNC_EXPR:
2431 case FIX_FLOOR_EXPR:
2432 case FIX_ROUND_EXPR:
2433 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2434 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2435 return 0;
2436 break;
2437 default:
2438 break;
2441 return operand_equal_p (TREE_OPERAND (arg0, 0),
2442 TREE_OPERAND (arg1, 0), flags);
2444 case tcc_comparison:
2445 case tcc_binary:
2446 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2447 TREE_OPERAND (arg1, 0), flags)
2448 && operand_equal_p (TREE_OPERAND (arg0, 1),
2449 TREE_OPERAND (arg1, 1), flags))
2450 return 1;
2452 /* For commutative ops, allow the other order. */
2453 return (commutative_tree_code (TREE_CODE (arg0))
2454 && operand_equal_p (TREE_OPERAND (arg0, 0),
2455 TREE_OPERAND (arg1, 1), flags)
2456 && operand_equal_p (TREE_OPERAND (arg0, 1),
2457 TREE_OPERAND (arg1, 0), flags));
2459 case tcc_reference:
2460 /* If either of the pointer (or reference) expressions we are
2461 dereferencing contain a side effect, these cannot be equal. */
2462 if (TREE_SIDE_EFFECTS (arg0)
2463 || TREE_SIDE_EFFECTS (arg1))
2464 return 0;
2466 switch (TREE_CODE (arg0))
2468 case INDIRECT_REF:
2469 case REALPART_EXPR:
2470 case IMAGPART_EXPR:
2471 return operand_equal_p (TREE_OPERAND (arg0, 0),
2472 TREE_OPERAND (arg1, 0), flags);
2474 case ARRAY_REF:
2475 case ARRAY_RANGE_REF:
2476 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2477 TREE_OPERAND (arg1, 0), flags)
2478 && operand_equal_p (TREE_OPERAND (arg0, 1),
2479 TREE_OPERAND (arg1, 1), flags)
2480 && operand_equal_p (TREE_OPERAND (arg0, 2),
2481 TREE_OPERAND (arg1, 2), flags)
2482 && operand_equal_p (TREE_OPERAND (arg0, 3),
2483 TREE_OPERAND (arg1, 3), flags));
2486 case COMPONENT_REF:
2487 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2488 TREE_OPERAND (arg1, 0), flags)
2489 && operand_equal_p (TREE_OPERAND (arg0, 1),
2490 TREE_OPERAND (arg1, 1), flags)
2491 && operand_equal_p (TREE_OPERAND (arg0, 2),
2492 TREE_OPERAND (arg1, 2), flags));
2495 case BIT_FIELD_REF:
2496 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2497 TREE_OPERAND (arg1, 0), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 1),
2499 TREE_OPERAND (arg1, 1), flags)
2500 && operand_equal_p (TREE_OPERAND (arg0, 2),
2501 TREE_OPERAND (arg1, 2), flags));
2502 default:
2503 return 0;
2506 case tcc_expression:
2507 switch (TREE_CODE (arg0))
2509 case ADDR_EXPR:
2510 case TRUTH_NOT_EXPR:
2511 return operand_equal_p (TREE_OPERAND (arg0, 0),
2512 TREE_OPERAND (arg1, 0), flags);
2514 case TRUTH_ANDIF_EXPR:
2515 case TRUTH_ORIF_EXPR:
2516 return operand_equal_p (TREE_OPERAND (arg0, 0),
2517 TREE_OPERAND (arg1, 0), flags)
2518 && operand_equal_p (TREE_OPERAND (arg0, 1),
2519 TREE_OPERAND (arg1, 1), flags);
2521 case TRUTH_AND_EXPR:
2522 case TRUTH_OR_EXPR:
2523 case TRUTH_XOR_EXPR:
2524 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2525 TREE_OPERAND (arg1, 0), flags)
2526 && operand_equal_p (TREE_OPERAND (arg0, 1),
2527 TREE_OPERAND (arg1, 1), flags))
2528 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2529 TREE_OPERAND (arg1, 1), flags)
2530 && operand_equal_p (TREE_OPERAND (arg0, 1),
2531 TREE_OPERAND (arg1, 0), flags));
2533 case CALL_EXPR:
2534 /* If the CALL_EXPRs call different functions, then they
2535 clearly cannot be equal. */
2536 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2537 TREE_OPERAND (arg1, 0), flags))
2538 return 0;
2541 unsigned int cef = call_expr_flags (arg0);
2542 if (flags & OEP_PURE_SAME)
2543 cef &= ECF_CONST | ECF_PURE;
2544 else
2545 cef &= ECF_CONST;
2546 if (!cef)
2547 return 0;
2550 /* Now see if all the arguments are the same. operand_equal_p
2551 does not handle TREE_LIST, so we walk the operands here
2552 feeding them to operand_equal_p. */
2553 arg0 = TREE_OPERAND (arg0, 1);
2554 arg1 = TREE_OPERAND (arg1, 1);
2555 while (arg0 && arg1)
2557 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2558 flags))
2559 return 0;
2561 arg0 = TREE_CHAIN (arg0);
2562 arg1 = TREE_CHAIN (arg1);
2565 /* If we get here and both argument lists are exhausted
2566 then the CALL_EXPRs are equal. */
2567 return ! (arg0 || arg1);
2569 default:
2570 return 0;
2573 case tcc_declaration:
2574 /* Consider __builtin_sqrt equal to sqrt. */
2575 return (TREE_CODE (arg0) == FUNCTION_DECL
2576 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2577 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2578 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2580 default:
2581 return 0;
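/* Illustrative sketch of the flag semantics documented above: a
   non-volatile decl compares equal to itself by default, but not under
   OEP_ONLY_CONST, which admits constants only.  */
#if 0
static void
example_operand_equal (tree decl)
{
  gcc_assert (operand_equal_p (decl, decl, 0));
  gcc_assert (! operand_equal_p (decl, decl, OEP_ONLY_CONST));
}
#endif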
2585 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2586 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2588 When in doubt, return 0. */
2590 static int
2591 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2593 int unsignedp1, unsignedpo;
2594 tree primarg0, primarg1, primother;
2595 unsigned int correct_width;
2597 if (operand_equal_p (arg0, arg1, 0))
2598 return 1;
2600 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2601 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2602 return 0;
2604 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2605 and see if the inner values are the same. This removes any
2606 signedness comparison, which doesn't matter here. */
2607 primarg0 = arg0, primarg1 = arg1;
2608 STRIP_NOPS (primarg0);
2609 STRIP_NOPS (primarg1);
2610 if (operand_equal_p (primarg0, primarg1, 0))
2611 return 1;
2613 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2614 actual comparison operand, ARG0.
2616 First throw away any conversions to wider types
2617 already present in the operands. */
2619 primarg1 = get_narrower (arg1, &unsignedp1);
2620 primother = get_narrower (other, &unsignedpo);
2622 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2623 if (unsignedp1 == unsignedpo
2624 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2625 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2627 tree type = TREE_TYPE (arg0);
2629 /* Make sure shorter operand is extended the right way
2630 to match the longer operand. */
2631 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2632 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2634 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2635 return 1;
2638 return 0;
2641 /* See if ARG is an expression that is either a comparison or is performing
2642 arithmetic on comparisons. The comparisons must only be comparing
2643 two different values, which will be stored in *CVAL1 and *CVAL2; if
2644 they are nonzero it means that some operands have already been found.
2645 No variables may be used anywhere else in the expression except in the
2646 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2647 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2649 If this is true, return 1. Otherwise, return zero. */
2651 static int
2652 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2654 enum tree_code code = TREE_CODE (arg);
2655 enum tree_code_class class = TREE_CODE_CLASS (code);
2657 /* We can handle some of the tcc_expression cases here. */
2658 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2659 class = tcc_unary;
2660 else if (class == tcc_expression
2661 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2662 || code == COMPOUND_EXPR))
2663 class = tcc_binary;
2665 else if (class == tcc_expression && code == SAVE_EXPR
2666 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2668 /* If we've already found a CVAL1 or CVAL2, this expression is
2669 too complex to handle. */
2670 if (*cval1 || *cval2)
2671 return 0;
2673 class = tcc_unary;
2674 *save_p = 1;
2677 switch (class)
2679 case tcc_unary:
2680 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2682 case tcc_binary:
2683 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2684 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2685 cval1, cval2, save_p));
2687 case tcc_constant:
2688 return 1;
2690 case tcc_expression:
2691 if (code == COND_EXPR)
2692 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2693 cval1, cval2, save_p)
2694 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2695 cval1, cval2, save_p)
2696 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2697 cval1, cval2, save_p));
2698 return 0;
2700 case tcc_comparison:
2701 /* First see if we can handle the first operand, then the second. For
2702 the second operand, we know *CVAL1 can't be zero. It must be that
2703 one side of the comparison is each of the values; test for the
2704 case where this isn't true by failing if the two operands
2705 are the same. */
2707 if (operand_equal_p (TREE_OPERAND (arg, 0),
2708 TREE_OPERAND (arg, 1), 0))
2709 return 0;
2711 if (*cval1 == 0)
2712 *cval1 = TREE_OPERAND (arg, 0);
2713 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2715 else if (*cval2 == 0)
2716 *cval2 = TREE_OPERAND (arg, 0);
2717 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2719 else
2720 return 0;
2722 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2724 else if (*cval2 == 0)
2725 *cval2 = TREE_OPERAND (arg, 1);
2726 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2728 else
2729 return 0;
2731 return 1;
2733 default:
2734 return 0;
2738 /* ARG is a tree that is known to contain just arithmetic operations and
2739 comparisons. Evaluate the operations in the tree substituting NEW0 for
2740 any occurrence of OLD0 as an operand of a comparison and likewise for
2741 NEW1 and OLD1. */
2743 static tree
2744 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2746 tree type = TREE_TYPE (arg);
2747 enum tree_code code = TREE_CODE (arg);
2748 enum tree_code_class class = TREE_CODE_CLASS (code);
2750 /* We can handle some of the tcc_expression cases here. */
2751 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2752 class = tcc_unary;
2753 else if (class == tcc_expression
2754 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2755 class = tcc_binary;
2757 switch (class)
2759 case tcc_unary:
2760 return fold (build1 (code, type,
2761 eval_subst (TREE_OPERAND (arg, 0),
2762 old0, new0, old1, new1)));
2764 case tcc_binary:
2765 return fold (build2 (code, type,
2766 eval_subst (TREE_OPERAND (arg, 0),
2767 old0, new0, old1, new1),
2768 eval_subst (TREE_OPERAND (arg, 1),
2769 old0, new0, old1, new1)));
2771 case tcc_expression:
2772 switch (code)
2774 case SAVE_EXPR:
2775 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2777 case COMPOUND_EXPR:
2778 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2780 case COND_EXPR:
2781 return fold (build3 (code, type,
2782 eval_subst (TREE_OPERAND (arg, 0),
2783 old0, new0, old1, new1),
2784 eval_subst (TREE_OPERAND (arg, 1),
2785 old0, new0, old1, new1),
2786 eval_subst (TREE_OPERAND (arg, 2),
2787 old0, new0, old1, new1)));
2788 default:
2789 break;
2791 /* Fall through - ??? */
2793 case tcc_comparison:
2795 tree arg0 = TREE_OPERAND (arg, 0);
2796 tree arg1 = TREE_OPERAND (arg, 1);
2798 /* We need to check both for exact equality and tree equality. The
2799 former will be true if the operand has a side-effect. In that
2800 case, we know the operand occurred exactly once. */
2802 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2803 arg0 = new0;
2804 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2805 arg0 = new1;
2807 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2808 arg1 = new0;
2809 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2810 arg1 = new1;
2812 return fold (build2 (code, type, arg0, arg1));
2815 default:
2816 return arg;
2820 /* Return a tree for the case when the result of an expression is RESULT
2821 converted to TYPE and OMITTED was previously an operand of the expression
2822 but is now not needed (e.g., we folded OMITTED * 0).
2824 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2825 the conversion of RESULT to TYPE. */
2827 tree
2828 omit_one_operand (tree type, tree result, tree omitted)
2830 tree t = fold_convert (type, result);
2832 if (TREE_SIDE_EFFECTS (omitted))
2833 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2835 return non_lvalue (t);
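/* Usage sketch (illustrative only; INCR stands for a tree with a side
   effect such as "x++"): when folding "x++ * 0" to 0, the omitted
   operand must still be evaluated.  */
#if 0
static tree
example_omit (tree type, tree incr)
{
  /* Yields COMPOUND_EXPR <x++, 0> rather than plain 0.  */
  return omit_one_operand (type, integer_zero_node, incr);
}
#endif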
2838 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2840 static tree
2841 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2843 tree t = fold_convert (type, result);
2845 if (TREE_SIDE_EFFECTS (omitted))
2846 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2848 return pedantic_non_lvalue (t);
2851 /* Return a tree for the case when the result of an expression is RESULT
2852 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2853 of the expression but are now not needed.
2855 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2856 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2857 evaluated before OMITTED2. Otherwise, if neither has side effects,
2858 just do the conversion of RESULT to TYPE. */
2860 tree
2861 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2863 tree t = fold_convert (type, result);
2865 if (TREE_SIDE_EFFECTS (omitted2))
2866 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2867 if (TREE_SIDE_EFFECTS (omitted1))
2868 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2870 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2874 /* Return a simplified tree node for the truth-negation of ARG. This
2875 never alters ARG itself. We assume that ARG is an operation that
2876 returns a truth value (0 or 1).
2878 FIXME: one would think we would fold the result, but it causes
2879 problems with the dominator optimizer. */
2880 tree
2881 invert_truthvalue (tree arg)
2883 tree type = TREE_TYPE (arg);
2884 enum tree_code code = TREE_CODE (arg);
2886 if (code == ERROR_MARK)
2887 return arg;
2889 /* If this is a comparison, we can simply invert it, except for
2890 floating-point non-equality comparisons, in which case we just
2891 enclose a TRUTH_NOT_EXPR around what we have. */
2893 if (TREE_CODE_CLASS (code) == tcc_comparison)
2895 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2896 if (FLOAT_TYPE_P (op_type)
2897 && flag_trapping_math
2898 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2899 && code != NE_EXPR && code != EQ_EXPR)
2900 return build1 (TRUTH_NOT_EXPR, type, arg);
2901 else
2903 code = invert_tree_comparison (code,
2904 HONOR_NANS (TYPE_MODE (op_type)));
2905 if (code == ERROR_MARK)
2906 return build1 (TRUTH_NOT_EXPR, type, arg);
2907 else
2908 return build2 (code, type,
2909 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2913 switch (code)
2915 case INTEGER_CST:
2916 return fold_convert (type,
2917 build_int_cst (NULL_TREE, integer_zerop (arg)));
2919 case TRUTH_AND_EXPR:
2920 return build2 (TRUTH_OR_EXPR, type,
2921 invert_truthvalue (TREE_OPERAND (arg, 0)),
2922 invert_truthvalue (TREE_OPERAND (arg, 1)));
2924 case TRUTH_OR_EXPR:
2925 return build2 (TRUTH_AND_EXPR, type,
2926 invert_truthvalue (TREE_OPERAND (arg, 0)),
2927 invert_truthvalue (TREE_OPERAND (arg, 1)));
2929 case TRUTH_XOR_EXPR:
2930 /* Here we can invert either operand. We invert the first operand
2931 unless the second operand is a TRUTH_NOT_EXPR in which case our
2932 result is the XOR of the first operand with the inside of the
2933 negation of the second operand. */
2935 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2936 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2937 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2938 else
2939 return build2 (TRUTH_XOR_EXPR, type,
2940 invert_truthvalue (TREE_OPERAND (arg, 0)),
2941 TREE_OPERAND (arg, 1));
2943 case TRUTH_ANDIF_EXPR:
2944 return build2 (TRUTH_ORIF_EXPR, type,
2945 invert_truthvalue (TREE_OPERAND (arg, 0)),
2946 invert_truthvalue (TREE_OPERAND (arg, 1)));
2948 case TRUTH_ORIF_EXPR:
2949 return build2 (TRUTH_ANDIF_EXPR, type,
2950 invert_truthvalue (TREE_OPERAND (arg, 0)),
2951 invert_truthvalue (TREE_OPERAND (arg, 1)));
2953 case TRUTH_NOT_EXPR:
2954 return TREE_OPERAND (arg, 0);
2956 case COND_EXPR:
2957 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2958 invert_truthvalue (TREE_OPERAND (arg, 1)),
2959 invert_truthvalue (TREE_OPERAND (arg, 2)));
2961 case COMPOUND_EXPR:
2962 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2963 invert_truthvalue (TREE_OPERAND (arg, 1)));
2965 case NON_LVALUE_EXPR:
2966 return invert_truthvalue (TREE_OPERAND (arg, 0));
2968 case NOP_EXPR:
2969 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2970 break;
2972 case CONVERT_EXPR:
2973 case FLOAT_EXPR:
2974 return build1 (TREE_CODE (arg), type,
2975 invert_truthvalue (TREE_OPERAND (arg, 0)));
2977 case BIT_AND_EXPR:
2978 if (!integer_onep (TREE_OPERAND (arg, 1)))
2979 break;
2980 return build2 (EQ_EXPR, type, arg,
2981 fold_convert (type, integer_zero_node));
2983 case SAVE_EXPR:
2984 return build1 (TRUTH_NOT_EXPR, type, arg);
2986 case CLEANUP_POINT_EXPR:
2987 return build1 (CLEANUP_POINT_EXPR, type,
2988 invert_truthvalue (TREE_OPERAND (arg, 0)));
2990 default:
2991 break;
2993 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
2994 return build1 (TRUTH_NOT_EXPR, type, arg);
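/* A sketch of the recursion above (illustrative only; A and B stand
   for truth-valued trees): inverting a short-circuit AND applies De
   Morgan's law, pushing the negation into both operands.  */
#if 0
static tree
example_invert_truthvalue (tree a, tree b)
{
  tree conj = build2 (TRUTH_ANDIF_EXPR, boolean_type_node, a, b);
  /* !(a && b) becomes (!a || !b), still short-circuited.  */
  return invert_truthvalue (conj);
}
#endif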
2997 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2998 operands are another bit-wise operation with a common input. If so,
2999 distribute the bit operations to save an operation and possibly two if
3000 constants are involved. For example, convert
3001 (A | B) & (A | C) into A | (B & C)
3002 Further simplification will occur if B and C are constants.
3004 If this optimization cannot be done, 0 will be returned. */
3006 static tree
3007 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3009 tree common;
3010 tree left, right;
3012 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3013 || TREE_CODE (arg0) == code
3014 || (TREE_CODE (arg0) != BIT_AND_EXPR
3015 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3016 return 0;
3018 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3020 common = TREE_OPERAND (arg0, 0);
3021 left = TREE_OPERAND (arg0, 1);
3022 right = TREE_OPERAND (arg1, 1);
3024 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3026 common = TREE_OPERAND (arg0, 0);
3027 left = TREE_OPERAND (arg0, 1);
3028 right = TREE_OPERAND (arg1, 0);
3030 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3032 common = TREE_OPERAND (arg0, 1);
3033 left = TREE_OPERAND (arg0, 0);
3034 right = TREE_OPERAND (arg1, 1);
3036 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3038 common = TREE_OPERAND (arg0, 1);
3039 left = TREE_OPERAND (arg0, 0);
3040 right = TREE_OPERAND (arg1, 0);
3042 else
3043 return 0;
3045 return fold (build2 (TREE_CODE (arg0), type, common,
3046 fold (build2 (code, type, left, right))));
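/* Worked instance of the distribution above in plain C (illustrative
   only): with B and C constant, the inner operation folds away.  */
#if 0
static void
example_distribute (unsigned int a)
{
  /* (A | B) & (A | C) == A | (B & C); here B & C == 0, leaving A.  */
  gcc_assert (((a | 0x0f) & (a | 0xf0)) == (a | (0x0f & 0xf0)));
}
#endif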
3049 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3050 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3052 static tree
3053 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3054 int unsignedp)
3056 tree result = build3 (BIT_FIELD_REF, type, inner,
3057 size_int (bitsize), bitsize_int (bitpos));
3059 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3061 return result;
3064 /* Optimize a bit-field compare.
3066 There are two cases: First is a compare against a constant and the
3067 second is a comparison of two items where the fields are at the same
3068 bit position relative to the start of a chunk (byte, halfword, word)
3069 large enough to contain it. In these cases we can avoid the shift
3070 implicit in bitfield extractions.
3072 For constants, we emit a compare of the shifted constant with the
3073 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3074 compared. For two fields at the same position, we do the ANDs with the
3075 similar mask and compare the result of the ANDs.
3077 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3078 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3079 are the left and right operands of the comparison, respectively.
3081 If the optimization described above can be done, we return the resulting
3082 tree. Otherwise we return zero. */
3084 static tree
3085 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3086 tree lhs, tree rhs)
3088 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3089 tree type = TREE_TYPE (lhs);
3090 tree signed_type, unsigned_type;
3091 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3092 enum machine_mode lmode, rmode, nmode;
3093 int lunsignedp, runsignedp;
3094 int lvolatilep = 0, rvolatilep = 0;
3095 tree linner, rinner = NULL_TREE;
3096 tree mask;
3097 tree offset;
3099 /* Get all the information about the extractions being done. If the bit size
3100 is the same as the size of the underlying object, we aren't doing an
3101 extraction at all and so can do nothing. We also don't want to
3102 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3103 then will no longer be able to replace it. */
3104 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3105 &lunsignedp, &lvolatilep);
3106 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3107 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3108 return 0;
3110 if (!const_p)
3112 /* If this is not a constant, we can only do something if bit positions,
3113 sizes, and signedness are the same. */
3114 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3115 &runsignedp, &rvolatilep);
3117 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3118 || lunsignedp != runsignedp || offset != 0
3119 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3120 return 0;
3123 /* See if we can find a mode to refer to this field. We should be able to,
3124 but fail if we can't. */
3125 nmode = get_best_mode (lbitsize, lbitpos,
3126 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3127 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3128 TYPE_ALIGN (TREE_TYPE (rinner))),
3129 word_mode, lvolatilep || rvolatilep);
3130 if (nmode == VOIDmode)
3131 return 0;
3133 /* Set signed and unsigned types of the precision of this mode for the
3134 shifts below. */
3135 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3136 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3138 /* Compute the bit position and size for the new reference and our offset
3139 within it. If the new reference is the same size as the original, we
3140 won't optimize anything, so return zero. */
3141 nbitsize = GET_MODE_BITSIZE (nmode);
3142 nbitpos = lbitpos & ~ (nbitsize - 1);
3143 lbitpos -= nbitpos;
3144 if (nbitsize == lbitsize)
3145 return 0;
3147 if (BYTES_BIG_ENDIAN)
3148 lbitpos = nbitsize - lbitsize - lbitpos;
3150 /* Make the mask to be used against the extracted field. */
3151 mask = build_int_cst (unsigned_type, -1);
3152 mask = force_fit_type (mask, 0, false, false);
3153 mask = fold_convert (unsigned_type, mask);
3154 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3155 mask = const_binop (RSHIFT_EXPR, mask,
3156 size_int (nbitsize - lbitsize - lbitpos), 0);
3158 if (! const_p)
3159 /* If not comparing with constant, just rework the comparison
3160 and return. */
3161 return build2 (code, compare_type,
3162 build2 (BIT_AND_EXPR, unsigned_type,
3163 make_bit_field_ref (linner, unsigned_type,
3164 nbitsize, nbitpos, 1),
3165 mask),
3166 build2 (BIT_AND_EXPR, unsigned_type,
3167 make_bit_field_ref (rinner, unsigned_type,
3168 nbitsize, nbitpos, 1),
3169 mask));
3171 /* Otherwise, we are handling the constant case. See if the constant is too
3172 big for the field. Warn and return a tree for 0 (false) if so. We do
3173 this not only for its own sake, but to avoid having to test for this
3174 error case below. If we didn't, we might generate wrong code.
3176 For unsigned fields, the constant shifted right by the field length should
3177 be all zero. For signed fields, the high-order bits should agree with
3178 the sign bit. */
3180 if (lunsignedp)
3182 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3183 fold_convert (unsigned_type, rhs),
3184 size_int (lbitsize), 0)))
3186 warning ("comparison is always %d due to width of bit-field",
3187 code == NE_EXPR);
3188 return constant_boolean_node (code == NE_EXPR, compare_type);
3191 else
3193 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3194 size_int (lbitsize - 1), 0);
3195 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3197 warning ("comparison is always %d due to width of bit-field",
3198 code == NE_EXPR);
3199 return constant_boolean_node (code == NE_EXPR, compare_type);
3203 /* Single-bit compares should always be against zero. */
3204 if (lbitsize == 1 && ! integer_zerop (rhs))
3206 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3207 rhs = fold_convert (type, integer_zero_node);
3210 /* Make a new bitfield reference, shift the constant over the
3211 appropriate number of bits and mask it with the computed mask
3212 (in case this was a signed field). If we changed it, make a new one. */
3213 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3214 if (lvolatilep)
3216 TREE_SIDE_EFFECTS (lhs) = 1;
3217 TREE_THIS_VOLATILE (lhs) = 1;
3220 rhs = fold (const_binop (BIT_AND_EXPR,
3221 const_binop (LSHIFT_EXPR,
3222 fold_convert (unsigned_type, rhs),
3223 size_int (lbitpos), 0),
3224 mask, 0));
3226 return build2 (code, compare_type,
3227 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3228 rhs);
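/* Worked example of the mask arithmetic above in plain C (illustrative
   only): for a 3-bit field at bit position 4 of a 32-bit word, the
   mask is built by shifting an all-ones value left and then right.  */
#if 0
static void
example_bit_field_mask (void)
{
  const int nbitsize = 32, lbitsize = 3, lbitpos = 4;
  unsigned int mask = (unsigned int) -1;
  mask <<= nbitsize - lbitsize;            /* 0xe0000000 */
  mask >>= nbitsize - lbitsize - lbitpos;  /* 0x00000070 */
  /* "field == 5" then becomes "(word & mask) == ((5 << lbitpos) & mask)",
     avoiding the shifts implicit in a bit-field extraction.  */
  gcc_assert (mask == 0x70);
}
#endif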
3231 /* Subroutine for fold_truthop: decode a field reference.
3233 If EXP is a comparison reference, we return the innermost reference.
3235 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3236 set to the starting bit number.
3238 If the innermost field can be completely contained in a mode-sized
3239 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3241 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3242 otherwise it is not changed.
3244 *PUNSIGNEDP is set to the signedness of the field.
3246 *PMASK is set to the mask used. This is either contained in a
3247 BIT_AND_EXPR or derived from the width of the field.
3249 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3251 Return 0 if this is not a component reference or is one that we can't
3252 do anything with. */
3254 static tree
3255 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3256 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3257 int *punsignedp, int *pvolatilep,
3258 tree *pmask, tree *pand_mask)
3260 tree outer_type = 0;
3261 tree and_mask = 0;
3262 tree mask, inner, offset;
3263 tree unsigned_type;
3264 unsigned int precision;
3266 /* All the optimizations using this function assume integer fields.
3267 There are problems with FP fields since the type_for_size call
3268 below can fail for, e.g., XFmode. */
3269 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3270 return 0;
3272 /* We are interested in the bare arrangement of bits, so strip everything
3273 that doesn't affect the machine mode. However, record the type of the
3274 outermost expression if it may matter below. */
3275 if (TREE_CODE (exp) == NOP_EXPR
3276 || TREE_CODE (exp) == CONVERT_EXPR
3277 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3278 outer_type = TREE_TYPE (exp);
3279 STRIP_NOPS (exp);
3281 if (TREE_CODE (exp) == BIT_AND_EXPR)
3283 and_mask = TREE_OPERAND (exp, 1);
3284 exp = TREE_OPERAND (exp, 0);
3285 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3286 if (TREE_CODE (and_mask) != INTEGER_CST)
3287 return 0;
3290 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3291 punsignedp, pvolatilep);
3292 if ((inner == exp && and_mask == 0)
3293 || *pbitsize < 0 || offset != 0
3294 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3295 return 0;
3297 /* If the number of bits in the reference is the same as the bitsize of
3298 the outer type, then the outer type gives the signedness. Otherwise
3299 (in case of a small bitfield) the signedness is unchanged. */
3300 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3301 *punsignedp = TYPE_UNSIGNED (outer_type);
3303 /* Compute the mask to access the bitfield. */
3304 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3305 precision = TYPE_PRECISION (unsigned_type);
3307 mask = build_int_cst (unsigned_type, -1);
3308 mask = force_fit_type (mask, 0, false, false);
3310 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3311 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3313 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3314 if (and_mask != 0)
3315 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3316 fold_convert (unsigned_type, and_mask), mask));
3318 *pmask = mask;
3319 *pand_mask = and_mask;
3320 return inner;
3323 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3324 bit positions. */
3326 static int
3327 all_ones_mask_p (tree mask, int size)
3329 tree type = TREE_TYPE (mask);
3330 unsigned int precision = TYPE_PRECISION (type);
3331 tree tmask;
3333 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3334 tmask = force_fit_type (tmask, 0, false, false);
3336 return
3337 tree_int_cst_equal (mask,
3338 const_binop (RSHIFT_EXPR,
3339 const_binop (LSHIFT_EXPR, tmask,
3340 size_int (precision - size),
3341 0),
3342 size_int (precision - size), 0));
3345 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3346 represents the sign bit of EXP's type. If EXP represents a sign
3347 or zero extension, also test VAL against the unextended type.
3348 The return value is the (sub)expression whose sign bit is VAL,
3349 or NULL_TREE otherwise. */
3351 static tree
3352 sign_bit_p (tree exp, tree val)
3354 unsigned HOST_WIDE_INT mask_lo, lo;
3355 HOST_WIDE_INT mask_hi, hi;
3356 int width;
3357 tree t;
3359 /* Tree EXP must have an integral type. */
3360 t = TREE_TYPE (exp);
3361 if (! INTEGRAL_TYPE_P (t))
3362 return NULL_TREE;
3364 /* Tree VAL must be an integer constant. */
3365 if (TREE_CODE (val) != INTEGER_CST
3366 || TREE_CONSTANT_OVERFLOW (val))
3367 return NULL_TREE;
3369 width = TYPE_PRECISION (t);
3370 if (width > HOST_BITS_PER_WIDE_INT)
3372 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3373 lo = 0;
3375 mask_hi = ((unsigned HOST_WIDE_INT) -1
3376 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3377 mask_lo = -1;
3379 else
3381 hi = 0;
3382 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3384 mask_hi = 0;
3385 mask_lo = ((unsigned HOST_WIDE_INT) -1
3386 >> (HOST_BITS_PER_WIDE_INT - width));
3389 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3390 treat VAL as if it were unsigned. */
3391 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3392 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3393 return exp;
3395 /* Handle extension from a narrower type. */
3396 if (TREE_CODE (exp) == NOP_EXPR
3397 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3398 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3400 return NULL_TREE;
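/* Illustrative values for the computation above, assuming
   HOST_BITS_PER_WIDE_INT == 64 and a 32-bit type for EXP: the sign
   bit is bit 31, and the mask keeps only the low 32 bits of VAL.  */
#if 0
static void
example_sign_bit (void)
{
  const int width = 32;
  unsigned HOST_WIDE_INT lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
  unsigned HOST_WIDE_INT mask_lo
    = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - width);
  gcc_assert (lo == 0x80000000 && mask_lo == 0xffffffff);
}
#endif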
3403 /* Subroutine for fold_truthop: determine if an operand is simple enough
3404 to be evaluated unconditionally. */
3406 static int
3407 simple_operand_p (tree exp)
3409 /* Strip any conversions that don't change the machine mode. */
3410 while ((TREE_CODE (exp) == NOP_EXPR
3411 || TREE_CODE (exp) == CONVERT_EXPR)
3412 && (TYPE_MODE (TREE_TYPE (exp))
3413 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3414 exp = TREE_OPERAND (exp, 0);
3416 return (CONSTANT_CLASS_P (exp)
3417 || (DECL_P (exp)
3418 && ! TREE_ADDRESSABLE (exp)
3419 && ! TREE_THIS_VOLATILE (exp)
3420 && ! DECL_NONLOCAL (exp)
3421 /* Don't regard global variables as simple. They may be
3422 allocated in ways unknown to the compiler (shared memory,
3423 #pragma weak, etc). */
3424 && ! TREE_PUBLIC (exp)
3425 && ! DECL_EXTERNAL (exp)
3426 /* Loading a static variable is unduly expensive, but global
3427 registers aren't expensive. */
3428 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3431 /* The following functions are subroutines to fold_range_test and allow it to
3432 try to change a logical combination of comparisons into a range test.
3434 For example, both
3435 X == 2 || X == 3 || X == 4 || X == 5
3436 and
3437 X >= 2 && X <= 5
3438 are converted to
3439 (unsigned) (X - 2) <= 3
3441 We describe each set of comparisons as being either inside or outside
3442 a range, using a variable named like IN_P, and then describe the
3443 range with a lower and upper bound. If one of the bounds is omitted,
3444 it represents either the highest or lowest value of the type.
3446 In the comments below, we represent a range by two numbers in brackets
3447 preceded by a "+" to designate being inside that range, or a "-" to
3448 designate being outside that range, so the condition can be inverted by
3449 flipping the prefix. An omitted bound is represented by a "-". For
3450 example, "- [-, 10]" means being outside the range starting at the lowest
3451 possible value and ending at 10, in other words, being greater than 10.
3452 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3453 always false.
3455 We set up things so that the missing bounds are handled in a consistent
3456 manner so neither a missing bound nor "true" and "false" need to be
3457 handled using a special case. */
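/* The unsigned-subtraction trick above in plain C (illustrative only):
   values below the low bound wrap around to very large unsigned
   numbers, so one unsigned comparison tests both bounds at once.  */
#if 0
static void
example_range_test (int x)
{
  /* X == 2 || X == 3 || X == 4 || X == 5, as a single test.  */
  int in_range = (unsigned int) (x - 2) <= 3;
  gcc_assert (in_range == (x >= 2 && x <= 5));
}
#endif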
3459 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3460 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3461 and UPPER1_P are nonzero if the respective argument is an upper bound
3462 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3463 must be specified for a comparison. ARG1 will be converted to ARG0's
3464 type if both are specified. */
3466 static tree
3467 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3468 tree arg1, int upper1_p)
3470 tree tem;
3471 int result;
3472 int sgn0, sgn1;
3474 /* If neither arg represents infinity, do the normal operation.
3475 Else, if not a comparison, return infinity. Else handle the special
3476 comparison rules. Note that most of the cases below won't occur, but
3477 are handled for consistency. */
3479 if (arg0 != 0 && arg1 != 0)
3481 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3482 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3483 STRIP_NOPS (tem);
3484 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3487 if (TREE_CODE_CLASS (code) != tcc_comparison)
3488 return 0;
3490 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3491 for neither. In real mathematics, we cannot assume open-ended ranges are
3492 the same. But this is computer arithmetic, where numbers are finite.
3493 We can therefore replace any missing bound with a value Z, Z being
3494 greater than any representable number, which permits us to treat
3495 unbounded ranges as equal. */
3496 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3497 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3498 switch (code)
3500 case EQ_EXPR:
3501 result = sgn0 == sgn1;
3502 break;
3503 case NE_EXPR:
3504 result = sgn0 != sgn1;
3505 break;
3506 case LT_EXPR:
3507 result = sgn0 < sgn1;
3508 break;
3509 case LE_EXPR:
3510 result = sgn0 <= sgn1;
3511 break;
3512 case GT_EXPR:
3513 result = sgn0 > sgn1;
3514 break;
3515 case GE_EXPR:
3516 result = sgn0 >= sgn1;
3517 break;
3518 default:
3519 gcc_unreachable ();
3522 return constant_boolean_node (result, type);
3525 /* Given EXP, a logical expression, set the range it is testing into
3526 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3527 actually being tested. *PLOW and *PHIGH will be made of the same type
3528 as the returned expression. If EXP is not a comparison, we will most
3529 likely not be returning a useful value and range. */
3531 static tree
3532 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3534 enum tree_code code;
3535 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3536 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3537 int in_p, n_in_p;
3538 tree low, high, n_low, n_high;
3540 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3541 and see if we can refine the range. Some of the cases below may not
3542 happen, but it doesn't seem worth worrying about this. We "continue"
3543 the outer loop when we've changed something; otherwise we "break"
3544 the switch, which will "break" the while. */
3546 in_p = 0;
3547 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3549 while (1)
3551 code = TREE_CODE (exp);
3552 exp_type = TREE_TYPE (exp);
3554 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3556 if (first_rtl_op (code) > 0)
3557 arg0 = TREE_OPERAND (exp, 0);
3558 if (TREE_CODE_CLASS (code) == tcc_comparison
3559 || TREE_CODE_CLASS (code) == tcc_unary
3560 || TREE_CODE_CLASS (code) == tcc_binary)
3561 arg0_type = TREE_TYPE (arg0);
3562 if (TREE_CODE_CLASS (code) == tcc_binary
3563 || TREE_CODE_CLASS (code) == tcc_comparison
3564 || (TREE_CODE_CLASS (code) == tcc_expression
3565 && TREE_CODE_LENGTH (code) > 1))
3566 arg1 = TREE_OPERAND (exp, 1);
3569 switch (code)
3571 case TRUTH_NOT_EXPR:
3572 in_p = ! in_p, exp = arg0;
3573 continue;
3575 case EQ_EXPR: case NE_EXPR:
3576 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3577 /* We can only do something if the range is testing for zero
3578 and if the second operand is an integer constant. Note that
3579 saying something is "in" the range we make is done by
3580 complementing IN_P, since it is set in the initial case of
3581 being not equal to zero; "out" is leaving it alone. */
3582 if (low == 0 || high == 0
3583 || ! integer_zerop (low) || ! integer_zerop (high)
3584 || TREE_CODE (arg1) != INTEGER_CST)
3585 break;
3587 switch (code)
3589 case NE_EXPR: /* - [c, c] */
3590 low = high = arg1;
3591 break;
3592 case EQ_EXPR: /* + [c, c] */
3593 in_p = ! in_p, low = high = arg1;
3594 break;
3595 case GT_EXPR: /* - [-, c] */
3596 low = 0, high = arg1;
3597 break;
3598 case GE_EXPR: /* + [c, -] */
3599 in_p = ! in_p, low = arg1, high = 0;
3600 break;
3601 case LT_EXPR: /* - [c, -] */
3602 low = arg1, high = 0;
3603 break;
3604 case LE_EXPR: /* + [-, c] */
3605 in_p = ! in_p, low = 0, high = arg1;
3606 break;
3607 default:
3608 gcc_unreachable ();
3611 /* If this is an unsigned comparison, we also know that EXP is
3612 greater than or equal to zero. We base the range tests we make
3613 on that fact, so we record it here so we can parse existing
3614 range tests. We test arg0_type since often the return type
3615 of, e.g. EQ_EXPR, is boolean. */
3616 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3618 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3619 in_p, low, high, 1,
3620 fold_convert (arg0_type, integer_zero_node),
3621 NULL_TREE))
3622 break;
3624 in_p = n_in_p, low = n_low, high = n_high;
3626 /* If the high bound is missing, but we have a nonzero low
3627 bound, reverse the range so it goes from zero to the low bound
3628 minus 1. */
3629 if (high == 0 && low && ! integer_zerop (low))
3631 in_p = ! in_p;
3632 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3633 integer_one_node, 0);
3634 low = fold_convert (arg0_type, integer_zero_node);
3638 exp = arg0;
3639 continue;
3641 case NEGATE_EXPR:
3642 /* (-x) IN [a,b] -> x in [-b, -a] */
3643 n_low = range_binop (MINUS_EXPR, exp_type,
3644 fold_convert (exp_type, integer_zero_node),
3645 0, high, 1);
3646 n_high = range_binop (MINUS_EXPR, exp_type,
3647 fold_convert (exp_type, integer_zero_node),
3648 0, low, 0);
3649 low = n_low, high = n_high;
3650 exp = arg0;
3651 continue;
3653 case BIT_NOT_EXPR:
3654 /* ~ X -> -X - 1 */
3655 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3656 fold_convert (exp_type, integer_one_node));
3657 continue;
3659 case PLUS_EXPR: case MINUS_EXPR:
3660 if (TREE_CODE (arg1) != INTEGER_CST)
3661 break;
3663 /* If EXP is signed, any overflow in the computation is undefined,
3664 so we don't worry about it so long as our computations on
3665 the bounds don't overflow. For unsigned, overflow is defined
3666 and this is exactly the right thing. */
3667 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3668 arg0_type, low, 0, arg1, 0);
3669 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3670 arg0_type, high, 1, arg1, 0);
3671 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3672 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3673 break;
3675 /* Check for an unsigned range which has wrapped around the maximum
3676 value thus making n_high < n_low, and normalize it. */
3677 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3679 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3680 integer_one_node, 0);
3681 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3682 integer_one_node, 0);
3684 /* If the range is of the form +/- [ x+1, x ], we won't
3685 be able to normalize it. But then, it represents the
3686 whole range or the empty set, so make it
3687 +/- [ -, - ]. */
3688 if (tree_int_cst_equal (n_low, low)
3689 && tree_int_cst_equal (n_high, high))
3690 low = high = 0;
3691 else
3692 in_p = ! in_p;
3694 else
3695 low = n_low, high = n_high;
3697 exp = arg0;
3698 continue;
3700 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3701 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3702 break;
3704 if (! INTEGRAL_TYPE_P (arg0_type)
3705 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3706 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3707 break;
3709 n_low = low, n_high = high;
3711 if (n_low != 0)
3712 n_low = fold_convert (arg0_type, n_low);
3714 if (n_high != 0)
3715 n_high = fold_convert (arg0_type, n_high);
3718 /* If we're converting arg0 from an unsigned type to exp's
3719 signed type, we will be doing the comparison as unsigned.
3720 The tests above have already verified that LOW and HIGH
3721 are both positive.
3723 So we have to ensure that we will handle large unsigned
3724 values the same way that the current signed bounds treat
3725 negative values. */
3727 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3729 tree high_positive;
3730 tree equiv_type = lang_hooks.types.type_for_mode
3731 (TYPE_MODE (arg0_type), 1);
3733 /* A range without an upper bound is, naturally, unbounded.
3734 Since convert would have cropped a very large value, use
3735 the max value for the destination type. */
3736 high_positive
3737 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3738 : TYPE_MAX_VALUE (arg0_type);
3740 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3741 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3742 fold_convert (arg0_type,
3743 high_positive),
3744 fold_convert (arg0_type,
3745 integer_one_node)));
3747 /* If the low bound is specified, "and" the range with the
3748 range for which the original unsigned value will be
3749 positive. */
3750 if (low != 0)
3752 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3753 1, n_low, n_high, 1,
3754 fold_convert (arg0_type,
3755 integer_zero_node),
3756 high_positive))
3757 break;
3759 in_p = (n_in_p == in_p);
3761 else
3763 /* Otherwise, "or" the range with the range of the input
3764 that will be interpreted as negative. */
3765 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3766 0, n_low, n_high, 1,
3767 fold_convert (arg0_type,
3768 integer_zero_node),
3769 high_positive))
3770 break;
3772 in_p = (in_p != n_in_p);
3776 exp = arg0;
3777 low = n_low, high = n_high;
3778 continue;
3780 default:
3781 break;
3784 break;
3787 /* If EXP is a constant, we can evaluate whether this is true or false. */
3788 if (TREE_CODE (exp) == INTEGER_CST)
3790 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3791 exp, 0, low, 0))
3792 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3793 exp, 1, high, 1)));
3794 low = high = 0;
3795 exp = 0;
3798 *pin_p = in_p, *plow = low, *phigh = high;
3799 return exp;
3802 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3803 type, TYPE, return an expression to test if EXP is in (or out of, depending
3804 on IN_P) the range. Return 0 if the test couldn't be created. */
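/* For example, the test "4 <= EXP && EXP <= 9" can be built as the single
unsigned comparison "(unsigned) (EXP - 4) <= 5". */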
3806 static tree
3807 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3809 tree etype = TREE_TYPE (exp);
3810 tree value;
3812 if (! in_p)
3814 value = build_range_check (type, exp, 1, low, high);
3815 if (value != 0)
3816 return invert_truthvalue (value);
3818 return 0;
3821 if (low == 0 && high == 0)
3822 return fold_convert (type, integer_one_node);
3824 if (low == 0)
3825 return fold (build2 (LE_EXPR, type, exp, high));
3827 if (high == 0)
3828 return fold (build2 (GE_EXPR, type, exp, low));
3830 if (operand_equal_p (low, high, 0))
3831 return fold (build2 (EQ_EXPR, type, exp, low));
3833 if (integer_zerop (low))
3835 if (! TYPE_UNSIGNED (etype))
3837 etype = lang_hooks.types.unsigned_type (etype);
3838 high = fold_convert (etype, high);
3839 exp = fold_convert (etype, exp);
3841 return build_range_check (type, exp, 1, 0, high);
3844 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3845 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3847 unsigned HOST_WIDE_INT lo;
3848 HOST_WIDE_INT hi;
3849 int prec;
3851 prec = TYPE_PRECISION (etype);
3852 if (prec <= HOST_BITS_PER_WIDE_INT)
3854 hi = 0;
3855 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3857 else
3859 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3860 lo = (unsigned HOST_WIDE_INT) -1;
3863 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3865 if (TYPE_UNSIGNED (etype))
3867 etype = lang_hooks.types.signed_type (etype);
3868 exp = fold_convert (etype, exp);
3870 return fold (build2 (GT_EXPR, type, exp,
3871 fold_convert (etype, integer_zero_node)));
3875 value = const_binop (MINUS_EXPR, high, low, 0);
3876 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3878 tree utype, minv, maxv;
3880 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3881 for the type in question, as we rely on this here. */
3882 switch (TREE_CODE (etype))
3884 case INTEGER_TYPE:
3885 case ENUMERAL_TYPE:
3886 case CHAR_TYPE:
3887 utype = lang_hooks.types.unsigned_type (etype);
3888 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3889 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3890 integer_one_node, 1);
3891 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3892 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3893 minv, 1, maxv, 1)))
3895 etype = utype;
3896 high = fold_convert (etype, high);
3897 low = fold_convert (etype, low);
3898 exp = fold_convert (etype, exp);
3899 value = const_binop (MINUS_EXPR, high, low, 0);
3901 break;
3902 default:
3903 break;
3907 if (value != 0 && ! TREE_OVERFLOW (value))
3908 return build_range_check (type,
3909 fold (build2 (MINUS_EXPR, etype, exp, low)),
3910 1, fold_convert (etype, integer_zero_node),
3911 value);
3913 return 0;
3916 /* Given two ranges, see if we can merge them into one. Return 1 if we
3917 can, 0 if we can't. Set the output range into the specified parameters. */
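/* For example, merging "+ [2, 6]" with "+ [4, 9]" yields "+ [4, 6]". */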
3919 static int
3920 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3921 tree high0, int in1_p, tree low1, tree high1)
3923 int no_overlap;
3924 int subset;
3925 int temp;
3926 tree tem;
3927 int in_p;
3928 tree low, high;
3929 int lowequal = ((low0 == 0 && low1 == 0)
3930 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3931 low0, 0, low1, 0)));
3932 int highequal = ((high0 == 0 && high1 == 0)
3933 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3934 high0, 1, high1, 1)));
3936 /* Make range 0 be the range that starts first, or ends last if they
3937 start at the same value. Swap them if it isn't. */
3938 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3939 low0, 0, low1, 0))
3940 || (lowequal
3941 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3942 high1, 1, high0, 1))))
3944 temp = in0_p, in0_p = in1_p, in1_p = temp;
3945 tem = low0, low0 = low1, low1 = tem;
3946 tem = high0, high0 = high1, high1 = tem;
3949 /* Now flag two cases, whether the ranges are disjoint or whether the
3950 second range is totally subsumed in the first. Note that the tests
3951 below are simplified by the ones above. */
3952 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3953 high0, 1, low1, 0));
3954 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3955 high1, 1, high0, 1));
3957 /* We now have four cases, depending on whether we are including or
3958 excluding the two ranges. */
3959 if (in0_p && in1_p)
3961 /* If they don't overlap, the result is false. If the second range
3962 is a subset it is the result. Otherwise, the range is from the start
3963 of the second to the end of the first. */
3964 if (no_overlap)
3965 in_p = 0, low = high = 0;
3966 else if (subset)
3967 in_p = 1, low = low1, high = high1;
3968 else
3969 in_p = 1, low = low1, high = high0;
3972 else if (in0_p && ! in1_p)
3974 /* If they don't overlap, the result is the first range. If they are
3975 equal, the result is false. If the second range is a subset of the
3976 first, and the ranges begin at the same place, we go from just after
3977 the end of the first range to the end of the second. If the second
3978 range is not a subset of the first, or if it is a subset and both
3979 ranges end at the same place, the range starts at the start of the
3980 first range and ends just before the second range.
3981 Otherwise, we can't describe this as a single range. */
3982 if (no_overlap)
3983 in_p = 1, low = low0, high = high0;
3984 else if (lowequal && highequal)
3985 in_p = 0, low = high = 0;
3986 else if (subset && lowequal)
3988 in_p = 1, high = high0;
3989 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3990 integer_one_node, 0);
3992 else if (! subset || highequal)
3994 in_p = 1, low = low0;
3995 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3996 integer_one_node, 0);
3998 else
3999 return 0;
4002 else if (! in0_p && in1_p)
4004 /* If they don't overlap, the result is the second range. If the second
4005 is a subset of the first, the result is false. Otherwise,
4006 the range starts just after the first range and ends at the
4007 end of the second. */
4008 if (no_overlap)
4009 in_p = 1, low = low1, high = high1;
4010 else if (subset || highequal)
4011 in_p = 0, low = high = 0;
4012 else
4014 in_p = 1, high = high1;
4015 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4016 integer_one_node, 0);
4020 else
4022 /* The case where we are excluding both ranges. Here the complex case
4023 is if they don't overlap. In that case, the only time we have a
4024 range is if they are adjacent. If the second is a subset of the
4025 first, the result is the first. Otherwise, the range to exclude
4026 starts at the beginning of the first range and ends at the end of the
4027 second. */
4028 if (no_overlap)
4030 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4031 range_binop (PLUS_EXPR, NULL_TREE,
4032 high0, 1,
4033 integer_one_node, 1),
4034 1, low1, 0)))
4035 in_p = 0, low = low0, high = high1;
4036 else
4038 /* Canonicalize - [min, x] into - [-, x]. */
4039 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4040 switch (TREE_CODE (TREE_TYPE (low0)))
4042 case ENUMERAL_TYPE:
4043 if (TYPE_PRECISION (TREE_TYPE (low0))
4044 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4045 break;
4046 /* FALLTHROUGH */
4047 case INTEGER_TYPE:
4048 case CHAR_TYPE:
4049 if (tree_int_cst_equal (low0,
4050 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4051 low0 = 0;
4052 break;
4053 case POINTER_TYPE:
4054 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4055 && integer_zerop (low0))
4056 low0 = 0;
4057 break;
4058 default:
4059 break;
4062 /* Canonicalize - [x, max] into - [x, -]. */
4063 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4064 switch (TREE_CODE (TREE_TYPE (high1)))
4066 case ENUMERAL_TYPE:
4067 if (TYPE_PRECISION (TREE_TYPE (high1))
4068 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4069 break;
4070 /* FALLTHROUGH */
4071 case INTEGER_TYPE:
4072 case CHAR_TYPE:
4073 if (tree_int_cst_equal (high1,
4074 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4075 high1 = 0;
4076 break;
4077 case POINTER_TYPE:
4078 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4079 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4080 high1, 1,
4081 integer_one_node, 1)))
4082 high1 = 0;
4083 break;
4084 default:
4085 break;
4088 /* The ranges might be also adjacent between the maximum and
4089 minimum values of the given type. For
4090 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4091 return + [x + 1, y - 1]. */
4092 if (low0 == 0 && high1 == 0)
4094 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4095 integer_one_node, 1);
4096 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4097 integer_one_node, 0);
4098 if (low == 0 || high == 0)
4099 return 0;
4101 in_p = 1;
4103 else
4104 return 0;
4107 else if (subset)
4108 in_p = 0, low = low0, high = high0;
4109 else
4110 in_p = 0, low = low0, high = high1;
4113 *pin_p = in_p, *plow = low, *phigh = high;
4114 return 1;
4118 /* Subroutine of fold, looking inside expressions of the form
4119 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4120 of the COND_EXPR. This function is being used also to optimize
4121 A op B ? C : A, by reversing the comparison first.
4123 Return a folded expression whose code is not a COND_EXPR
4124 anymore, or NULL_TREE if no folding opportunity is found. */
4126 static tree
4127 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4129 enum tree_code comp_code = TREE_CODE (arg0);
4130 tree arg00 = TREE_OPERAND (arg0, 0);
4131 tree arg01 = TREE_OPERAND (arg0, 1);
4132 tree arg1_type = TREE_TYPE (arg1);
4133 tree tem;
4135 STRIP_NOPS (arg1);
4136 STRIP_NOPS (arg2);
4138 /* If we have A op 0 ? A : -A, consider applying the following
4139 transformations:
4141 A == 0? A : -A same as -A
4142 A != 0? A : -A same as A
4143 A >= 0? A : -A same as abs (A)
4144 A > 0? A : -A same as abs (A)
4145 A <= 0? A : -A same as -abs (A)
4146 A < 0? A : -A same as -abs (A)
4148 None of these transformations work for modes with signed
4149 zeros. If A is +/-0, the first two transformations will
4150 change the sign of the result (from +0 to -0, or vice
4151 versa). The last four will fix the sign of the result,
4152 even though the original expressions could be positive or
4153 negative, depending on the sign of A.
4155 Note that all these transformations are correct if A is
4156 NaN, since the two alternatives (A and -A) are also NaNs. */
4157 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4158 ? real_zerop (arg01)
4159 : integer_zerop (arg01))
4160 && TREE_CODE (arg2) == NEGATE_EXPR
4161 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4162 switch (comp_code)
4164 case EQ_EXPR:
4165 case UNEQ_EXPR:
4166 tem = fold_convert (arg1_type, arg1);
4167 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4168 case NE_EXPR:
4169 case LTGT_EXPR:
4170 return pedantic_non_lvalue (fold_convert (type, arg1));
4171 case UNGE_EXPR:
4172 case UNGT_EXPR:
4173 if (flag_trapping_math)
4174 break;
4175 /* Fall through. */
4176 case GE_EXPR:
4177 case GT_EXPR:
4178 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4179 arg1 = fold_convert (lang_hooks.types.signed_type
4180 (TREE_TYPE (arg1)), arg1);
4181 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4182 return pedantic_non_lvalue (fold_convert (type, tem));
4183 case UNLE_EXPR:
4184 case UNLT_EXPR:
4185 if (flag_trapping_math)
4186 break;
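/* Fall through. */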
4187 case LE_EXPR:
4188 case LT_EXPR:
4189 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4190 arg1 = fold_convert (lang_hooks.types.signed_type
4191 (TREE_TYPE (arg1)), arg1);
4192 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4193 return negate_expr (fold_convert (type, tem));
4194 default:
4195 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4196 break;
4199 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4200 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4201 both transformations are correct when A is NaN: A != 0
4202 is then true, and A == 0 is false. */
4204 if (integer_zerop (arg01) && integer_zerop (arg2))
4206 if (comp_code == NE_EXPR)
4207 return pedantic_non_lvalue (fold_convert (type, arg1));
4208 else if (comp_code == EQ_EXPR)
4209 return fold_convert (type, integer_zero_node);
4212 /* Try some transformations of A op B ? A : B.
4214 A == B? A : B same as B
4215 A != B? A : B same as A
4216 A >= B? A : B same as max (A, B)
4217 A > B? A : B same as max (B, A)
4218 A <= B? A : B same as min (A, B)
4219 A < B? A : B same as min (B, A)
4221 As above, these transformations don't work in the presence
4222 of signed zeros. For example, if A and B are zeros of
4223 opposite sign, the first two transformations will change
4224 the sign of the result. In the last four, the original
4225 expressions give different results for (A=+0, B=-0) and
4226 (A=-0, B=+0), but the transformed expressions do not.
4228 The first two transformations are correct if either A or B
4229 is a NaN. In the first transformation, the condition will
4230 be false, and B will indeed be chosen. In the case of the
4231 second transformation, the condition A != B will be true,
4232 and A will be chosen.
4234 The conversions to max() and min() are not correct if B is
4235 a number and A is not. The conditions in the original
4236 expressions will be false, so all four give B. The min()
4237 and max() versions would give a NaN instead. */
4238 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4240 tree comp_op0 = arg00;
4241 tree comp_op1 = arg01;
4242 tree comp_type = TREE_TYPE (comp_op0);
4244 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4245 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4247 comp_type = type;
4248 comp_op0 = arg1;
4249 comp_op1 = arg2;
4252 switch (comp_code)
4254 case EQ_EXPR:
4255 return pedantic_non_lvalue (fold_convert (type, arg2));
4256 case NE_EXPR:
4257 return pedantic_non_lvalue (fold_convert (type, arg1));
4258 case LE_EXPR:
4259 case LT_EXPR:
4260 case UNLE_EXPR:
4261 case UNLT_EXPR:
4262 /* In C++ a ?: expression can be an lvalue, so put the
4263 operand which will be used if they are equal first
4264 so that we can convert this back to the
4265 corresponding COND_EXPR. */
4266 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4268 comp_op0 = fold_convert (comp_type, comp_op0);
4269 comp_op1 = fold_convert (comp_type, comp_op1);
4270 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4271 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4272 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4273 return pedantic_non_lvalue (fold_convert (type, tem));
4275 break;
4276 case GE_EXPR:
4277 case GT_EXPR:
4278 case UNGE_EXPR:
4279 case UNGT_EXPR:
4280 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4282 comp_op0 = fold_convert (comp_type, comp_op0);
4283 comp_op1 = fold_convert (comp_type, comp_op1);
4284 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4285 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4286 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4287 return pedantic_non_lvalue (fold_convert (type, tem));
4289 break;
4290 case UNEQ_EXPR:
4291 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4292 return pedantic_non_lvalue (fold_convert (type, arg2));
4293 break;
4294 case LTGT_EXPR:
4295 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4296 return pedantic_non_lvalue (fold_convert (type, arg1));
4297 break;
4298 default:
4299 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4300 break;
4304 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4305 we might still be able to simplify this. For example,
4306 if C1 is one less or one more than C2, this might have started
4307 out as a MIN or MAX and been transformed by this function.
4308 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
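/* For example, "A < 6 ? A : 5" is really MIN (A, 5), since 6 == 5 + 1. */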
4310 if (INTEGRAL_TYPE_P (type)
4311 && TREE_CODE (arg01) == INTEGER_CST
4312 && TREE_CODE (arg2) == INTEGER_CST)
4313 switch (comp_code)
4315 case EQ_EXPR:
4316 /* We can replace A with C1 in this case. */
4317 arg1 = fold_convert (type, arg01);
4318 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4320 case LT_EXPR:
4321 /* If C1 is C2 + 1, this is min(A, C2). */
4322 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4323 OEP_ONLY_CONST)
4324 && operand_equal_p (arg01,
4325 const_binop (PLUS_EXPR, arg2,
4326 integer_one_node, 0),
4327 OEP_ONLY_CONST))
4328 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4329 type, arg1, arg2)));
4330 break;
4332 case LE_EXPR:
4333 /* If C1 is C2 - 1, this is min(A, C2). */
4334 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4335 OEP_ONLY_CONST)
4336 && operand_equal_p (arg01,
4337 const_binop (MINUS_EXPR, arg2,
4338 integer_one_node, 0),
4339 OEP_ONLY_CONST))
4340 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4341 type, arg1, arg2)));
4342 break;
4344 case GT_EXPR:
4345 /* If C1 is C2 - 1, this is max(A, C2). */
4346 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4347 OEP_ONLY_CONST)
4348 && operand_equal_p (arg01,
4349 const_binop (MINUS_EXPR, arg2,
4350 integer_one_node, 0),
4351 OEP_ONLY_CONST))
4352 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4353 type, arg1, arg2)));
4354 break;
4356 case GE_EXPR:
4357 /* If C1 is C2 + 1, this is max(A, C2). */
4358 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4359 OEP_ONLY_CONST)
4360 && operand_equal_p (arg01,
4361 const_binop (PLUS_EXPR, arg2,
4362 integer_one_node, 0),
4363 OEP_ONLY_CONST))
4364 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4365 type, arg1, arg2)));
4366 break;
4367 case NE_EXPR:
4368 break;
4369 default:
4370 gcc_unreachable ();
4373 return NULL_TREE;
4378 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4379 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4380 #endif
4382 /* EXP is some logical combination of boolean tests. See if we can
4383 merge it into some range test. Return the new tree if so. */
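/* For example, "X >= 2 && X <= 9" can become the single range test
"(unsigned) (X - 2) <= 7". */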
4385 static tree
4386 fold_range_test (tree exp)
4388 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4389 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4390 int in0_p, in1_p, in_p;
4391 tree low0, low1, low, high0, high1, high;
4392 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4393 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4394 tree tem;
4396 /* If this is an OR operation, invert both sides; we will invert
4397 again at the end. */
4398 if (or_op)
4399 in0_p = ! in0_p, in1_p = ! in1_p;
4401 /* If both expressions are the same, if we can merge the ranges, and we
4402 can build the range test, return it or it inverted. If one of the
4403 ranges is always true or always false, consider it to be the same
4404 expression as the other. */
4405 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4406 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4407 in1_p, low1, high1)
4408 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4409 lhs != 0 ? lhs
4410 : rhs != 0 ? rhs : integer_zero_node,
4411 in_p, low, high))))
4412 return or_op ? invert_truthvalue (tem) : tem;
4414 /* On machines where the branch cost is expensive, if this is a
4415 short-circuited branch and the underlying object on both sides
4416 is the same, make a non-short-circuit operation. */
4417 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4418 && lhs != 0 && rhs != 0
4419 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4420 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4421 && operand_equal_p (lhs, rhs, 0))
4423 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4424 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4425 which cases we can't do this. */
4426 if (simple_operand_p (lhs))
4427 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4428 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4429 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4430 TREE_OPERAND (exp, 1));
4432 else if (lang_hooks.decls.global_bindings_p () == 0
4433 && ! CONTAINS_PLACEHOLDER_P (lhs))
4435 tree common = save_expr (lhs);
4437 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4438 or_op ? ! in0_p : in0_p,
4439 low0, high0))
4440 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4441 or_op ? ! in1_p : in1_p,
4442 low1, high1))))
4443 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4444 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4445 TREE_TYPE (exp), lhs, rhs);
4449 return 0;
4452 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4453 bit value. Arrange things so the extra bits will be set to zero if and
4454 only if C is sign-extended to its full width. If MASK is nonzero,
4455 it is an INTEGER_CST that should be AND'ed with the extra bits. */
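/* For example, for a signed 8-bit field in a 32-bit mode, the constant
0x80 becomes 0xffffff80 (its sign extension) while 0x7f is returned
unchanged. */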
4457 static tree
4458 unextend (tree c, int p, int unsignedp, tree mask)
4460 tree type = TREE_TYPE (c);
4461 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4462 tree temp;
4464 if (p == modesize || unsignedp)
4465 return c;
4467 /* We work by getting just the sign bit into the low-order bit, then
4468 into the high-order bit, then sign-extend. We then XOR that value
4469 with C. */
4470 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4471 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4473 /* We must use a signed type in order to get an arithmetic right shift.
4474 However, we must also avoid introducing accidental overflows, so that
4475 a subsequent call to integer_zerop will work. Hence we must
4476 do the type conversion here. At this point, the constant is either
4477 zero or one, and the conversion to a signed type can never overflow.
4478 We could get an overflow if this conversion is done anywhere else. */
4479 if (TYPE_UNSIGNED (type))
4480 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4482 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4483 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4484 if (mask != 0)
4485 temp = const_binop (BIT_AND_EXPR, temp,
4486 fold_convert (TREE_TYPE (c), mask), 0);
4487 /* If necessary, convert the type back to match the type of C. */
4488 if (TYPE_UNSIGNED (type))
4489 temp = fold_convert (type, temp);
4491 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4494 /* Find ways of folding logical expressions of LHS and RHS:
4495 Try to merge two comparisons to the same innermost item.
4496 Look for range tests like "ch >= '0' && ch <= '9'".
4497 Look for combinations of simple terms on machines with expensive branches
4498 and evaluate the RHS unconditionally.
4500 For example, if we have p->a == 2 && p->b == 4 and we can make an
4501 object large enough to span both A and B, we can do this with a comparison
4502 against the object ANDed with the a mask.
4504 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4505 operations to do this with one comparison.
4507 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4508 function and the one above.
4510 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4511 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4513 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4514 two operands.
4516 We return the simplified tree or 0 if no optimization is possible. */
4518 static tree
4519 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4521 /* If this is the "or" of two comparisons, we can do something if
4522 the comparisons are NE_EXPR. If this is the "and", we can do something
4523 if the comparisons are EQ_EXPR. I.e.,
4524 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4526 WANTED_CODE is this operation code. For single bit fields, we can
4527 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4528 comparison for one-bit fields. */
4530 enum tree_code wanted_code;
4531 enum tree_code lcode, rcode;
4532 tree ll_arg, lr_arg, rl_arg, rr_arg;
4533 tree ll_inner, lr_inner, rl_inner, rr_inner;
4534 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4535 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4536 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4537 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4538 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4539 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4540 enum machine_mode lnmode, rnmode;
4541 tree ll_mask, lr_mask, rl_mask, rr_mask;
4542 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4543 tree l_const, r_const;
4544 tree lntype, rntype, result;
4545 int first_bit, end_bit;
4546 int volatilep;
4548 /* Start by getting the comparison codes. Fail if anything is volatile.
4549 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4550 it were surrounded with a NE_EXPR. */
4552 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4553 return 0;
4555 lcode = TREE_CODE (lhs);
4556 rcode = TREE_CODE (rhs);
4558 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4560 lhs = build2 (NE_EXPR, truth_type, lhs,
4561 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4562 lcode = NE_EXPR;
4565 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4567 rhs = build2 (NE_EXPR, truth_type, rhs,
4568 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4569 rcode = NE_EXPR;
4572 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4573 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4574 return 0;
4576 ll_arg = TREE_OPERAND (lhs, 0);
4577 lr_arg = TREE_OPERAND (lhs, 1);
4578 rl_arg = TREE_OPERAND (rhs, 0);
4579 rr_arg = TREE_OPERAND (rhs, 1);
4581 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4582 if (simple_operand_p (ll_arg)
4583 && simple_operand_p (lr_arg))
4585 tree result;
4586 if (operand_equal_p (ll_arg, rl_arg, 0)
4587 && operand_equal_p (lr_arg, rr_arg, 0))
4589 result = combine_comparisons (code, lcode, rcode,
4590 truth_type, ll_arg, lr_arg);
4591 if (result)
4592 return result;
4594 else if (operand_equal_p (ll_arg, rr_arg, 0)
4595 && operand_equal_p (lr_arg, rl_arg, 0))
4597 result = combine_comparisons (code, lcode,
4598 swap_tree_comparison (rcode),
4599 truth_type, ll_arg, lr_arg);
4600 if (result)
4601 return result;
4605 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4606 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4608 /* If the RHS can be evaluated unconditionally and its operands are
4609 simple, it wins to evaluate the RHS unconditionally on machines
4610 with expensive branches. In this case, this isn't a comparison
4611 that can be merged. Avoid doing this if the RHS is a floating-point
4612 comparison since those can trap. */
4614 if (BRANCH_COST >= 2
4615 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4616 && simple_operand_p (rl_arg)
4617 && simple_operand_p (rr_arg))
4619 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4620 if (code == TRUTH_OR_EXPR
4621 && lcode == NE_EXPR && integer_zerop (lr_arg)
4622 && rcode == NE_EXPR && integer_zerop (rr_arg)
4623 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4624 return build2 (NE_EXPR, truth_type,
4625 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4626 ll_arg, rl_arg),
4627 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4629 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4630 if (code == TRUTH_AND_EXPR
4631 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4632 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4633 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4634 return build2 (EQ_EXPR, truth_type,
4635 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4636 ll_arg, rl_arg),
4637 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4639 return build2 (code, truth_type, lhs, rhs);
4642 /* See if the comparisons can be merged. Then get all the parameters for
4643 each side. */
4645 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4646 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4647 return 0;
4649 volatilep = 0;
4650 ll_inner = decode_field_reference (ll_arg,
4651 &ll_bitsize, &ll_bitpos, &ll_mode,
4652 &ll_unsignedp, &volatilep, &ll_mask,
4653 &ll_and_mask);
4654 lr_inner = decode_field_reference (lr_arg,
4655 &lr_bitsize, &lr_bitpos, &lr_mode,
4656 &lr_unsignedp, &volatilep, &lr_mask,
4657 &lr_and_mask);
4658 rl_inner = decode_field_reference (rl_arg,
4659 &rl_bitsize, &rl_bitpos, &rl_mode,
4660 &rl_unsignedp, &volatilep, &rl_mask,
4661 &rl_and_mask);
4662 rr_inner = decode_field_reference (rr_arg,
4663 &rr_bitsize, &rr_bitpos, &rr_mode,
4664 &rr_unsignedp, &volatilep, &rr_mask,
4665 &rr_and_mask);
4667 /* The inner operation on the lhs of each comparison must be the
4668 same if we are to be able to do anything.
4669 Then see if we have constants. If not, the same must be true for
4670 the rhs's. */
4671 if (volatilep || ll_inner == 0 || rl_inner == 0
4672 || ! operand_equal_p (ll_inner, rl_inner, 0))
4673 return 0;
4675 if (TREE_CODE (lr_arg) == INTEGER_CST
4676 && TREE_CODE (rr_arg) == INTEGER_CST)
4677 l_const = lr_arg, r_const = rr_arg;
4678 else if (lr_inner == 0 || rr_inner == 0
4679 || ! operand_equal_p (lr_inner, rr_inner, 0))
4680 return 0;
4681 else
4682 l_const = r_const = 0;
4684 /* If either comparison code is not correct for our logical operation,
4685 fail. However, we can convert a one-bit comparison against zero into
4686 the opposite comparison against that bit being set in the field. */
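/* For example, the test "(X & 8) != 0" can be recast as "(X & 8) == 8",
which has the EQ_EXPR form wanted for "and". */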
4688 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4689 if (lcode != wanted_code)
4691 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4693 /* Make the left operand unsigned, since we are only interested
4694 in the value of one bit. Otherwise we are doing the wrong
4695 thing below. */
4696 ll_unsignedp = 1;
4697 l_const = ll_mask;
4699 else
4700 return 0;
4703 /* This is analogous to the code for l_const above. */
4704 if (rcode != wanted_code)
4706 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4708 rl_unsignedp = 1;
4709 r_const = rl_mask;
4711 else
4712 return 0;
4715 /* After this point all optimizations will generate bit-field
4716 references, which we might not want. */
4717 if (! lang_hooks.can_use_bit_fields_p ())
4718 return 0;
4720 /* See if we can find a mode that contains both fields being compared on
4721 the left. If we can't, fail. Otherwise, update all constants and masks
4722 to be relative to a field of that size. */
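/* For instance, tests on two adjacent bit-fields of a single byte may
both be done through one QImode access. */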
4723 first_bit = MIN (ll_bitpos, rl_bitpos);
4724 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4725 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4726 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4727 volatilep);
4728 if (lnmode == VOIDmode)
4729 return 0;
4731 lnbitsize = GET_MODE_BITSIZE (lnmode);
4732 lnbitpos = first_bit & ~ (lnbitsize - 1);
4733 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4734 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4736 if (BYTES_BIG_ENDIAN)
4738 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4739 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4742 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4743 size_int (xll_bitpos), 0);
4744 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4745 size_int (xrl_bitpos), 0);
4747 if (l_const)
4749 l_const = fold_convert (lntype, l_const);
4750 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4751 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4752 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4753 fold (build1 (BIT_NOT_EXPR,
4754 lntype, ll_mask)),
4755 0)))
4757 warning ("comparison is always %d", wanted_code == NE_EXPR);
4759 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4762 if (r_const)
4764 r_const = fold_convert (lntype, r_const);
4765 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4766 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4767 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4768 fold (build1 (BIT_NOT_EXPR,
4769 lntype, rl_mask)),
4770 0)))
4772 warning ("comparison is always %d", wanted_code == NE_EXPR);
4774 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4778 /* If the right sides are not constant, do the same for them. Also,
4779 disallow this optimization if a size or signedness mismatch occurs
4780 between the left and right sides. */
4781 if (l_const == 0)
4783 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4784 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4785 /* Make sure the two fields on the right
4786 correspond to the left without being swapped. */
4787 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4788 return 0;
4790 first_bit = MIN (lr_bitpos, rr_bitpos);
4791 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4792 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4793 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4794 volatilep);
4795 if (rnmode == VOIDmode)
4796 return 0;
4798 rnbitsize = GET_MODE_BITSIZE (rnmode);
4799 rnbitpos = first_bit & ~ (rnbitsize - 1);
4800 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4801 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4803 if (BYTES_BIG_ENDIAN)
4805 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4806 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4809 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4810 size_int (xlr_bitpos), 0);
4811 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4812 size_int (xrr_bitpos), 0);
4814 /* Make a mask that corresponds to both fields being compared.
4815 Do this for both items being compared. If the operands are the
4816 same size and the bits being compared are in the same position
4817 then we can do this by masking both and comparing the masked
4818 results. */
4819 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4820 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4821 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4823 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4824 ll_unsignedp || rl_unsignedp);
4825 if (! all_ones_mask_p (ll_mask, lnbitsize))
4826 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4828 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4829 lr_unsignedp || rr_unsignedp);
4830 if (! all_ones_mask_p (lr_mask, rnbitsize))
4831 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4833 return build2 (wanted_code, truth_type, lhs, rhs);
4836 /* There is still another way we can do something: If both pairs of
4837 fields being compared are adjacent, we may be able to make a wider
4838 field containing them both.
4840 Note that we still must mask the lhs/rhs expressions. Furthermore,
4841 the mask must be shifted to account for the shift done by
4842 make_bit_field_ref. */
4843 if ((ll_bitsize + ll_bitpos == rl_bitpos
4844 && lr_bitsize + lr_bitpos == rr_bitpos)
4845 || (ll_bitpos == rl_bitpos + rl_bitsize
4846 && lr_bitpos == rr_bitpos + rr_bitsize))
4848 tree type;
4850 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4851 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4852 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4853 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4855 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4856 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4857 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4858 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4860 /* Convert to the smaller type before masking out unwanted bits. */
4861 type = lntype;
4862 if (lntype != rntype)
4864 if (lnbitsize > rnbitsize)
4866 lhs = fold_convert (rntype, lhs);
4867 ll_mask = fold_convert (rntype, ll_mask);
4868 type = rntype;
4870 else if (lnbitsize < rnbitsize)
4872 rhs = fold_convert (lntype, rhs);
4873 lr_mask = fold_convert (lntype, lr_mask);
4874 type = lntype;
4878 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4879 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4881 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4882 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4884 return build2 (wanted_code, truth_type, lhs, rhs);
4887 return 0;
4890 /* Handle the case of comparisons with constants. If there is something in
4891 common between the masks, those bits of the constants must be the same.
4892 If not, the condition is always false. Test for this to avoid generating
4893 incorrect code below. */
4894 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4895 if (! integer_zerop (result)
4896 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4897 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4899 if (wanted_code == NE_EXPR)
4901 warning ("%<or%> of unmatched not-equal tests is always 1");
4902 return constant_boolean_node (true, truth_type);
4904 else
4906 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4907 return constant_boolean_node (false, truth_type);
4911 /* Construct the expression we will return. First get the component
4912 reference we will make. Unless the mask is all ones the width of
4913 that field, perform the mask operation. Then compare with the
4914 merged constant. */
4915 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4916 ll_unsignedp || rl_unsignedp);
4918 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4919 if (! all_ones_mask_p (ll_mask, lnbitsize))
4920 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4922 return build2 (wanted_code, truth_type, result,
4923 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4926 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4927 constant. */
4929 static tree
4930 optimize_minmax_comparison (tree t)
4932 tree type = TREE_TYPE (t);
4933 tree arg0 = TREE_OPERAND (t, 0);
4934 enum tree_code op_code;
4935 tree comp_const = TREE_OPERAND (t, 1);
4936 tree minmax_const;
4937 int consts_equal, consts_lt;
4938 tree inner;
4940 STRIP_SIGN_NOPS (arg0);
4942 op_code = TREE_CODE (arg0);
4943 minmax_const = TREE_OPERAND (arg0, 1);
4944 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4945 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4946 inner = TREE_OPERAND (arg0, 0);
4948 /* If something does not permit us to optimize, return the original tree. */
4949 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4950 || TREE_CODE (comp_const) != INTEGER_CST
4951 || TREE_CONSTANT_OVERFLOW (comp_const)
4952 || TREE_CODE (minmax_const) != INTEGER_CST
4953 || TREE_CONSTANT_OVERFLOW (minmax_const))
4954 return t;
4956 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4957 and GT_EXPR, doing the rest with recursive calls using logical
4958 simplifications. */
4959 switch (TREE_CODE (t))
4961 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4962 return
4963 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4965 case GE_EXPR:
4966 return
4967 fold (build2 (TRUTH_ORIF_EXPR, type,
4968 optimize_minmax_comparison
4969 (build2 (EQ_EXPR, type, arg0, comp_const)),
4970 optimize_minmax_comparison
4971 (build2 (GT_EXPR, type, arg0, comp_const))));
4973 case EQ_EXPR:
4974 if (op_code == MAX_EXPR && consts_equal)
4975 /* MAX (X, 0) == 0 -> X <= 0 */
4976 return fold (build2 (LE_EXPR, type, inner, comp_const));
4978 else if (op_code == MAX_EXPR && consts_lt)
4979 /* MAX (X, 0) == 5 -> X == 5 */
4980 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4982 else if (op_code == MAX_EXPR)
4983 /* MAX (X, 0) == -1 -> false */
4984 return omit_one_operand (type, integer_zero_node, inner);
4986 else if (consts_equal)
4987 /* MIN (X, 0) == 0 -> X >= 0 */
4988 return fold (build2 (GE_EXPR, type, inner, comp_const));
4990 else if (consts_lt)
4991 /* MIN (X, 0) == 5 -> false */
4992 return omit_one_operand (type, integer_zero_node, inner);
4994 else
4995 /* MIN (X, 0) == -1 -> X == -1 */
4996 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4998 case GT_EXPR:
4999 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5000 /* MAX (X, 0) > 0 -> X > 0
5001 MAX (X, 0) > 5 -> X > 5 */
5002 return fold (build2 (GT_EXPR, type, inner, comp_const));
5004 else if (op_code == MAX_EXPR)
5005 /* MAX (X, 0) > -1 -> true */
5006 return omit_one_operand (type, integer_one_node, inner);
5008 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5009 /* MIN (X, 0) > 0 -> false
5010 MIN (X, 0) > 5 -> false */
5011 return omit_one_operand (type, integer_zero_node, inner);
5013 else
5014 /* MIN (X, 0) > -1 -> X > -1 */
5015 return fold (build2 (GT_EXPR, type, inner, comp_const));
5017 default:
5018 return t;
5022 /* T is an integer expression that is being multiplied by, divided by, or
5023 reduced modulo a constant C (CODE says which operation, and what kind of
5024 divide or modulus). See if we can eliminate that operation by folding it with
5025 other operations already in T. WIDE_TYPE, if non-null, is a type that
5026 should be used for the computation if wider than our type.
5028 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5029 (X * 2) + (Y * 4). We must, however, be assured that either the original
5030 expression would not overflow or that overflow is undefined for the type
5031 in the language in question.
5033 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5034 the machine has a multiply-accumulate insn or that this is part of an
5035 addressing calculation.
5037 If we return a non-null expression, it is an equivalent form of the
5038 original computation, but need not be in the original type. */
5040 static tree
5041 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5043 /* To avoid exponential search depth, refuse to allow recursion past
5044 three levels. Beyond that (1) it's highly unlikely that we'll find
5045 something interesting and (2) we've probably processed it before
5046 when we built the inner expression. */
5048 static int depth;
5049 tree ret;
5051 if (depth > 3)
5052 return NULL;
5054 depth++;
5055 ret = extract_muldiv_1 (t, c, code, wide_type);
5056 depth--;
5058 return ret;
5061 static tree
5062 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5064 tree type = TREE_TYPE (t);
5065 enum tree_code tcode = TREE_CODE (t);
5066 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5067 > GET_MODE_SIZE (TYPE_MODE (type)))
5068 ? wide_type : type);
5069 tree t1, t2;
5070 int same_p = tcode == code;
5071 tree op0 = NULL_TREE, op1 = NULL_TREE;
5073 /* Don't deal with constants of zero here; they confuse the code below. */
5074 if (integer_zerop (c))
5075 return NULL_TREE;
5077 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5078 op0 = TREE_OPERAND (t, 0);
5080 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5081 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5083 /* Note that we need not handle conditional operations here since fold
5084 already handles those cases. So just do arithmetic here. */
5085 switch (tcode)
5087 case INTEGER_CST:
5088 /* For a constant, we can always simplify if we are a multiply
5089 or (for divide and modulus) if it is a multiple of our constant. */
5090 if (code == MULT_EXPR
5091 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5092 return const_binop (code, fold_convert (ctype, t),
5093 fold_convert (ctype, c), 0);
5094 break;
5096 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5097 /* If op0 is an expression ... */
5098 if ((COMPARISON_CLASS_P (op0)
5099 || UNARY_CLASS_P (op0)
5100 || BINARY_CLASS_P (op0)
5101 || EXPRESSION_CLASS_P (op0))
5102 /* ... and is unsigned, and its type is smaller than ctype,
5103 then we cannot pass through as widening. */
5104 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5105 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5106 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5107 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5108 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5109 /* ... or this is a truncation (t is narrower than op0),
5110 then we cannot pass through this narrowing. */
5111 || (GET_MODE_SIZE (TYPE_MODE (type))
5112 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5113 /* ... or signedness changes for division or modulus,
5114 then we cannot pass through this conversion. */
5115 || (code != MULT_EXPR
5116 && (TYPE_UNSIGNED (ctype)
5117 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5118 break;
5120 /* Pass the constant down and see if we can make a simplification. If
5121 we can, replace this expression with the inner simplification for
5122 possible later conversion to our or some other type. */
5123 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5124 && TREE_CODE (t2) == INTEGER_CST
5125 && ! TREE_CONSTANT_OVERFLOW (t2)
5126 && (0 != (t1 = extract_muldiv (op0, t2, code,
5127 code == MULT_EXPR
5128 ? ctype : NULL_TREE))))
5129 return t1;
5130 break;
5132 case NEGATE_EXPR: case ABS_EXPR:
5133 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5134 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5135 break;
5137 case MIN_EXPR: case MAX_EXPR:
5138 /* If widening the type changes the signedness, then we can't perform
5139 this optimization as that changes the result. */
5140 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5141 break;
5143 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5144 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5145 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5147 if (tree_int_cst_sgn (c) < 0)
5148 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5150 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5151 fold_convert (ctype, t2)));
5153 break;
5155 case LSHIFT_EXPR: case RSHIFT_EXPR:
5156 /* If the second operand is constant, this is a multiplication
5157 or floor division by a power of two, so we can treat it that
5158 way unless the multiplier or divisor overflows. Signed
5159 left-shift overflow is implementation-defined rather than
5160 undefined in C90, so do not convert signed left shift into
5161 multiplication. */
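/* For example, an unsigned "X << 2" is handled below as "X * 4", and
"X >> 2" as "X / 4" (floor division). */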
5162 if (TREE_CODE (op1) == INTEGER_CST
5163 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5164 /* const_binop may not detect overflow correctly,
5165 so check for it explicitly here. */
5166 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5167 && TREE_INT_CST_HIGH (op1) == 0
5168 && 0 != (t1 = fold_convert (ctype,
5169 const_binop (LSHIFT_EXPR,
5170 size_one_node,
5171 op1, 0)))
5172 && ! TREE_OVERFLOW (t1))
5173 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5174 ? MULT_EXPR : FLOOR_DIV_EXPR,
5175 ctype, fold_convert (ctype, op0), t1),
5176 c, code, wide_type);
5177 break;
5179 case PLUS_EXPR: case MINUS_EXPR:
5180 /* See if we can eliminate the operation on both sides. If we can, we
5181 can return a new PLUS or MINUS. If we can't, the only remaining
5182 cases where we can do anything are if the second operand is a
5183 constant. */
5184 t1 = extract_muldiv (op0, c, code, wide_type);
5185 t2 = extract_muldiv (op1, c, code, wide_type);
5186 if (t1 != 0 && t2 != 0
5187 && (code == MULT_EXPR
5188 /* If not multiplication, we can only do this if both operands
5189 are divisible by c. */
5190 || (multiple_of_p (ctype, op0, c)
5191 && multiple_of_p (ctype, op1, c))))
5192 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5193 fold_convert (ctype, t2)));
5195 /* If this was a subtraction, negate OP1 and set it to be an addition.
5196 This simplifies the logic below. */
5197 if (tcode == MINUS_EXPR)
5198 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5200 if (TREE_CODE (op1) != INTEGER_CST)
5201 break;
5203 /* If either OP1 or C are negative, this optimization is not safe for
5204 some of the division and remainder types while for others we need
5205 to change the code. */
5206 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5208 if (code == CEIL_DIV_EXPR)
5209 code = FLOOR_DIV_EXPR;
5210 else if (code == FLOOR_DIV_EXPR)
5211 code = CEIL_DIV_EXPR;
5212 else if (code != MULT_EXPR
5213 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5214 break;
5217 /* If it's a multiply or a division/modulus operation of a multiple
5218 of our constant, do the operation and verify it doesn't overflow. */
5219 if (code == MULT_EXPR
5220 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5222 op1 = const_binop (code, fold_convert (ctype, op1),
5223 fold_convert (ctype, c), 0);
5224 /* We allow the constant to overflow with wrapping semantics. */
5225 if (op1 == 0
5226 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5227 break;
5229 else
5230 break;
5232 /* If we have an unsigned type that is not a sizetype, we cannot widen
5233 the operation since it will change the result if the original
5234 computation overflowed. */
5235 if (TYPE_UNSIGNED (ctype)
5236 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5237 && ctype != type)
5238 break;
5240 /* If we were able to eliminate our operation from the first side,
5241 apply our operation to the second side and reform the PLUS. */
5242 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5243 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5245 /* The last case is if we are a multiply. In that case, we can
5246 apply the distributive law to commute the multiply and addition
5247 if the multiplication of the constants doesn't overflow. */
5248 if (code == MULT_EXPR)
5249 return fold (build2 (tcode, ctype,
5250 fold (build2 (code, ctype,
5251 fold_convert (ctype, op0),
5252 fold_convert (ctype, c))),
5253 op1));
5255 break;
5257 case MULT_EXPR:
5258 /* We have a special case here if we are doing something like
5259 (C * 8) % 4 since we know that's zero. */
5260 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5261 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5262 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5263 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5264 return omit_one_operand (type, integer_zero_node, op0);
5266 /* ... fall through ... */
5268 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5269 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5270 /* If we can extract our operation from the LHS, do so and return a
5271 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5272 do something only if the second operand is a constant. */
5273 if (same_p
5274 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5275 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5276 fold_convert (ctype, op1)));
5277 else if (tcode == MULT_EXPR && code == MULT_EXPR
5278 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5279 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5280 fold_convert (ctype, t1)));
5281 else if (TREE_CODE (op1) != INTEGER_CST)
5282 return 0;
5284 /* If these are the same operation types, we can associate them
5285 assuming no overflow. */
5286 if (tcode == code
5287 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5288 fold_convert (ctype, c), 0))
5289 && ! TREE_OVERFLOW (t1))
5290 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5292 /* If these operations "cancel" each other, we have the main
5293 optimizations of this pass, which occur when either constant is a
5294 multiple of the other, in which case we replace this with either an
5295 operation of either CODE or TCODE.
5297 If we have an unsigned type that is not a sizetype, we cannot do
5298 this since it will change the result if the original computation
5299 overflowed. */
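/* For example, for signed X, (X * 12) / 4 simplifies to X * 3, and
(X * 4) / 12 to X / 3. */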
5300 if ((! TYPE_UNSIGNED (ctype)
5301 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5302 && ! flag_wrapv
5303 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5304 || (tcode == MULT_EXPR
5305 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5306 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5308 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5309 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5310 fold_convert (ctype,
5311 const_binop (TRUNC_DIV_EXPR,
5312 op1, c, 0))));
5313 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5314 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5315 fold_convert (ctype,
5316 const_binop (TRUNC_DIV_EXPR,
5317 c, op1, 0))));
5319 break;
5321 default:
5322 break;
5325 return 0;
5328 /* Return a node which has the indicated constant VALUE (either 0 or
5329 1), and is of the indicated TYPE. */
5331 tree
5332 constant_boolean_node (int value, tree type)
5334 if (type == integer_type_node)
5335 return value ? integer_one_node : integer_zero_node;
5336 else if (type == boolean_type_node)
5337 return value ? boolean_true_node : boolean_false_node;
5338 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5339 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5340 : integer_zero_node);
5341 else
5342 return build_int_cst (type, value);
5345 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5346 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5347 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5348 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5349 COND is the first argument to CODE; otherwise (as in the example
5350 given here), it is the second argument. TYPE is the type of the
5351 original expression. Return NULL_TREE if no simplification is
5352 possible. */
5354 static tree
5355 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5356 tree cond, tree arg, int cond_first_p)
5358 tree test, true_value, false_value;
5359 tree lhs = NULL_TREE;
5360 tree rhs = NULL_TREE;
5362 /* This transformation is only worthwhile if we don't have to wrap
5363 arg in a SAVE_EXPR, and the operation can be simplified on at least
5364 one of the branches once it's pushed inside the COND_EXPR. */
5365 if (!TREE_CONSTANT (arg))
5366 return NULL_TREE;
5368 if (TREE_CODE (cond) == COND_EXPR)
5370 test = TREE_OPERAND (cond, 0);
5371 true_value = TREE_OPERAND (cond, 1);
5372 false_value = TREE_OPERAND (cond, 2);
5373 /* If this operand is an expression that throws (and therefore
5374 has void type), it does not make sense to try to perform a
5375 logical or arithmetic operation involving it. */
5376 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5377 lhs = true_value;
5378 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5379 rhs = false_value;
5381 else
5383 tree testtype = TREE_TYPE (cond);
5384 test = cond;
5385 true_value = constant_boolean_node (true, testtype);
5386 false_value = constant_boolean_node (false, testtype);
5389 if (lhs == 0)
5390 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5391 : build2 (code, type, arg, true_value));
5392 if (rhs == 0)
5393 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5394 : build2 (code, type, arg, false_value));
5396 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5397 return fold_convert (type, test);
5401 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5403 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5404 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5405 ADDEND is the same as X.
5407 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5408 and finite. The problematic cases are when X is zero, and its mode
5409 has signed zeros. In the case of rounding towards -infinity,
5410 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5411 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5413 static bool
5414 fold_real_zero_addition_p (tree type, tree addend, int negate)
5416 if (!real_zerop (addend))
5417 return false;
5419 /* Don't allow the fold with -fsignaling-nans. */
5420 if (HONOR_SNANS (TYPE_MODE (type)))
5421 return false;
5423 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5424 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5425 return true;
5427 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5428 if (TREE_CODE (addend) == REAL_CST
5429 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5430 negate = !negate;
5432 /* The mode has signed zeros, and we have to honor their sign.
5433 In this situation, there is only one case we can return true for.
5434 X - 0 is the same as X unless rounding towards -infinity is
5435 supported. */
5436 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
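/* Editor's illustration (not part of the original source; assumes
   IEEE 754 doubles in the default round-to-nearest mode): X + 0.0
   loses the sign of a negative zero, while X - 0.0 preserves it,
   which is why only the NEGATE form of the fold is safe when signed
   zeros are honored.  */
#if 0
#include <stdio.h>

int
main (void)
{
  double x = -0.0;
  printf ("%g\n", x + 0.0);  /* prints 0: (-0.0) + 0.0 == +0.0 */
  printf ("%g\n", x - 0.0);  /* prints -0: X - 0.0 keeps X's sign */
  return 0;
}
#endif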
5439 /* Subroutine of fold() that checks comparisons of built-in math
5440 functions against real constants.
5442 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5443 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5444 is the type of the result and ARG0 and ARG1 are the operands of the
5445 comparison. ARG1 must be a TREE_REAL_CST.
5447 The function returns the constant folded tree if a simplification
5448 can be made, and NULL_TREE otherwise. */
5450 static tree
5451 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5452 tree type, tree arg0, tree arg1)
5454 REAL_VALUE_TYPE c;
5456 if (BUILTIN_SQRT_P (fcode))
5458 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5459 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5461 c = TREE_REAL_CST (arg1);
5462 if (REAL_VALUE_NEGATIVE (c))
5464 /* sqrt(x) ==/</<= y is always false, if y is negative. */
5465 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5466 return omit_one_operand (type, integer_zero_node, arg);
5468 /* sqrt(x) > y is always true, if y is negative and we
5469 don't care about NaNs, i.e. negative values of x. */
5470 if (code == NE_EXPR || !HONOR_NANS (mode))
5471 return omit_one_operand (type, integer_one_node, arg);
5473 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5474 return fold (build2 (GE_EXPR, type, arg,
5475 build_real (TREE_TYPE (arg), dconst0)));
5477 else if (code == GT_EXPR || code == GE_EXPR)
5479 REAL_VALUE_TYPE c2;
5481 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5482 real_convert (&c2, mode, &c2);
5484 if (REAL_VALUE_ISINF (c2))
5486 /* sqrt(x) > y is x == +Inf, when y is very large. */
5487 if (HONOR_INFINITIES (mode))
5488 return fold (build2 (EQ_EXPR, type, arg,
5489 build_real (TREE_TYPE (arg), c2)));
5491 /* sqrt(x) > y is always false, when y is very large
5492 and we don't care about infinities. */
5493 return omit_one_operand (type, integer_zero_node, arg);
5496 /* sqrt(x) > c is the same as x > c*c. */
5497 return fold (build2 (code, type, arg,
5498 build_real (TREE_TYPE (arg), c2)));
5500 else if (code == LT_EXPR || code == LE_EXPR)
5502 REAL_VALUE_TYPE c2;
5504 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5505 real_convert (&c2, mode, &c2);
5507 if (REAL_VALUE_ISINF (c2))
5509 /* sqrt(x) < y is always true, when y is a very large
5510 value and we don't care about NaNs or Infinities. */
5511 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5512 return omit_one_operand (type, integer_one_node, arg);
5514 /* sqrt(x) < y is x != +Inf when y is very large and we
5515 don't care about NaNs. */
5516 if (! HONOR_NANS (mode))
5517 return fold (build2 (NE_EXPR, type, arg,
5518 build_real (TREE_TYPE (arg), c2)));
5520 /* sqrt(x) < y is x >= 0 when y is very large and we
5521 don't care about Infinities. */
5522 if (! HONOR_INFINITIES (mode))
5523 return fold (build2 (GE_EXPR, type, arg,
5524 build_real (TREE_TYPE (arg), dconst0)));
5526 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5527 if (lang_hooks.decls.global_bindings_p () != 0
5528 || CONTAINS_PLACEHOLDER_P (arg))
5529 return NULL_TREE;
5531 arg = save_expr (arg);
5532 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5533 fold (build2 (GE_EXPR, type, arg,
5534 build_real (TREE_TYPE (arg),
5535 dconst0))),
5536 fold (build2 (NE_EXPR, type, arg,
5537 build_real (TREE_TYPE (arg),
5538 c2)))));
5541 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5542 if (! HONOR_NANS (mode))
5543 return fold (build2 (code, type, arg,
5544 build_real (TREE_TYPE (arg), c2)));
5546 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5547 if (lang_hooks.decls.global_bindings_p () == 0
5548 && ! CONTAINS_PLACEHOLDER_P (arg))
5550 arg = save_expr (arg);
5551 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5552 fold (build2 (GE_EXPR, type, arg,
5553 build_real (TREE_TYPE (arg),
5554 dconst0))),
5555 fold (build2 (code, type, arg,
5556 build_real (TREE_TYPE (arg),
5557 c2)))));
5562 return NULL_TREE;
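/* Editor's illustration (not part of the original source): assuming
   NaNs need not be honored, the folds above rewrite a sqrt comparison
   into a range test on its argument; both functions below compute the
   same truth value for every non-NaN double.  */
#if 0
#include <math.h>

static int
before (double x)
{
  return sqrt (x) < 3.0;
}

static int
after (double x)
{
  return x >= 0.0 && x < 9.0;
}
#endif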
5565 /* Subroutine of fold() that optimizes comparisons against Infinities,
5566 either +Inf or -Inf.
5568 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5569 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5570 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5572 The function returns the constant folded tree if a simplification
5573 can be made, and NULL_TREE otherwise. */
5575 static tree
5576 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5578 enum machine_mode mode;
5579 REAL_VALUE_TYPE max;
5580 tree temp;
5581 bool neg;
5583 mode = TYPE_MODE (TREE_TYPE (arg0));
5585 /* For negative infinity swap the sense of the comparison. */
5586 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5587 if (neg)
5588 code = swap_tree_comparison (code);
5590 switch (code)
5592 case GT_EXPR:
5593 /* x > +Inf is always false, if we ignore sNaNs. */
5594 if (HONOR_SNANS (mode))
5595 return NULL_TREE;
5596 return omit_one_operand (type, integer_zero_node, arg0);
5598 case LE_EXPR:
5599 /* x <= +Inf is always true, if we don't care about NaNs. */
5600 if (! HONOR_NANS (mode))
5601 return omit_one_operand (type, integer_one_node, arg0);
5603 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5604 if (lang_hooks.decls.global_bindings_p () == 0
5605 && ! CONTAINS_PLACEHOLDER_P (arg0))
5607 arg0 = save_expr (arg0);
5608 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5610 break;
5612 case EQ_EXPR:
5613 case GE_EXPR:
5614 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5615 real_maxval (&max, neg, mode);
5616 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5617 arg0, build_real (TREE_TYPE (arg0), max)));
5619 case LT_EXPR:
5620 /* x < +Inf is always equal to x <= DBL_MAX. */
5621 real_maxval (&max, neg, mode);
5622 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5623 arg0, build_real (TREE_TYPE (arg0), max)));
5625 case NE_EXPR:
5626 /* x != +Inf is always equal to !(x > DBL_MAX). */
5627 real_maxval (&max, neg, mode);
5628 if (! HONOR_NANS (mode))
5629 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5630 arg0, build_real (TREE_TYPE (arg0), max)));
5632 /* The transformation below creates non-gimple code and thus is
5633 not appropriate if we are in gimple form. */
5634 if (in_gimple_form)
5635 return NULL_TREE;
5637 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5638 arg0, build_real (TREE_TYPE (arg0), max)));
5639 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5641 default:
5642 break;
5645 return NULL_TREE;
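/* Editor's illustration (not part of the original source): with
   CODE == LT_EXPR and ARG1 == +Inf, the code above rewrites
   `x < +Inf' as `x <= DBL_MAX' (for double X), and with
   CODE == NE_EXPR when NaNs are ignored, `x != +Inf' becomes the
   same `x <= DBL_MAX' test.  */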
5648 /* Subroutine of fold() that optimizes comparisons of a division by
5649 a nonzero integer constant against an integer constant, i.e.
5650 X/C1 op C2.
5652 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5653 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5654 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5656 The function returns the constant folded tree if a simplification
5657 can be made, and NULL_TREE otherwise. */
5659 static tree
5660 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5662 tree prod, tmp, hi, lo;
5663 tree arg00 = TREE_OPERAND (arg0, 0);
5664 tree arg01 = TREE_OPERAND (arg0, 1);
5665 unsigned HOST_WIDE_INT lpart;
5666 HOST_WIDE_INT hpart;
5667 int overflow;
5669 /* We have to do this the hard way to detect unsigned overflow.
5670 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5671 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5672 TREE_INT_CST_HIGH (arg01),
5673 TREE_INT_CST_LOW (arg1),
5674 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5675 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5676 prod = force_fit_type (prod, -1, overflow, false);
5678 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5680 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5681 lo = prod;
5683 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5684 overflow = add_double (TREE_INT_CST_LOW (prod),
5685 TREE_INT_CST_HIGH (prod),
5686 TREE_INT_CST_LOW (tmp),
5687 TREE_INT_CST_HIGH (tmp),
5688 &lpart, &hpart);
5689 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5690 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5691 TREE_CONSTANT_OVERFLOW (prod));
5693 else if (tree_int_cst_sgn (arg01) >= 0)
5695 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5696 switch (tree_int_cst_sgn (arg1))
5698 case -1:
5699 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5700 hi = prod;
5701 break;
5703 case 0:
5704 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5705 hi = tmp;
5706 break;
5708 case 1:
5709 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5710 lo = prod;
5711 break;
5713 default:
5714 gcc_unreachable ();
5717 else
5719 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5720 switch (tree_int_cst_sgn (arg1))
5722 case -1:
5723 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5724 lo = prod;
5725 break;
5727 case 0:
5728 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5729 lo = tmp;
5730 break;
5732 case 1:
5733 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5734 hi = prod;
5735 break;
5737 default:
5738 gcc_unreachable ();
5742 switch (code)
5744 case EQ_EXPR:
5745 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5746 return omit_one_operand (type, integer_zero_node, arg00);
5747 if (TREE_OVERFLOW (hi))
5748 return fold (build2 (GE_EXPR, type, arg00, lo));
5749 if (TREE_OVERFLOW (lo))
5750 return fold (build2 (LE_EXPR, type, arg00, hi));
5751 return build_range_check (type, arg00, 1, lo, hi);
5753 case NE_EXPR:
5754 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5755 return omit_one_operand (type, integer_one_node, arg00);
5756 if (TREE_OVERFLOW (hi))
5757 return fold (build2 (LT_EXPR, type, arg00, lo));
5758 if (TREE_OVERFLOW (lo))
5759 return fold (build2 (GT_EXPR, type, arg00, hi));
5760 return build_range_check (type, arg00, 0, lo, hi);
5762 case LT_EXPR:
5763 if (TREE_OVERFLOW (lo))
5764 return omit_one_operand (type, integer_zero_node, arg00);
5765 return fold (build2 (LT_EXPR, type, arg00, lo));
5767 case LE_EXPR:
5768 if (TREE_OVERFLOW (hi))
5769 return omit_one_operand (type, integer_one_node, arg00);
5770 return fold (build2 (LE_EXPR, type, arg00, hi));
5772 case GT_EXPR:
5773 if (TREE_OVERFLOW (hi))
5774 return omit_one_operand (type, integer_zero_node, arg00);
5775 return fold (build2 (GT_EXPR, type, arg00, hi));
5777 case GE_EXPR:
5778 if (TREE_OVERFLOW (lo))
5779 return omit_one_operand (type, integer_one_node, arg00);
5780 return fold (build2 (GE_EXPR, type, arg00, lo));
5782 default:
5783 break;
5786 return NULL_TREE;
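/* Editor's worked example (not part of the original source): for
   signed truncating division, `x / 3 == 2' holds exactly when X is
   in [6, 8]: prod = 6, tmp = 2, lo = 6, hi = 8, so the EQ_EXPR case
   above emits the range check `6 <= x && x <= 8'.  Likewise
   `x / 3 < 2' becomes simply `x < 6'.  */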
5790 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5791 equality/inequality test, then return a simplified form of
5792 the test using shifts and logical operations. Otherwise return
5793 NULL. TYPE is the desired result type. */
5795 tree
5796 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5797 tree result_type)
5799 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5800 operand 0. */
5801 if (code == TRUTH_NOT_EXPR)
5803 code = TREE_CODE (arg0);
5804 if (code != NE_EXPR && code != EQ_EXPR)
5805 return NULL_TREE;
5807 /* Extract the arguments of the EQ/NE. */
5808 arg1 = TREE_OPERAND (arg0, 1);
5809 arg0 = TREE_OPERAND (arg0, 0);
5811 /* This requires us to invert the code. */
5812 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5815 /* If this is testing a single bit, we can optimize the test. */
5816 if ((code == NE_EXPR || code == EQ_EXPR)
5817 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5818 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5820 tree inner = TREE_OPERAND (arg0, 0);
5821 tree type = TREE_TYPE (arg0);
5822 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5823 enum machine_mode operand_mode = TYPE_MODE (type);
5824 int ops_unsigned;
5825 tree signed_type, unsigned_type, intermediate_type;
5826 tree arg00;
5828 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5829 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5830 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5831 if (arg00 != NULL_TREE
5832 /* This is only a win if casting to a signed type is cheap,
5833 i.e. when arg00's type is not a partial mode. */
5834 && TYPE_PRECISION (TREE_TYPE (arg00))
5835 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5837 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5838 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5839 result_type, fold_convert (stype, arg00),
5840 fold_convert (stype, integer_zero_node)));
5843 /* Otherwise we have (A & C) != 0 where C is a single bit,
5844 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5845 Similarly for (A & C) == 0. */
5847 /* If INNER is a right shift of a constant and it plus BITNUM does
5848 not overflow, adjust BITNUM and INNER. */
5849 if (TREE_CODE (inner) == RSHIFT_EXPR
5850 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5851 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5852 && bitnum < TYPE_PRECISION (type)
5853 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5854 bitnum - TYPE_PRECISION (type)))
5856 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5857 inner = TREE_OPERAND (inner, 0);
5860 /* If we are going to be able to omit the AND below, we must do our
5861 operations as unsigned. If we must use the AND, we have a choice.
5862 Normally unsigned is faster, but for some machines signed is. */
5863 #ifdef LOAD_EXTEND_OP
5864 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5865 #else
5866 ops_unsigned = 1;
5867 #endif
5869 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5870 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5871 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5872 inner = fold_convert (intermediate_type, inner);
5874 if (bitnum != 0)
5875 inner = build2 (RSHIFT_EXPR, intermediate_type,
5876 inner, size_int (bitnum));
5878 if (code == EQ_EXPR)
5879 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5880 inner, integer_one_node));
5882 /* Put the AND last so it can combine with more things. */
5883 inner = build2 (BIT_AND_EXPR, intermediate_type,
5884 inner, integer_one_node);
5886 /* Make sure to return the proper type. */
5887 inner = fold_convert (result_type, inner);
5889 return inner;
5891 return NULL_TREE;
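#if 0
/* Editor's illustration (not part of the original source): both
   functions below compute the same value; the second is the shape
   produced by fold_single_bit_test for the first.  When the mask is
   the sign bit, `(x & C) != 0' becomes `x < 0' instead.  */
static int
before (unsigned int x)
{
  return (x & 8) != 0;
}

static int
after (unsigned int x)
{
  return (x >> 3) & 1;
}
#endif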
5894 /* Check whether we are allowed to reorder operands arg0 and arg1,
5895 such that the evaluation of arg1 occurs before arg0. */
5897 static bool
5898 reorder_operands_p (tree arg0, tree arg1)
5900 if (! flag_evaluation_order)
5901 return true;
5902 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5903 return true;
5904 return ! TREE_SIDE_EFFECTS (arg0)
5905 && ! TREE_SIDE_EFFECTS (arg1);
5908 /* Test whether it is preferable to swap two operands, ARG0 and
5909 ARG1, for example because ARG0 is an integer constant and ARG1
5910 isn't. If REORDER is true, only recommend swapping if we can
5911 evaluate the operands in reverse order. */
5913 bool
5914 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5916 STRIP_SIGN_NOPS (arg0);
5917 STRIP_SIGN_NOPS (arg1);
5919 if (TREE_CODE (arg1) == INTEGER_CST)
5920 return 0;
5921 if (TREE_CODE (arg0) == INTEGER_CST)
5922 return 1;
5924 if (TREE_CODE (arg1) == REAL_CST)
5925 return 0;
5926 if (TREE_CODE (arg0) == REAL_CST)
5927 return 1;
5929 if (TREE_CODE (arg1) == COMPLEX_CST)
5930 return 0;
5931 if (TREE_CODE (arg0) == COMPLEX_CST)
5932 return 1;
5934 if (TREE_CONSTANT (arg1))
5935 return 0;
5936 if (TREE_CONSTANT (arg0))
5937 return 1;
5939 if (optimize_size)
5940 return 0;
5942 if (reorder && flag_evaluation_order
5943 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5944 return 0;
5946 if (DECL_P (arg1))
5947 return 0;
5948 if (DECL_P (arg0))
5949 return 1;
5960 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5961 for commutative and comparison operators. Ensuring a canonical
5962 form allows the optimizers to find additional redundancies without
5963 having to explicitly check for both orderings. */
5964 if (TREE_CODE (arg0) == SSA_NAME
5965 && TREE_CODE (arg1) == SSA_NAME
5966 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5967 return 1;
5969 return 0;
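/* Editor's note (not part of the original source): when folding
   `5 + n', ARG0 is an INTEGER_CST so the function returns 1, and the
   caller rebuilds the expression as `n + 5'.  Keeping constants (and
   SSA names, ordered by version) on a fixed side gives every
   commutative expression a canonical shape.  */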
5972 /* Perform constant folding and related simplification of EXPR.
5973 The related simplifications include x*1 => x, x*0 => 0, etc.,
5974 and application of the associative law.
5975 NOP_EXPR conversions may be removed freely (as long as we
5976 are careful not to change the type of the overall expression).
5977 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5978 but we can constant-fold them if they have constant operands. */
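/* Editor's sketch (hypothetical caller, not part of the original
   source): a typical use of the folder is

       tree sum = fold (build2 (PLUS_EXPR, integer_type_node, a, b));

   which yields either a simplified tree (an INTEGER_CST when A and B
   are both constants, A itself when B is zero, and so on) or a
   PLUS_EXPR equivalent to the input.  */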
5980 #ifdef ENABLE_FOLD_CHECKING
5981 # define fold(x) fold_1 (x)
5982 static tree fold_1 (tree);
5983 static
5984 #endif
5985 tree
5986 fold (tree expr)
5988 const tree t = expr;
5989 const tree type = TREE_TYPE (expr);
5990 tree t1 = NULL_TREE;
5991 tree tem;
5992 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5993 enum tree_code code = TREE_CODE (t);
5994 enum tree_code_class kind = TREE_CODE_CLASS (code);
5996 /* WINS will be nonzero when the switch is done
5997 if all operands are constant. */
5998 int wins = 1;
6000 /* Return right away if a constant. */
6001 if (kind == tcc_constant)
6002 return t;
6004 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6006 tree subop;
6008 /* Special case for conversion ops that can have fixed point args. */
6009 arg0 = TREE_OPERAND (t, 0);
6011 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6012 if (arg0 != 0)
6013 STRIP_SIGN_NOPS (arg0);
6015 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6016 subop = TREE_REALPART (arg0);
6017 else
6018 subop = arg0;
6020 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6021 && TREE_CODE (subop) != REAL_CST)
6022 /* Note that TREE_CONSTANT isn't enough:
6023 static var addresses are constant but we can't
6024 do arithmetic on them. */
6025 wins = 0;
6027 else if (IS_EXPR_CODE_CLASS (kind))
6029 int len = first_rtl_op (code);
6030 int i;
6031 for (i = 0; i < len; i++)
6033 tree op = TREE_OPERAND (t, i);
6034 tree subop;
6036 if (op == 0)
6037 continue; /* Valid for CALL_EXPR, at least. */
6039 /* Strip any conversions that don't change the mode. This is
6040 safe for every expression, except for a comparison expression
6041 because its signedness is derived from its operands. So, in
6042 the latter case, only strip conversions that don't change the
6043 signedness.
6045 Note that this is done as an internal manipulation within the
6046 constant folder, in order to find the simplest representation
6047 of the arguments so that their form can be studied. In any
6048 case, the appropriate type conversions should be put back in
6049 the tree that will get out of the constant folder. */
6050 if (kind == tcc_comparison)
6051 STRIP_SIGN_NOPS (op);
6052 else
6053 STRIP_NOPS (op);
6055 if (TREE_CODE (op) == COMPLEX_CST)
6056 subop = TREE_REALPART (op);
6057 else
6058 subop = op;
6060 if (TREE_CODE (subop) != INTEGER_CST
6061 && TREE_CODE (subop) != REAL_CST)
6062 /* Note that TREE_CONSTANT isn't enough:
6063 static var addresses are constant but we can't
6064 do arithmetic on them. */
6065 wins = 0;
6067 if (i == 0)
6068 arg0 = op;
6069 else if (i == 1)
6070 arg1 = op;
6074 /* If this is a commutative operation, and ARG0 is a constant, move it
6075 to ARG1 to reduce the number of tests below. */
6076 if (commutative_tree_code (code)
6077 && tree_swap_operands_p (arg0, arg1, true))
6078 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6079 TREE_OPERAND (t, 0)));
6081 /* Now WINS is set as described above,
6082 ARG0 is the first operand of EXPR,
6083 and ARG1 is the second operand (if it has more than one operand).
6085 First check for cases where an arithmetic operation is applied to a
6086 compound, conditional, or comparison operation. Push the arithmetic
6087 operation inside the compound or conditional to see if any folding
6088 can then be done. Convert comparison to conditional for this purpose.
6090 This also optimizes non-constant cases that used to be done in
6090 expand_expr.
6092 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6093 one of the operands is a comparison and the other is a comparison, a
6094 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6095 code below would make the expression more complex. Change it to a
6096 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6097 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6099 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6100 || code == EQ_EXPR || code == NE_EXPR)
6101 && ((truth_value_p (TREE_CODE (arg0))
6102 && (truth_value_p (TREE_CODE (arg1))
6103 || (TREE_CODE (arg1) == BIT_AND_EXPR
6104 && integer_onep (TREE_OPERAND (arg1, 1)))))
6105 || (truth_value_p (TREE_CODE (arg1))
6106 && (truth_value_p (TREE_CODE (arg0))
6107 || (TREE_CODE (arg0) == BIT_AND_EXPR
6108 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6110 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6111 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6112 : TRUTH_XOR_EXPR,
6113 type, fold_convert (boolean_type_node, arg0),
6114 fold_convert (boolean_type_node, arg1)));
6116 if (code == EQ_EXPR)
6117 tem = invert_truthvalue (tem);
6119 return tem;
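/* Editor's example (not part of the original source): for
   boolean-valued operands, `(a < b) & (c < d)' is rewritten as a
   TRUTH_AND_EXPR, `(a < b) != (c < d)' as a TRUTH_XOR_EXPR, and
   `(a < b) == (c < d)' as the inversion of a TRUTH_XOR_EXPR.  */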
6122 if (TREE_CODE_CLASS (code) == tcc_unary)
6124 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6125 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6126 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6127 else if (TREE_CODE (arg0) == COND_EXPR)
6129 tree arg01 = TREE_OPERAND (arg0, 1);
6130 tree arg02 = TREE_OPERAND (arg0, 2);
6131 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6132 arg01 = fold (build1 (code, type, arg01));
6133 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6134 arg02 = fold (build1 (code, type, arg02));
6135 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6136 arg01, arg02));
6138 /* If this was a conversion, and all we did was to move it
6139 inside the COND_EXPR, bring it back out. But leave it if
6140 it is a conversion from integer to integer and the
6141 result precision is no wider than a word since such a
6142 conversion is cheap and may be optimized away by combine,
6143 while it couldn't if it were outside the COND_EXPR. Then return
6144 so we don't get into an infinite recursion loop taking the
6145 conversion out and then back in. */
6147 if ((code == NOP_EXPR || code == CONVERT_EXPR
6148 || code == NON_LVALUE_EXPR)
6149 && TREE_CODE (tem) == COND_EXPR
6150 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6151 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6152 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6153 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6154 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6155 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6156 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6157 && (INTEGRAL_TYPE_P
6158 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6159 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6160 tem = build1 (code, type,
6161 build3 (COND_EXPR,
6162 TREE_TYPE (TREE_OPERAND
6163 (TREE_OPERAND (tem, 1), 0)),
6164 TREE_OPERAND (tem, 0),
6165 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6166 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6167 return tem;
6169 else if (COMPARISON_CLASS_P (arg0))
6171 if (TREE_CODE (type) == BOOLEAN_TYPE)
6173 arg0 = copy_node (arg0);
6174 TREE_TYPE (arg0) = type;
6175 return arg0;
6177 else if (TREE_CODE (type) != INTEGER_TYPE)
6178 return fold (build3 (COND_EXPR, type, arg0,
6179 fold (build1 (code, type,
6180 integer_one_node)),
6181 fold (build1 (code, type,
6182 integer_zero_node))));
6185 else if (TREE_CODE_CLASS (code) == tcc_comparison
6186 && TREE_CODE (arg0) == COMPOUND_EXPR)
6187 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6188 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6189 else if (TREE_CODE_CLASS (code) == tcc_comparison
6190 && TREE_CODE (arg1) == COMPOUND_EXPR)
6191 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6192 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6193 else if (TREE_CODE_CLASS (code) == tcc_binary
6194 || TREE_CODE_CLASS (code) == tcc_comparison)
6196 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6197 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6198 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6199 arg1)));
6200 if (TREE_CODE (arg1) == COMPOUND_EXPR
6201 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6202 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6203 fold (build2 (code, type,
6204 arg0, TREE_OPERAND (arg1, 1))));
6206 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6208 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6209 /*cond_first_p=*/1);
6210 if (tem != NULL_TREE)
6211 return tem;
6214 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6216 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6217 /*cond_first_p=*/0);
6218 if (tem != NULL_TREE)
6219 return tem;
6223 switch (code)
6225 case CONST_DECL:
6226 return fold (DECL_INITIAL (t));
6228 case NOP_EXPR:
6229 case FLOAT_EXPR:
6230 case CONVERT_EXPR:
6231 case FIX_TRUNC_EXPR:
6232 case FIX_CEIL_EXPR:
6233 case FIX_FLOOR_EXPR:
6234 case FIX_ROUND_EXPR:
6235 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6236 return TREE_OPERAND (t, 0);
6238 /* Handle cases of two conversions in a row. */
6239 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6240 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6242 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6243 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6244 int inside_int = INTEGRAL_TYPE_P (inside_type);
6245 int inside_ptr = POINTER_TYPE_P (inside_type);
6246 int inside_float = FLOAT_TYPE_P (inside_type);
6247 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6248 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6249 int inter_int = INTEGRAL_TYPE_P (inter_type);
6250 int inter_ptr = POINTER_TYPE_P (inter_type);
6251 int inter_float = FLOAT_TYPE_P (inter_type);
6252 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6253 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6254 int final_int = INTEGRAL_TYPE_P (type);
6255 int final_ptr = POINTER_TYPE_P (type);
6256 int final_float = FLOAT_TYPE_P (type);
6257 unsigned int final_prec = TYPE_PRECISION (type);
6258 int final_unsignedp = TYPE_UNSIGNED (type);
6260 /* In addition to the cases of two conversions in a row
6261 handled below, if we are converting something to its own
6262 type via an object of identical or wider precision, neither
6263 conversion is needed. */
6264 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6265 && ((inter_int && final_int) || (inter_float && final_float))
6266 && inter_prec >= final_prec)
6267 return fold (build1 (code, type,
6268 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6270 /* Likewise, if the intermediate and final types are either both
6271 float or both integer, we don't need the middle conversion if
6272 it is wider than the final type and doesn't change the signedness
6273 (for integers). Avoid this if the final type is a pointer
6274 since then we sometimes need the inner conversion. Likewise if
6275 the outer has a precision not equal to the size of its mode. */
6276 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6277 || (inter_float && inside_float))
6278 && inter_prec >= inside_prec
6279 && (inter_float || inter_unsignedp == inside_unsignedp)
6280 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6281 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6282 && ! final_ptr)
6283 return fold (build1 (code, type,
6284 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6286 /* If we have a sign-extension of a zero-extended value, we can
6287 replace that by a single zero-extension. */
6288 if (inside_int && inter_int && final_int
6289 && inside_prec < inter_prec && inter_prec < final_prec
6290 && inside_unsignedp && !inter_unsignedp)
6291 return fold (build1 (code, type,
6292 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6294 /* Two conversions in a row are not needed unless:
6295 - some conversion is floating-point (overstrict for now), or
6296 - the intermediate type is narrower than both initial and
6297 final, or
6298 - the intermediate type and innermost type differ in signedness,
6299 and the outermost type is wider than the intermediate, or
6300 - the initial type is a pointer type and the precisions of the
6301 intermediate and final types differ, or
6302 - the final type is a pointer type and the precisions of the
6303 initial and intermediate types differ. */
6304 if (! inside_float && ! inter_float && ! final_float
6305 && (inter_prec > inside_prec || inter_prec > final_prec)
6306 && ! (inside_int && inter_int
6307 && inter_unsignedp != inside_unsignedp
6308 && inter_prec < final_prec)
6309 && ((inter_unsignedp && inter_prec > inside_prec)
6310 == (final_unsignedp && final_prec > inter_prec))
6311 && ! (inside_ptr && inter_prec != final_prec)
6312 && ! (final_ptr && inside_prec != inter_prec)
6313 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6314 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6315 && ! final_ptr)
6316 return fold (build1 (code, type,
6317 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
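/* Editor's examples of the rules above (not part of the original
   source; assuming 16-bit short, 32-bit int, 64-bit long): for int I,
   (int) (long) i needs neither conversion; (short) (long) i keeps
   only the final narrowing, becoming (short) i; and for unsigned
   short US, the sign-extension of a zero-extension (long) (int) us
   becomes the single zero-extension (long) us.  */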
6320 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6321 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6322 /* Detect assigning a bitfield. */
6323 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6324 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6326 /* Don't leave an assignment inside a conversion
6327 unless assigning a bitfield. */
6328 tree prev = TREE_OPERAND (t, 0);
6329 tem = copy_node (t);
6330 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6331 /* First do the assignment, then return converted constant. */
6332 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6333 TREE_NO_WARNING (tem) = 1;
6334 TREE_USED (tem) = 1;
6335 return tem;
6338 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6339 constant (if x has signed type, the sign bit cannot be set
6340 in c). This folds extension into the BIT_AND_EXPR. */
6341 if (INTEGRAL_TYPE_P (type)
6342 && TREE_CODE (type) != BOOLEAN_TYPE
6343 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6344 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6346 tree and = TREE_OPERAND (t, 0);
6347 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6348 int change = 0;
6350 if (TYPE_UNSIGNED (TREE_TYPE (and))
6351 || (TYPE_PRECISION (type)
6352 <= TYPE_PRECISION (TREE_TYPE (and))))
6353 change = 1;
6354 else if (TYPE_PRECISION (TREE_TYPE (and1))
6355 <= HOST_BITS_PER_WIDE_INT
6356 && host_integerp (and1, 1))
6358 unsigned HOST_WIDE_INT cst;
6360 cst = tree_low_cst (and1, 1);
6361 cst &= (HOST_WIDE_INT) -1
6362 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6363 change = (cst == 0);
6364 #ifdef LOAD_EXTEND_OP
6365 if (change
6366 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6367 == ZERO_EXTEND))
6369 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6370 and0 = fold_convert (uns, and0);
6371 and1 = fold_convert (uns, and1);
6373 #endif
6375 if (change)
6376 return fold (build2 (BIT_AND_EXPR, type,
6377 fold_convert (type, and0),
6378 fold_convert (type, and1)));
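/* Editor's example (not part of the original source): widening a
   masked value, e.g. (unsigned long) (x & 0xff), can be rewritten as
   (unsigned long) x & 0xff, exposing the mask to later folds.  */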
6381 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6382 T2 being pointers to types of the same size. */
6383 if (POINTER_TYPE_P (TREE_TYPE (t))
6384 && BINARY_CLASS_P (arg0)
6385 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6386 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6388 tree arg00 = TREE_OPERAND (arg0, 0);
6389 tree t0 = TREE_TYPE (t);
6390 tree t1 = TREE_TYPE (arg00);
6391 tree tt0 = TREE_TYPE (t0);
6392 tree tt1 = TREE_TYPE (t1);
6393 tree s0 = TYPE_SIZE (tt0);
6394 tree s1 = TYPE_SIZE (tt1);
6396 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6397 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6398 TREE_OPERAND (arg0, 1));
6401 tem = fold_convert_const (code, type, arg0);
6402 return tem ? tem : t;
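/* Editor's example (not part of the original source): the final
   fold_convert_const call above handles constant operands, e.g.
   converting the INTEGER_CST 258 to an 8-bit unsigned type yields
   the INTEGER_CST 2, and converting the INTEGER_CST 3 to double
   yields a REAL_CST.  */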
6404 case VIEW_CONVERT_EXPR:
6405 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6406 return build1 (VIEW_CONVERT_EXPR, type,
6407 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6408 return t;
6410 case COMPONENT_REF:
6411 if (TREE_CODE (arg0) == CONSTRUCTOR
6412 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6414 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6415 if (m)
6416 return TREE_VALUE (m);
6418 return t;
6420 case RANGE_EXPR:
6421 if (TREE_CONSTANT (t) != wins)
6423 tem = copy_node (t);
6424 TREE_CONSTANT (tem) = wins;
6425 TREE_INVARIANT (tem) = wins;
6426 return tem;
6428 return t;
6430 case NEGATE_EXPR:
6431 if (negate_expr_p (arg0))
6432 return fold_convert (type, negate_expr (arg0));
6433 return t;
6435 case ABS_EXPR:
6436 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6437 return fold_abs_const (arg0, type);
6438 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6439 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6440 /* Convert fabs((double)float) into (double)fabsf(float). */
6441 else if (TREE_CODE (arg0) == NOP_EXPR
6442 && TREE_CODE (type) == REAL_TYPE)
6444 tree targ0 = strip_float_extensions (arg0);
6445 if (targ0 != arg0)
6446 return fold_convert (type, fold (build1 (ABS_EXPR,
6447 TREE_TYPE (targ0),
6448 targ0)));
6450 else if (tree_expr_nonnegative_p (arg0))
6451 return arg0;
6452 return t;
6454 case CONJ_EXPR:
6455 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6456 return fold_convert (type, arg0);
6457 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6458 return build2 (COMPLEX_EXPR, type,
6459 TREE_OPERAND (arg0, 0),
6460 negate_expr (TREE_OPERAND (arg0, 1)));
6461 else if (TREE_CODE (arg0) == COMPLEX_CST)
6462 return build_complex (type, TREE_REALPART (arg0),
6463 negate_expr (TREE_IMAGPART (arg0)));
6464 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6465 return fold (build2 (TREE_CODE (arg0), type,
6466 fold (build1 (CONJ_EXPR, type,
6467 TREE_OPERAND (arg0, 0))),
6468 fold (build1 (CONJ_EXPR, type,
6469 TREE_OPERAND (arg0, 1)))));
6470 else if (TREE_CODE (arg0) == CONJ_EXPR)
6471 return TREE_OPERAND (arg0, 0);
6472 return t;
6474 case BIT_NOT_EXPR:
6475 if (TREE_CODE (arg0) == INTEGER_CST)
6476 return fold_not_const (arg0, type);
6477 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6478 return TREE_OPERAND (arg0, 0);
6479 return t;
6481 case PLUS_EXPR:
6482 /* A + (-B) -> A - B */
6483 if (TREE_CODE (arg1) == NEGATE_EXPR)
6484 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6485 /* (-A) + B -> B - A */
6486 if (TREE_CODE (arg0) == NEGATE_EXPR
6487 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6488 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6489 if (! FLOAT_TYPE_P (type))
6491 if (integer_zerop (arg1))
6492 return non_lvalue (fold_convert (type, arg0));
6494 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6495 with a constant, and the two constants have no bits in common,
6496 we should treat this as a BIT_IOR_EXPR since this may produce more
6497 simplifications. */
6498 if (TREE_CODE (arg0) == BIT_AND_EXPR
6499 && TREE_CODE (arg1) == BIT_AND_EXPR
6500 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6501 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6502 && integer_zerop (const_binop (BIT_AND_EXPR,
6503 TREE_OPERAND (arg0, 1),
6504 TREE_OPERAND (arg1, 1), 0)))
6506 code = BIT_IOR_EXPR;
6507 goto bit_ior;
6510 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6511 (plus (plus (mult) (mult)) (foo)) so that we can
6512 take advantage of the factoring cases below. */
6513 if ((TREE_CODE (arg0) == PLUS_EXPR
6514 && TREE_CODE (arg1) == MULT_EXPR)
6515 || (TREE_CODE (arg1) == PLUS_EXPR
6516 && TREE_CODE (arg0) == MULT_EXPR))
6518 tree parg0, parg1, parg, marg;
6520 if (TREE_CODE (arg0) == PLUS_EXPR)
6521 parg = arg0, marg = arg1;
6522 else
6523 parg = arg1, marg = arg0;
6524 parg0 = TREE_OPERAND (parg, 0);
6525 parg1 = TREE_OPERAND (parg, 1);
6526 STRIP_NOPS (parg0);
6527 STRIP_NOPS (parg1);
6529 if (TREE_CODE (parg0) == MULT_EXPR
6530 && TREE_CODE (parg1) != MULT_EXPR)
6531 return fold (build2 (PLUS_EXPR, type,
6532 fold (build2 (PLUS_EXPR, type,
6533 fold_convert (type, parg0),
6534 fold_convert (type, marg))),
6535 fold_convert (type, parg1)));
6536 if (TREE_CODE (parg0) != MULT_EXPR
6537 && TREE_CODE (parg1) == MULT_EXPR)
6538 return fold (build2 (PLUS_EXPR, type,
6539 fold (build2 (PLUS_EXPR, type,
6540 fold_convert (type, parg1),
6541 fold_convert (type, marg))),
6542 fold_convert (type, parg0)));
6545 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6547 tree arg00, arg01, arg10, arg11;
6548 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6550 /* (A * C) + (B * C) -> (A+B) * C.
6551 We are most concerned about the case where C is a constant,
6552 but other combinations show up during loop reduction. Since
6553 it is not difficult, try all four possibilities. */
6555 arg00 = TREE_OPERAND (arg0, 0);
6556 arg01 = TREE_OPERAND (arg0, 1);
6557 arg10 = TREE_OPERAND (arg1, 0);
6558 arg11 = TREE_OPERAND (arg1, 1);
6559 same = NULL_TREE;
6561 if (operand_equal_p (arg01, arg11, 0))
6562 same = arg01, alt0 = arg00, alt1 = arg10;
6563 else if (operand_equal_p (arg00, arg10, 0))
6564 same = arg00, alt0 = arg01, alt1 = arg11;
6565 else if (operand_equal_p (arg00, arg11, 0))
6566 same = arg00, alt0 = arg01, alt1 = arg10;
6567 else if (operand_equal_p (arg01, arg10, 0))
6568 same = arg01, alt0 = arg00, alt1 = arg11;
6570 /* No identical multiplicands; see if we can find a common
6571 power-of-two factor in non-power-of-two multiplies. This
6572 can help in multi-dimensional array access. */
6573 else if (TREE_CODE (arg01) == INTEGER_CST
6574 && TREE_CODE (arg11) == INTEGER_CST
6575 && TREE_INT_CST_HIGH (arg01) == 0
6576 && TREE_INT_CST_HIGH (arg11) == 0)
6578 HOST_WIDE_INT int01, int11, tmp;
6579 int01 = TREE_INT_CST_LOW (arg01);
6580 int11 = TREE_INT_CST_LOW (arg11);
6582 /* Move min of absolute values to int11. */
6583 if ((int01 >= 0 ? int01 : -int01)
6584 < (int11 >= 0 ? int11 : -int11))
6586 tmp = int01, int01 = int11, int11 = tmp;
6587 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6588 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6591 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6593 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6594 build_int_cst (NULL_TREE,
6595 int01 / int11)));
6596 alt1 = arg10;
6597 same = arg11;
6601 if (same)
6602 return fold (build2 (MULT_EXPR, type,
6603 fold (build2 (PLUS_EXPR, type,
6604 alt0, alt1)),
6605 same));
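/* Editor's examples (not part of the original source): `i*4 + j*4'
   becomes `(i + j) * 4', and the power-of-two case rewrites
   `i*12 + j*4' as `(i*3 + j) * 4', a shape that often arises from
   multi-dimensional array indexing.  */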
6608 else
6610 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6611 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6612 return non_lvalue (fold_convert (type, arg0));
6614 /* Likewise if the operands are reversed. */
6615 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6616 return non_lvalue (fold_convert (type, arg1));
6618 /* Convert X + -C into X - C. */
6619 if (TREE_CODE (arg1) == REAL_CST
6620 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6622 tem = fold_negate_const (arg1, type);
6623 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6624 return fold (build2 (MINUS_EXPR, type,
6625 fold_convert (type, arg0),
6626 fold_convert (type, tem)));
6629 /* Convert x+x into x*2.0. */
6630 if (operand_equal_p (arg0, arg1, 0)
6631 && SCALAR_FLOAT_TYPE_P (type))
6632 return fold (build2 (MULT_EXPR, type, arg0,
6633 build_real (type, dconst2)));
6635 /* Convert x*c+x into x*(c+1). */
6636 if (flag_unsafe_math_optimizations
6637 && TREE_CODE (arg0) == MULT_EXPR
6638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6639 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6640 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6642 REAL_VALUE_TYPE c;
6644 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6645 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6646 return fold (build2 (MULT_EXPR, type, arg1,
6647 build_real (type, c)));
6650 /* Convert x+x*c into x*(c+1). */
6651 if (flag_unsafe_math_optimizations
6652 && TREE_CODE (arg1) == MULT_EXPR
6653 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6654 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6655 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6657 REAL_VALUE_TYPE c;
6659 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6660 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6661 return fold (build2 (MULT_EXPR, type, arg0,
6662 build_real (type, c)));
6665 /* Convert x*c1+x*c2 into x*(c1+c2). */
6666 if (flag_unsafe_math_optimizations
6667 && TREE_CODE (arg0) == MULT_EXPR
6668 && TREE_CODE (arg1) == MULT_EXPR
6669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6670 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6671 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6672 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6673 && operand_equal_p (TREE_OPERAND (arg0, 0),
6674 TREE_OPERAND (arg1, 0), 0))
6676 REAL_VALUE_TYPE c1, c2;
6678 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6679 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6680 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6681 return fold (build2 (MULT_EXPR, type,
6682 TREE_OPERAND (arg0, 0),
6683 build_real (type, c1)));
6685 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6686 if (flag_unsafe_math_optimizations
6687 && TREE_CODE (arg1) == PLUS_EXPR
6688 && TREE_CODE (arg0) != MULT_EXPR)
6690 tree tree10 = TREE_OPERAND (arg1, 0);
6691 tree tree11 = TREE_OPERAND (arg1, 1);
6692 if (TREE_CODE (tree11) == MULT_EXPR
6693 && TREE_CODE (tree10) == MULT_EXPR)
6695 tree tree0;
6696 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6697 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6700 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
6701 if (flag_unsafe_math_optimizations
6702 && TREE_CODE (arg0) == PLUS_EXPR
6703 && TREE_CODE (arg1) != MULT_EXPR)
6705 tree tree00 = TREE_OPERAND (arg0, 0);
6706 tree tree01 = TREE_OPERAND (arg0, 1);
6707 if (TREE_CODE (tree01) == MULT_EXPR
6708 && TREE_CODE (tree00) == MULT_EXPR)
6710 tree tree0;
6711 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6712 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6717 bit_rotate:
6718 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6719 is a rotate of A by C1 bits. */
6720 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6721 is a rotate of A by B bits. */
6723 enum tree_code code0, code1;
6724 code0 = TREE_CODE (arg0);
6725 code1 = TREE_CODE (arg1);
6726 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6727 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6728 && operand_equal_p (TREE_OPERAND (arg0, 0),
6729 TREE_OPERAND (arg1, 0), 0)
6730 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6732 tree tree01, tree11;
6733 enum tree_code code01, code11;
6735 tree01 = TREE_OPERAND (arg0, 1);
6736 tree11 = TREE_OPERAND (arg1, 1);
6737 STRIP_NOPS (tree01);
6738 STRIP_NOPS (tree11);
6739 code01 = TREE_CODE (tree01);
6740 code11 = TREE_CODE (tree11);
6741 if (code01 == INTEGER_CST
6742 && code11 == INTEGER_CST
6743 && TREE_INT_CST_HIGH (tree01) == 0
6744 && TREE_INT_CST_HIGH (tree11) == 0
6745 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6746 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6747 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6748 code0 == LSHIFT_EXPR ? tree01 : tree11);
6749 else if (code11 == MINUS_EXPR)
6751 tree tree110, tree111;
6752 tree110 = TREE_OPERAND (tree11, 0);
6753 tree111 = TREE_OPERAND (tree11, 1);
6754 STRIP_NOPS (tree110);
6755 STRIP_NOPS (tree111);
6756 if (TREE_CODE (tree110) == INTEGER_CST
6757 && 0 == compare_tree_int (tree110,
6758 TYPE_PRECISION
6759 (TREE_TYPE (TREE_OPERAND
6760 (arg0, 0))))
6761 && operand_equal_p (tree01, tree111, 0))
6762 return build2 ((code0 == LSHIFT_EXPR
6763 ? LROTATE_EXPR
6764 : RROTATE_EXPR),
6765 type, TREE_OPERAND (arg0, 0), tree01);
6767 else if (code01 == MINUS_EXPR)
6769 tree tree010, tree011;
6770 tree010 = TREE_OPERAND (tree01, 0);
6771 tree011 = TREE_OPERAND (tree01, 1);
6772 STRIP_NOPS (tree010);
6773 STRIP_NOPS (tree011);
6774 if (TREE_CODE (tree010) == INTEGER_CST
6775 && 0 == compare_tree_int (tree010,
6776 TYPE_PRECISION
6777 (TREE_TYPE (TREE_OPERAND
6778 (arg0, 0))))
6779 && operand_equal_p (tree11, tree011, 0))
6780 return build2 ((code0 != LSHIFT_EXPR
6781 ? LROTATE_EXPR
6782 : RROTATE_EXPR),
6783 type, TREE_OPERAND (arg0, 0), tree11);
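/* Editor's illustration (not part of the original source, assuming a
   32-bit unsigned int X): both shapes recognized above,

       (x << 3) + (x >> 29)        and
       (x << n) + (x >> (32 - n)),

   are rewritten as left rotates of X, by 3 and by N respectively.  */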
6788 associate:
6789 /* In most languages, can't associate operations on floats through
6790 parentheses. Rather than remember where the parentheses were, we
6791 don't associate floats at all, unless the user has specified
6792 -funsafe-math-optimizations. */
6794 if (! wins
6795 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6797 tree var0, con0, lit0, minus_lit0;
6798 tree var1, con1, lit1, minus_lit1;
6800 /* Split both trees into variables, constants, and literals. Then
6801 associate each group together, the constants with literals,
6802 then the result with variables. This increases the chances of
6803 literals being recombined later and of generating relocatable
6804 expressions for the sum of a constant and literal. */
6805 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6806 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6807 code == MINUS_EXPR);
6809 /* Only do something if we found more than two objects. Otherwise,
6810 nothing has changed and we risk infinite recursion. */
6811 if (2 < ((var0 != 0) + (var1 != 0)
6812 + (con0 != 0) + (con1 != 0)
6813 + (lit0 != 0) + (lit1 != 0)
6814 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6816 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6817 if (code == MINUS_EXPR)
6818 code = PLUS_EXPR;
6820 var0 = associate_trees (var0, var1, code, type);
6821 con0 = associate_trees (con0, con1, code, type);
6822 lit0 = associate_trees (lit0, lit1, code, type);
6823 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6825 /* Preserve the MINUS_EXPR if the negative part of the literal is
6826 greater than the positive part. Otherwise, the multiplicative
6827 folding code (i.e. extract_muldiv) may be fooled in case
6828 unsigned constants are subtracted, like in the following
6829 example: ((X*2 + 4) - 8U)/2. */
6830 if (minus_lit0 && lit0)
6832 if (TREE_CODE (lit0) == INTEGER_CST
6833 && TREE_CODE (minus_lit0) == INTEGER_CST
6834 && tree_int_cst_lt (lit0, minus_lit0))
6836 minus_lit0 = associate_trees (minus_lit0, lit0,
6837 MINUS_EXPR, type);
6838 lit0 = 0;
6840 else
6842 lit0 = associate_trees (lit0, minus_lit0,
6843 MINUS_EXPR, type);
6844 minus_lit0 = 0;
6847 if (minus_lit0)
6849 if (con0 == 0)
6850 return fold_convert (type,
6851 associate_trees (var0, minus_lit0,
6852 MINUS_EXPR, type));
6853 else
6855 con0 = associate_trees (con0, minus_lit0,
6856 MINUS_EXPR, type);
6857 return fold_convert (type,
6858 associate_trees (var0, con0,
6859 PLUS_EXPR, type));
6863 con0 = associate_trees (con0, lit0, code, type);
6864 return fold_convert (type, associate_trees (var0, con0,
6865 code, type));
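/* Editor's worked example (not part of the original source): folding
   `(x + 4) - 8U' splits the operands into VAR x, LIT 4 and MINUS_LIT
   8; since the negative literal is larger, the two literals are
   recombined as MINUS_LIT 4 and the result is rebuilt as `x - 4',
   keeping the subtraction so extract_muldiv is not misled.  */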
6869 binary:
6870 if (wins)
6871 t1 = const_binop (code, arg0, arg1, 0);
6872 if (t1 != NULL_TREE)
6874 /* The return value should always have
6875 the same type as the original expression. */
6876 if (TREE_TYPE (t1) != type)
6877 t1 = fold_convert (type, t1);
6879 return t1;
6881 return t;
6883 case MINUS_EXPR:
6884 /* A - (-B) -> A + B */
6885 if (TREE_CODE (arg1) == NEGATE_EXPR)
6886 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6887 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6888 if (TREE_CODE (arg0) == NEGATE_EXPR
6889 && (FLOAT_TYPE_P (type)
6890 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6891 && negate_expr_p (arg1)
6892 && reorder_operands_p (arg0, arg1))
6893 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6894 TREE_OPERAND (arg0, 0)));
6896 if (! FLOAT_TYPE_P (type))
6898 if (! wins && integer_zerop (arg0))
6899 return negate_expr (fold_convert (type, arg1));
6900 if (integer_zerop (arg1))
6901 return non_lvalue (fold_convert (type, arg0));
6903 /* Fold A - (A & B) into ~B & A. */
6904 if (!TREE_SIDE_EFFECTS (arg0)
6905 && TREE_CODE (arg1) == BIT_AND_EXPR)
6907 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6908 return fold (build2 (BIT_AND_EXPR, type,
6909 fold (build1 (BIT_NOT_EXPR, type,
6910 TREE_OPERAND (arg1, 0))),
6911 arg0));
6912 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6913 return fold (build2 (BIT_AND_EXPR, type,
6914 fold (build1 (BIT_NOT_EXPR, type,
6915 TREE_OPERAND (arg1, 1))),
6916 arg0));
6919 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6920 any power of 2 minus 1. */
6921 if (TREE_CODE (arg0) == BIT_AND_EXPR
6922 && TREE_CODE (arg1) == BIT_AND_EXPR
6923 && operand_equal_p (TREE_OPERAND (arg0, 0),
6924 TREE_OPERAND (arg1, 0), 0))
6926 tree mask0 = TREE_OPERAND (arg0, 1);
6927 tree mask1 = TREE_OPERAND (arg1, 1);
6928 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6930 if (operand_equal_p (tem, mask1, 0))
6932 tem = fold (build2 (BIT_XOR_EXPR, type,
6933 TREE_OPERAND (arg0, 0), mask1));
6934 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6939 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6940 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6941 return non_lvalue (fold_convert (type, arg0));
6943 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6944 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6945 (-ARG1 + ARG0) reduces to -ARG1. */
6946 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6947 return negate_expr (fold_convert (type, arg1));
6949 /* Fold &x - &x. This can happen from &x.foo - &x.
6950 This is unsafe for certain floats even in non-IEEE formats.
6951 In IEEE, it is unsafe because it gives the wrong result for NaNs.
6952 Also note that operand_equal_p is always false if an operand
6953 is volatile. */
6955 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6956 && operand_equal_p (arg0, arg1, 0))
6957 return fold_convert (type, integer_zero_node);
6959 /* A - B -> A + (-B) if B is easily negatable. */
6960 if (!wins && negate_expr_p (arg1)
6961 && ((FLOAT_TYPE_P (type)
6962 /* Avoid this transformation if B is a positive REAL_CST. */
6963 && (TREE_CODE (arg1) != REAL_CST
6964 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6965 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6966 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6968 /* Try folding difference of addresses. */
6970 HOST_WIDE_INT diff;
6972 if (TREE_CODE (arg0) == ADDR_EXPR
6973 && TREE_CODE (arg1) == ADDR_EXPR
6974 && ptr_difference_const (TREE_OPERAND (arg0, 0),
6975 TREE_OPERAND (arg1, 0),
6976 &diff))
6977 return build_int_cst_type (type, diff);
6980 if (TREE_CODE (arg0) == MULT_EXPR
6981 && TREE_CODE (arg1) == MULT_EXPR
6982 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6984 /* (A * C) - (B * C) -> (A-B) * C. */
6985 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6986 TREE_OPERAND (arg1, 1), 0))
6987 return fold (build2 (MULT_EXPR, type,
6988 fold (build2 (MINUS_EXPR, type,
6989 TREE_OPERAND (arg0, 0),
6990 TREE_OPERAND (arg1, 0))),
6991 TREE_OPERAND (arg0, 1)));
6992 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6993 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6994 TREE_OPERAND (arg1, 0), 0))
6995 return fold (build2 (MULT_EXPR, type,
6996 TREE_OPERAND (arg0, 0),
6997 fold (build2 (MINUS_EXPR, type,
6998 TREE_OPERAND (arg0, 1),
6999 TREE_OPERAND (arg1, 1)))));
7002 goto associate;
7004 case MULT_EXPR:
7005 /* (-A) * (-B) -> A * B */
7006 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7007 return fold (build2 (MULT_EXPR, type,
7008 TREE_OPERAND (arg0, 0),
7009 negate_expr (arg1)));
7010 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7011 return fold (build2 (MULT_EXPR, type,
7012 negate_expr (arg0),
7013 TREE_OPERAND (arg1, 0)));
7015 if (! FLOAT_TYPE_P (type))
7017 if (integer_zerop (arg1))
7018 return omit_one_operand (type, arg1, arg0);
7019 if (integer_onep (arg1))
7020 return non_lvalue (fold_convert (type, arg0));
7022 /* (a * (1 << b)) is (a << b) */
7023 if (TREE_CODE (arg1) == LSHIFT_EXPR
7024 && integer_onep (TREE_OPERAND (arg1, 0)))
7025 return fold (build2 (LSHIFT_EXPR, type, arg0,
7026 TREE_OPERAND (arg1, 1)));
7027 if (TREE_CODE (arg0) == LSHIFT_EXPR
7028 && integer_onep (TREE_OPERAND (arg0, 0)))
7029 return fold (build2 (LSHIFT_EXPR, type, arg1,
7030 TREE_OPERAND (arg0, 1)));
7032 if (TREE_CODE (arg1) == INTEGER_CST
7033 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7034 fold_convert (type, arg1),
7035 code, NULL_TREE)))
7036 return fold_convert (type, tem);
7039 else
7041 /* Maybe fold x * 0 to 0. The expressions aren't the same
7042 when x is NaN, since x * 0 is also NaN. Nor are they the
7043 same in modes with signed zeros, since multiplying a
7044 negative value by 0 gives -0, not +0. */
7045 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7046 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7047 && real_zerop (arg1))
7048 return omit_one_operand (type, arg1, arg0);
7049 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7050 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7051 && real_onep (arg1))
7052 return non_lvalue (fold_convert (type, arg0));
7054 /* Transform x * -1.0 into -x. */
7055 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7056 && real_minus_onep (arg1))
7057 return fold_convert (type, negate_expr (arg0));
7059 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7060 if (flag_unsafe_math_optimizations
7061 && TREE_CODE (arg0) == RDIV_EXPR
7062 && TREE_CODE (arg1) == REAL_CST
7063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7065 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7066 arg1, 0);
7067 if (tem)
7068 return fold (build2 (RDIV_EXPR, type, tem,
7069 TREE_OPERAND (arg0, 1)));
7072 if (flag_unsafe_math_optimizations)
7074 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7075 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7077 /* Optimizations of root(...)*root(...). */
7078 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7080 tree rootfn, arg, arglist;
7081 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7082 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7084 /* Optimize sqrt(x)*sqrt(x) as x. */
7085 if (BUILTIN_SQRT_P (fcode0)
7086 && operand_equal_p (arg00, arg10, 0)
7087 && ! HONOR_SNANS (TYPE_MODE (type)))
7088 return arg00;
7090 /* Optimize root(x)*root(y) as root(x*y). */
7091 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7092 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7093 arglist = build_tree_list (NULL_TREE, arg);
7094 return build_function_call_expr (rootfn, arglist);
7097 /* Optimize expN(x)*expN(y) as expN(x+y). */
7098 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7100 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7101 tree arg = build2 (PLUS_EXPR, type,
7102 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7103 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7104 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7105 return build_function_call_expr (expfn, arglist);
7108 /* Optimizations of pow(...)*pow(...). */
7109 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7110 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7111 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7113 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7114 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7115 1)));
7116 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7117 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7118 1)));
7120 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7121 if (operand_equal_p (arg01, arg11, 0))
7123 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7124 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7125 tree arglist = tree_cons (NULL_TREE, fold (arg),
7126 build_tree_list (NULL_TREE,
7127 arg01));
7128 return build_function_call_expr (powfn, arglist);
7131 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7132 if (operand_equal_p (arg00, arg10, 0))
7134 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7135 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7136 tree arglist = tree_cons (NULL_TREE, arg00,
7137 build_tree_list (NULL_TREE,
7138 arg));
7139 return build_function_call_expr (powfn, arglist);
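#if 0
/* Editorial sketch (not compiled, assumes <math.h>): the two pow
   identities above, on small integer exponents.  */
double a = pow (2.0, 3.0) * pow (5.0, 3.0);  /* 8 * 125 == 1000 */
double b = pow (2.0 * 5.0, 3.0);             /* pow (10, 3) == 1000 */
double c = pow (2.0, 3.0) * pow (2.0, 4.0);  /* 8 * 16 == 128 */
double d = pow (2.0, 3.0 + 4.0);             /* pow (2, 7) == 128 */
#endif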
7143 /* Optimize tan(x)*cos(x) as sin(x). */
7144 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7145 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7146 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7147 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7148 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7149 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7150 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7151 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7153 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7155 if (sinfn != NULL_TREE)
7156 return build_function_call_expr (sinfn,
7157 TREE_OPERAND (arg0, 1));
7160 /* Optimize x*pow(x,c) as pow(x,c+1). */
7161 if (fcode1 == BUILT_IN_POW
7162 || fcode1 == BUILT_IN_POWF
7163 || fcode1 == BUILT_IN_POWL)
7165 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7166 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7167 1)));
7168 if (TREE_CODE (arg11) == REAL_CST
7169 && ! TREE_CONSTANT_OVERFLOW (arg11)
7170 && operand_equal_p (arg0, arg10, 0))
7172 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7173 REAL_VALUE_TYPE c;
7174 tree arg, arglist;
7176 c = TREE_REAL_CST (arg11);
7177 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7178 arg = build_real (type, c);
7179 arglist = build_tree_list (NULL_TREE, arg);
7180 arglist = tree_cons (NULL_TREE, arg0, arglist);
7181 return build_function_call_expr (powfn, arglist);
7185 /* Optimize pow(x,c)*x as pow(x,c+1). */
7186 if (fcode0 == BUILT_IN_POW
7187 || fcode0 == BUILT_IN_POWF
7188 || fcode0 == BUILT_IN_POWL)
7190 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7191 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7192 1)));
7193 if (TREE_CODE (arg01) == REAL_CST
7194 && ! TREE_CONSTANT_OVERFLOW (arg01)
7195 && operand_equal_p (arg1, arg00, 0))
7197 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7198 REAL_VALUE_TYPE c;
7199 tree arg, arglist;
7201 c = TREE_REAL_CST (arg01);
7202 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7203 arg = build_real (type, c);
7204 arglist = build_tree_list (NULL_TREE, arg);
7205 arglist = tree_cons (NULL_TREE, arg1, arglist);
7206 return build_function_call_expr (powfn, arglist);
7210 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7211 if (! optimize_size
7212 && operand_equal_p (arg0, arg1, 0))
7214 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7216 if (powfn)
7218 tree arg = build_real (type, dconst2);
7219 tree arglist = build_tree_list (NULL_TREE, arg);
7220 arglist = tree_cons (NULL_TREE, arg0, arglist);
7221 return build_function_call_expr (powfn, arglist);
7226 goto associate;
7228 case BIT_IOR_EXPR:
7229 bit_ior:
7230 if (integer_all_onesp (arg1))
7231 return omit_one_operand (type, arg1, arg0);
7232 if (integer_zerop (arg1))
7233 return non_lvalue (fold_convert (type, arg0));
7234 if (operand_equal_p (arg0, arg1, 0))
7235 return non_lvalue (fold_convert (type, arg0));
7237 /* ~X | X is -1. */
7238 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7239 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7241 t1 = build_int_cst (type, -1);
7242 t1 = force_fit_type (t1, 0, false, false);
7243 return omit_one_operand (type, t1, arg1);
7246 /* X | ~X is -1. */
7247 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7248 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7250 t1 = build_int_cst (type, -1);
7251 t1 = force_fit_type (t1, 0, false, false);
7252 return omit_one_operand (type, t1, arg0);
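#if 0
/* Editorial sketch (not compiled): in two's complement a value and its
   complement together set every bit, so the result is all-ones.  */
int x = 0x1234;
int r = x | ~x;   /* all bits set, i.e. -1 */
#endif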
7255 t1 = distribute_bit_expr (code, type, arg0, arg1);
7256 if (t1 != NULL_TREE)
7257 return t1;
7259 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7261 This results in more efficient code for machines without a NAND
7262 instruction.  Combine will canonicalize to the first form,
7263 which will allow use of NAND instructions provided by the
7264 backend if they exist. */
7265 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7266 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7268 return fold (build1 (BIT_NOT_EXPR, type,
7269 build2 (BIT_AND_EXPR, type,
7270 TREE_OPERAND (arg0, 0),
7271 TREE_OPERAND (arg1, 0))));
7274 /* See if this can be simplified into a rotate first. If that
7275 is unsuccessful, continue in the association code. */
7276 goto bit_rotate;
7278 case BIT_XOR_EXPR:
7279 if (integer_zerop (arg1))
7280 return non_lvalue (fold_convert (type, arg0));
7281 if (integer_all_onesp (arg1))
7282 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7283 if (operand_equal_p (arg0, arg1, 0))
7284 return omit_one_operand (type, integer_zero_node, arg0);
7286 /* ~X ^ X is -1. */
7287 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7288 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7290 t1 = build_int_cst (type, -1);
7291 t1 = force_fit_type (t1, 0, false, false);
7292 return omit_one_operand (type, t1, arg1);
7295 /* X ^ ~X is -1. */
7296 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7297 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7299 t1 = build_int_cst (type, -1);
7300 t1 = force_fit_type (t1, 0, false, false);
7301 return omit_one_operand (type, t1, arg0);
7304 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7305 with a constant, and the two constants have no bits in common,
7306 we should treat this as a BIT_IOR_EXPR since this may produce more
7307 simplifications. */
7308 if (TREE_CODE (arg0) == BIT_AND_EXPR
7309 && TREE_CODE (arg1) == BIT_AND_EXPR
7310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7311 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7312 && integer_zerop (const_binop (BIT_AND_EXPR,
7313 TREE_OPERAND (arg0, 1),
7314 TREE_OPERAND (arg1, 1), 0)))
7316 code = BIT_IOR_EXPR;
7317 goto bit_ior;
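#if 0
/* Editorial sketch (not compiled): when the two AND masks share no
   bits, at most one operand has a 1 in any position, so XOR and IOR
   agree.  */
unsigned a = 0xab, b = 0xcd;
unsigned x = (a & 0x0f) ^ (b & 0xf0);   /* 0x0b ^ 0xc0 == 0xcb */
unsigned o = (a & 0x0f) | (b & 0xf0);   /* 0x0b | 0xc0 == 0xcb */
#endif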
7320 /* See if this can be simplified into a rotate first. If that
7321 is unsuccessful, continue in the association code. */
7322 goto bit_rotate;
7324 case BIT_AND_EXPR:
7325 if (integer_all_onesp (arg1))
7326 return non_lvalue (fold_convert (type, arg0));
7327 if (integer_zerop (arg1))
7328 return omit_one_operand (type, arg1, arg0);
7329 if (operand_equal_p (arg0, arg1, 0))
7330 return non_lvalue (fold_convert (type, arg0));
7332 /* ~X & X is always zero. */
7333 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7334 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7335 return omit_one_operand (type, integer_zero_node, arg1);
7337 /* X & ~X is always zero. */
7338 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7339 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7340 return omit_one_operand (type, integer_zero_node, arg0);
7342 t1 = distribute_bit_expr (code, type, arg0, arg1);
7343 if (t1 != NULL_TREE)
7344 return t1;
7345 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7346 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7347 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7349 unsigned int prec
7350 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7352 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7353 && (~TREE_INT_CST_LOW (arg1)
7354 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7355 return fold_convert (type, TREE_OPERAND (arg0, 0));
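#if 0
/* Editorial sketch (not compiled): widening an unsigned char already
   zero-extends, so the mask is redundant.  */
unsigned char c = 200;
int masked = (int) c & 0377;   /* 200 */
int plain  = (int) c;          /* 200; same value with the mask dropped */
#endif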
7358 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7360 This results in more efficient code for machines without a NOR
7361 instruction.  Combine will canonicalize to the first form,
7362 which will allow use of NOR instructions provided by the
7363 backend if they exist. */
7364 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7365 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7367 return fold (build1 (BIT_NOT_EXPR, type,
7368 build2 (BIT_IOR_EXPR, type,
7369 TREE_OPERAND (arg0, 0),
7370 TREE_OPERAND (arg1, 0))));
7373 goto associate;
7375 case RDIV_EXPR:
7376 /* Don't touch a floating-point divide by zero unless the mode
7377 of the constant can represent infinity. */
7378 if (TREE_CODE (arg1) == REAL_CST
7379 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7380 && real_zerop (arg1))
7381 return t;
7383 /* (-A) / (-B) -> A / B */
7384 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7385 return fold (build2 (RDIV_EXPR, type,
7386 TREE_OPERAND (arg0, 0),
7387 negate_expr (arg1)));
7388 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7389 return fold (build2 (RDIV_EXPR, type,
7390 negate_expr (arg0),
7391 TREE_OPERAND (arg1, 0)));
7393 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7394 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7395 && real_onep (arg1))
7396 return non_lvalue (fold_convert (type, arg0));
7398 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7399 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7400 && real_minus_onep (arg1))
7401 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7403 /* If ARG1 is a constant, we can convert this to a multiply by the
7404 reciprocal. This does not have the same rounding properties,
7405 so only do this if -funsafe-math-optimizations. We can actually
7406 always safely do it if ARG1 is a power of two, but it's hard to
7407 tell if it is or not in a portable manner. */
7408 if (TREE_CODE (arg1) == REAL_CST)
7410 if (flag_unsafe_math_optimizations
7411 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7412 arg1, 0)))
7413 return fold (build2 (MULT_EXPR, type, arg0, tem));
7414 /* Find the reciprocal if optimizing and the result is exact. */
7415 if (optimize)
7417 REAL_VALUE_TYPE r;
7418 r = TREE_REAL_CST (arg1);
7419 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
7421 tem = build_real (type, r);
7422 return fold (build2 (MULT_EXPR, type, arg0, tem));
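#if 0
/* Editorial sketch (not compiled): a reciprocal is exact when the
   divisor is a power of two, so x/4.0 and x*0.25 are bit-identical,
   while 1.0/10.0 is not exactly representable, so x/10.0 is left
   alone unless -funsafe-math-optimizations is given.  */
double exact   = 3.0 / 4.0;    /* identical to 3.0 * 0.25 */
double inexact = 3.0 / 10.0;   /* may differ from 3.0 * 0.1 in the
                                  last bit */
#endif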
7426 /* Convert A/B/C to A/(B*C). */
7427 if (flag_unsafe_math_optimizations
7428 && TREE_CODE (arg0) == RDIV_EXPR)
7429 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7430 fold (build2 (MULT_EXPR, type,
7431 TREE_OPERAND (arg0, 1), arg1))));
7433 /* Convert A/(B/C) to (A/B)*C. */
7434 if (flag_unsafe_math_optimizations
7435 && TREE_CODE (arg1) == RDIV_EXPR)
7436 return fold (build2 (MULT_EXPR, type,
7437 fold (build2 (RDIV_EXPR, type, arg0,
7438 TREE_OPERAND (arg1, 0))),
7439 TREE_OPERAND (arg1, 1)));
7441 /* Convert C1/(X*C2) into (C1/C2)/X. */
7442 if (flag_unsafe_math_optimizations
7443 && TREE_CODE (arg1) == MULT_EXPR
7444 && TREE_CODE (arg0) == REAL_CST
7445 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7447 tree tem = const_binop (RDIV_EXPR, arg0,
7448 TREE_OPERAND (arg1, 1), 0);
7449 if (tem)
7450 return fold (build2 (RDIV_EXPR, type, tem,
7451 TREE_OPERAND (arg1, 0)));
7454 if (flag_unsafe_math_optimizations)
7456 enum built_in_function fcode = builtin_mathfn_code (arg1);
7457 /* Optimize x/expN(y) into x*expN(-y). */
7458 if (BUILTIN_EXPONENT_P (fcode))
7460 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7461 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7462 tree arglist = build_tree_list (NULL_TREE,
7463 fold_convert (type, arg));
7464 arg1 = build_function_call_expr (expfn, arglist);
7465 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7468 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7469 if (fcode == BUILT_IN_POW
7470 || fcode == BUILT_IN_POWF
7471 || fcode == BUILT_IN_POWL)
7473 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7474 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7475 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7476 tree neg11 = fold_convert (type, negate_expr (arg11));
7477 tree arglist = tree_cons (NULL_TREE, arg10,
7478 build_tree_list (NULL_TREE, neg11));
7479 arg1 = build_function_call_expr (powfn, arglist);
7480 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7484 if (flag_unsafe_math_optimizations)
7486 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7487 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7489 /* Optimize sin(x)/cos(x) as tan(x). */
7490 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7491 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7492 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7493 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7494 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7496 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7498 if (tanfn != NULL_TREE)
7499 return build_function_call_expr (tanfn,
7500 TREE_OPERAND (arg0, 1));
7503 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7504 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7505 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7506 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7507 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7508 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7510 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7512 if (tanfn != NULL_TREE)
7514 tree tmp = TREE_OPERAND (arg0, 1);
7515 tmp = build_function_call_expr (tanfn, tmp);
7516 return fold (build2 (RDIV_EXPR, type,
7517 build_real (type, dconst1), tmp));
7521 /* Optimize pow(x,c)/x as pow(x,c-1). */
7522 if (fcode0 == BUILT_IN_POW
7523 || fcode0 == BUILT_IN_POWF
7524 || fcode0 == BUILT_IN_POWL)
7526 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7527 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7528 if (TREE_CODE (arg01) == REAL_CST
7529 && ! TREE_CONSTANT_OVERFLOW (arg01)
7530 && operand_equal_p (arg1, arg00, 0))
7532 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7533 REAL_VALUE_TYPE c;
7534 tree arg, arglist;
7536 c = TREE_REAL_CST (arg01);
7537 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7538 arg = build_real (type, c);
7539 arglist = build_tree_list (NULL_TREE, arg);
7540 arglist = tree_cons (NULL_TREE, arg1, arglist);
7541 return build_function_call_expr (powfn, arglist);
7545 goto binary;
7547 case TRUNC_DIV_EXPR:
7548 case ROUND_DIV_EXPR:
7549 case FLOOR_DIV_EXPR:
7550 case CEIL_DIV_EXPR:
7551 case EXACT_DIV_EXPR:
7552 if (integer_onep (arg1))
7553 return non_lvalue (fold_convert (type, arg0));
7554 if (integer_zerop (arg1))
7555 return t;
7556 /* X / -1 is -X. */
7557 if (!TYPE_UNSIGNED (type)
7558 && TREE_CODE (arg1) == INTEGER_CST
7559 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7560 && TREE_INT_CST_HIGH (arg1) == -1)
7561 return fold_convert (type, negate_expr (arg0));
7563 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7564 operation, EXACT_DIV_EXPR.
7566 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7567 At one time others generated faster code; it's not clear if they do
7568 after the last round of changes to the DIV code in expmed.c. */
7569 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7570 && multiple_of_p (type, arg0, arg1))
7571 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7573 if (TREE_CODE (arg1) == INTEGER_CST
7574 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7575 code, NULL_TREE)))
7576 return fold_convert (type, tem);
7578 goto binary;
7580 case CEIL_MOD_EXPR:
7581 case FLOOR_MOD_EXPR:
7582 case ROUND_MOD_EXPR:
7583 case TRUNC_MOD_EXPR:
7584 if (integer_onep (arg1))
7585 return omit_one_operand (type, integer_zero_node, arg0);
7586 if (integer_zerop (arg1))
7587 return t;
7589 /* X % -1 is zero. */
7590 if (!TYPE_UNSIGNED (type)
7591 && TREE_CODE (arg1) == INTEGER_CST
7592 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7593 && TREE_INT_CST_HIGH (arg1) == -1)
7594 return omit_one_operand (type, integer_zero_node, arg0);
7596 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7597 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7598 if (code == TRUNC_MOD_EXPR
7599 && TYPE_UNSIGNED (type)
7600 && integer_pow2p (arg1))
7602 unsigned HOST_WIDE_INT high, low;
7603 tree mask;
7604 int l;
7606 l = tree_log2 (arg1);
7607 if (l >= HOST_BITS_PER_WIDE_INT)
7609 high = ((unsigned HOST_WIDE_INT) 1
7610 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7611 low = -1;
7613 else
7615 high = 0;
7616 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7619 mask = build_int_cst_wide (type, low, high);
7620 return fold (build2 (BIT_AND_EXPR, type,
7621 fold_convert (type, arg0), mask));
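#if 0
/* Editorial sketch (not compiled): the mask computation above for an
   unsigned "x % 16": tree_log2 (16) == 4, low == (1 << 4) - 1 == 15,
   and indeed 37 % 16 == 5 == (37 & 15).  */
unsigned x = 37;
unsigned m = x % 16;          /* 5 */
unsigned a = x & (16 - 1);    /* 5 */
#endif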
7624 /* X % -C is the same as X % C. */
7625 if (code == TRUNC_MOD_EXPR
7626 && !TYPE_UNSIGNED (type)
7627 && TREE_CODE (arg1) == INTEGER_CST
7628 && TREE_INT_CST_HIGH (arg1) < 0
7629 && !flag_trapv
7630 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7631 && !sign_bit_p (arg1, arg1))
7632 return fold (build2 (code, type, fold_convert (type, arg0),
7633 fold_convert (type, negate_expr (arg1))));
7635 /* X % -Y is the same as X % Y. */
7636 if (code == TRUNC_MOD_EXPR
7637 && !TYPE_UNSIGNED (type)
7638 && TREE_CODE (arg1) == NEGATE_EXPR
7639 && !flag_trapv)
7640 return fold (build2 (code, type, fold_convert (type, arg0),
7641 fold_convert (type, TREE_OPERAND (arg1, 0))));
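#if 0
/* Editorial sketch (not compiled): C remainder takes its sign from
   the dividend, so negating the divisor never changes the result.
   The flag_trapv and INT_MIN guards above only protect the negation
   itself.  */
int a =  7 % -3;   /*  1, same as  7 % 3 */
int b = -7 % -3;   /* -1, same as -7 % 3 */
#endif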
7643 if (TREE_CODE (arg1) == INTEGER_CST
7644 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7645 code, NULL_TREE)))
7646 return fold_convert (type, tem);
7648 goto binary;
7650 case LROTATE_EXPR:
7651 case RROTATE_EXPR:
7652 if (integer_all_onesp (arg0))
7653 return omit_one_operand (type, arg0, arg1);
7654 goto shift;
7656 case RSHIFT_EXPR:
7657 /* Optimize -1 >> x for arithmetic right shifts. */
7658 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7659 return omit_one_operand (type, arg0, arg1);
7660 /* ... fall through ... */
7662 case LSHIFT_EXPR:
7663 shift:
7664 if (integer_zerop (arg1))
7665 return non_lvalue (fold_convert (type, arg0));
7666 if (integer_zerop (arg0))
7667 return omit_one_operand (type, arg0, arg1);
7669 /* Since a negative shift count is not well-defined,
7670 don't try to compute it in the compiler. */
7671 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7672 return t;
7673 /* Rewrite an LROTATE_EXPR by a constant into an
7674 RROTATE_EXPR by a new constant. */
7675 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7677 tree tem = build_int_cst (NULL_TREE,
7678 GET_MODE_BITSIZE (TYPE_MODE (type)));
7679 tem = fold_convert (TREE_TYPE (arg1), tem);
7680 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7681 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
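#if 0
/* Editorial sketch (not compiled): rotating left by C is the same as
   rotating right by width - C.  For a 32-bit value:  */
unsigned x = 0x12345678u;
unsigned l = (x << 8) | (x >> (32 - 8));    /* rotl  8 -> 0x34567812 */
unsigned r = (x >> 24) | (x << (32 - 24));  /* rotr 24 -> 0x34567812 */
#endif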
7684 /* If we have a rotate of a bit operation with the rotate count and
7685 the second operand of the bit operation both constant,
7686 permute the two operations. */
7687 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7688 && (TREE_CODE (arg0) == BIT_AND_EXPR
7689 || TREE_CODE (arg0) == BIT_IOR_EXPR
7690 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7691 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7692 return fold (build2 (TREE_CODE (arg0), type,
7693 fold (build2 (code, type,
7694 TREE_OPERAND (arg0, 0), arg1)),
7695 fold (build2 (code, type,
7696 TREE_OPERAND (arg0, 1), arg1))));
7698 /* Two consecutive rotates adding up to the width of the mode can
7699 be ignored. */
7700 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7701 && TREE_CODE (arg0) == RROTATE_EXPR
7702 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7703 && TREE_INT_CST_HIGH (arg1) == 0
7704 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7705 && ((TREE_INT_CST_LOW (arg1)
7706 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7707 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7708 return TREE_OPERAND (arg0, 0);
7710 goto binary;
7712 case MIN_EXPR:
7713 if (operand_equal_p (arg0, arg1, 0))
7714 return omit_one_operand (type, arg0, arg1);
7715 if (INTEGRAL_TYPE_P (type)
7716 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7717 return omit_one_operand (type, arg1, arg0);
7718 goto associate;
7720 case MAX_EXPR:
7721 if (operand_equal_p (arg0, arg1, 0))
7722 return omit_one_operand (type, arg0, arg1);
7723 if (INTEGRAL_TYPE_P (type)
7724 && TYPE_MAX_VALUE (type)
7725 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7726 return omit_one_operand (type, arg1, arg0);
7727 goto associate;
7729 case TRUTH_NOT_EXPR:
7730 /* The argument to invert_truthvalue must have Boolean type. */
7731 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7732 arg0 = fold_convert (boolean_type_node, arg0);
7734 /* Note that the operand of this must be an int
7735 and its values must be 0 or 1.
7736 ("true" is a fixed value perhaps depending on the language,
7737 but we don't handle values other than 1 correctly yet.) */
7738 tem = invert_truthvalue (arg0);
7739 /* Avoid infinite recursion. */
7740 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7742 tem = fold_single_bit_test (code, arg0, arg1, type);
7743 if (tem)
7744 return tem;
7745 return t;
7747 return fold_convert (type, tem);
7749 case TRUTH_ANDIF_EXPR:
7750 /* Note that the operands of this must be ints
7751 and their values must be 0 or 1.
7752 ("true" is a fixed value perhaps depending on the language.) */
7753 /* If first arg is constant zero, return it. */
7754 if (integer_zerop (arg0))
7755 return fold_convert (type, arg0);
7756 case TRUTH_AND_EXPR:
7757 /* If either arg is constant true, drop it. */
7758 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7759 return non_lvalue (fold_convert (type, arg1));
7760 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7761 /* Preserve sequence points. */
7762 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7763 return non_lvalue (fold_convert (type, arg0));
7764 /* If second arg is constant zero, result is zero, but first arg
7765 must be evaluated. */
7766 if (integer_zerop (arg1))
7767 return omit_one_operand (type, arg1, arg0);
7768 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7769 case will be handled here. */
7770 if (integer_zerop (arg0))
7771 return omit_one_operand (type, arg0, arg1);
7773 /* !X && X is always false. */
7774 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7775 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7776 return omit_one_operand (type, integer_zero_node, arg1);
7777 /* X && !X is always false. */
7778 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7779 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7780 return omit_one_operand (type, integer_zero_node, arg0);
7782 truth_andor:
7783 /* We only do these simplifications if we are optimizing. */
7784 if (!optimize)
7785 return t;
7787 /* Check for things like (A || B) && (A || C). We can convert this
7788 to A || (B && C). Note that either operator can be any of the four
7789 truth and/or operations and the transformation will still be
7790 valid. Also note that we only care about order for the
7791 ANDIF and ORIF operators. If B contains side effects, this
7792 might change the truth-value of A. */
7793 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7794 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7795 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7796 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7797 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7798 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7800 tree a00 = TREE_OPERAND (arg0, 0);
7801 tree a01 = TREE_OPERAND (arg0, 1);
7802 tree a10 = TREE_OPERAND (arg1, 0);
7803 tree a11 = TREE_OPERAND (arg1, 1);
7804 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7805 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7806 && (code == TRUTH_AND_EXPR
7807 || code == TRUTH_OR_EXPR));
7809 if (operand_equal_p (a00, a10, 0))
7810 return fold (build2 (TREE_CODE (arg0), type, a00,
7811 fold (build2 (code, type, a01, a11))));
7812 else if (commutative && operand_equal_p (a00, a11, 0))
7813 return fold (build2 (TREE_CODE (arg0), type, a00,
7814 fold (build2 (code, type, a01, a10))));
7815 else if (commutative && operand_equal_p (a01, a10, 0))
7816 return fold (build2 (TREE_CODE (arg0), type, a01,
7817 fold (build2 (code, type, a00, a11))));
7819 /* This case is tricky because we must either have commutative
7820 operators or else A10 must not have side-effects. */
7822 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7823 && operand_equal_p (a01, a11, 0))
7824 return fold (build2 (TREE_CODE (arg0), type,
7825 fold (build2 (code, type, a00, a10)),
7826 a01));
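#if 0
/* Editorial sketch (not compiled): the distribution above on plain
   ints; if a is true both sides are true, otherwise both reduce to
   b && c.  The TREE_SIDE_EFFECTS checks keep evaluation order safe.  */
int a = 0, b = 1, c = 0;
int lhs = (a || b) && (a || c);   /* 0 */
int rhs = a || (b && c);          /* 0 */
#endif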
7829 /* See if we can build a range comparison. */
7830 if (0 != (tem = fold_range_test (t)))
7831 return tem;
7833 /* Check for the possibility of merging component references. If our
7834 lhs is another similar operation, try to merge its rhs with our
7835 rhs. Then try to merge our lhs and rhs. */
7836 if (TREE_CODE (arg0) == code
7837 && 0 != (tem = fold_truthop (code, type,
7838 TREE_OPERAND (arg0, 1), arg1)))
7839 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7841 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7842 return tem;
7844 return t;
7846 case TRUTH_ORIF_EXPR:
7847 /* Note that the operands of this must be ints
7848 and their values must be 0 or true.
7849 ("true" is a fixed value perhaps depending on the language.) */
7850 /* If first arg is constant true, return it. */
7851 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7852 return fold_convert (type, arg0);
7853 case TRUTH_OR_EXPR:
7854 /* If either arg is constant zero, drop it. */
7855 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7856 return non_lvalue (fold_convert (type, arg1));
7857 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7858 /* Preserve sequence points. */
7859 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7860 return non_lvalue (fold_convert (type, arg0));
7861 /* If second arg is constant true, result is true, but we must
7862 evaluate first arg. */
7863 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7864 return omit_one_operand (type, arg1, arg0);
7865 /* Likewise for first arg, but note this only occurs here for
7866 TRUTH_OR_EXPR. */
7867 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7868 return omit_one_operand (type, arg0, arg1);
7870 /* !X || X is always true. */
7871 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7872 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7873 return omit_one_operand (type, integer_one_node, arg1);
7874 /* X || !X is always true. */
7875 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7876 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7877 return omit_one_operand (type, integer_one_node, arg0);
7879 goto truth_andor;
7881 case TRUTH_XOR_EXPR:
7882 /* If the second arg is constant zero, drop it. */
7883 if (integer_zerop (arg1))
7884 return non_lvalue (fold_convert (type, arg0));
7885 /* If the second arg is constant true, this is a logical inversion. */
7886 if (integer_onep (arg1))
7887 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7888 /* Identical arguments cancel to zero. */
7889 if (operand_equal_p (arg0, arg1, 0))
7890 return omit_one_operand (type, integer_zero_node, arg0);
7892 /* !X ^ X is always true. */
7893 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7894 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7895 return omit_one_operand (type, integer_one_node, arg1);
7897 /* X ^ !X is always true. */
7898 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7899 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7900 return omit_one_operand (type, integer_one_node, arg0);
7902 return t;
7904 case EQ_EXPR:
7905 case NE_EXPR:
7906 case LT_EXPR:
7907 case GT_EXPR:
7908 case LE_EXPR:
7909 case GE_EXPR:
7910 /* If one arg is a real or integer constant, put it last. */
7911 if (tree_swap_operands_p (arg0, arg1, true))
7912 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7914 /* If this is an equality comparison of the address of a non-weak
7915 object against zero, then we know the result. */
7916 if ((code == EQ_EXPR || code == NE_EXPR)
7917 && TREE_CODE (arg0) == ADDR_EXPR
7918 && DECL_P (TREE_OPERAND (arg0, 0))
7919 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7920 && integer_zerop (arg1))
7921 return constant_boolean_node (code != EQ_EXPR, type);
7923 /* If this is an equality comparison of the address of two non-weak,
7924 unaliased symbols, neither of which is extern (since we do not
7925 have access to attributes for externs), then we know the result. */
7926 if ((code == EQ_EXPR || code == NE_EXPR)
7927 && TREE_CODE (arg0) == ADDR_EXPR
7928 && DECL_P (TREE_OPERAND (arg0, 0))
7929 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7930 && ! lookup_attribute ("alias",
7931 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7932 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7933 && TREE_CODE (arg1) == ADDR_EXPR
7934 && DECL_P (TREE_OPERAND (arg1, 0))
7935 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7936 && ! lookup_attribute ("alias",
7937 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7938 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7939 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7940 ? code == EQ_EXPR : code != EQ_EXPR,
7941 type);
7943 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7945 tree targ0 = strip_float_extensions (arg0);
7946 tree targ1 = strip_float_extensions (arg1);
7947 tree newtype = TREE_TYPE (targ0);
7949 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7950 newtype = TREE_TYPE (targ1);
7952 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7953 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7954 return fold (build2 (code, type, fold_convert (newtype, targ0),
7955 fold_convert (newtype, targ1)));
7957 /* (-a) CMP (-b) -> b CMP a */
7958 if (TREE_CODE (arg0) == NEGATE_EXPR
7959 && TREE_CODE (arg1) == NEGATE_EXPR)
7960 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7961 TREE_OPERAND (arg0, 0)));
7963 if (TREE_CODE (arg1) == REAL_CST)
7965 REAL_VALUE_TYPE cst;
7966 cst = TREE_REAL_CST (arg1);
7968 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7969 if (TREE_CODE (arg0) == NEGATE_EXPR)
7970 return
7971 fold (build2 (swap_tree_comparison (code), type,
7972 TREE_OPERAND (arg0, 0),
7973 build_real (TREE_TYPE (arg1),
7974 REAL_VALUE_NEGATE (cst))));
7976 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7977 /* a CMP (-0) -> a CMP 0 */
7978 if (REAL_VALUE_MINUS_ZERO (cst))
7979 return fold (build2 (code, type, arg0,
7980 build_real (TREE_TYPE (arg1), dconst0)));
7982 /* x != NaN is always true, other ops are always false. */
7983 if (REAL_VALUE_ISNAN (cst)
7984 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7986 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7987 return omit_one_operand (type, tem, arg0);
7990 /* Fold comparisons against infinity. */
7991 if (REAL_VALUE_ISINF (cst))
7993 tem = fold_inf_compare (code, type, arg0, arg1);
7994 if (tem != NULL_TREE)
7995 return tem;
7999 /* If this is a comparison of a real constant with a PLUS_EXPR
8000 or a MINUS_EXPR of a real constant, we can convert it into a
8001 comparison with a revised real constant, provided unsafe math
8002 optimizations are enabled and no overflow occurs. */
8003 if (flag_unsafe_math_optimizations
8004 && TREE_CODE (arg1) == REAL_CST
8005 && (TREE_CODE (arg0) == PLUS_EXPR
8006 || TREE_CODE (arg0) == MINUS_EXPR)
8007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8008 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8009 ? MINUS_EXPR : PLUS_EXPR,
8010 arg1, TREE_OPERAND (arg0, 1), 0))
8011 && ! TREE_CONSTANT_OVERFLOW (tem))
8012 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8014 /* Likewise, we can simplify a comparison of a real constant with
8015 a MINUS_EXPR whose first operand is also a real constant, i.e.
8016 (c1 - x) < c2 becomes x > c1-c2. */
8017 if (flag_unsafe_math_optimizations
8018 && TREE_CODE (arg1) == REAL_CST
8019 && TREE_CODE (arg0) == MINUS_EXPR
8020 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8021 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8022 arg1, 0))
8023 && ! TREE_CONSTANT_OVERFLOW (tem))
8024 return fold (build2 (swap_tree_comparison (code), type,
8025 TREE_OPERAND (arg0, 1), tem));
8027 /* Fold comparisons against built-in math functions. */
8028 if (TREE_CODE (arg1) == REAL_CST
8029 && flag_unsafe_math_optimizations
8030 && ! flag_errno_math)
8032 enum built_in_function fcode = builtin_mathfn_code (arg0);
8034 if (fcode != END_BUILTINS)
8036 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8037 if (tem != NULL_TREE)
8038 return tem;
8043 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8044 if (TREE_CONSTANT (arg1)
8045 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8046 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8047 /* This optimization is invalid for ordered comparisons
8048 if CONST+INCR overflows or if foo+incr might overflow.
8049 This optimization is invalid for floating point due to rounding.
8050 For pointer types we assume overflow doesn't happen. */
8051 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8052 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8053 && (code == EQ_EXPR || code == NE_EXPR))))
8055 tree varop, newconst;
8057 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8059 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8060 arg1, TREE_OPERAND (arg0, 1)));
8061 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8062 TREE_OPERAND (arg0, 0),
8063 TREE_OPERAND (arg0, 1));
8065 else
8067 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8068 arg1, TREE_OPERAND (arg0, 1)));
8069 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8070 TREE_OPERAND (arg0, 0),
8071 TREE_OPERAND (arg0, 1));
8075 /* If VAROP is a reference to a bitfield, we must mask
8076 the constant by the width of the field. */
8077 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8078 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8079 && host_integerp (DECL_SIZE (TREE_OPERAND
8080 (TREE_OPERAND (varop, 0), 1)), 1))
8082 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8083 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8084 tree folded_compare, shift;
8086 /* First check whether the comparison would come out
8087 always the same. If we don't do that we would
8088 change the meaning with the masking. */
8089 folded_compare = fold (build2 (code, type,
8090 TREE_OPERAND (varop, 0), arg1));
8091 if (integer_zerop (folded_compare)
8092 || integer_onep (folded_compare))
8093 return omit_one_operand (type, folded_compare, varop);
8095 shift = build_int_cst (NULL_TREE,
8096 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8097 shift = fold_convert (TREE_TYPE (varop), shift);
8098 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8099 newconst, shift));
8100 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8101 newconst, shift));
8104 return fold (build2 (code, type, varop, newconst));
8107 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8108 This transformation affects the cases which are handled in later
8109 optimizations involving comparisons with non-negative constants. */
8110 if (TREE_CODE (arg1) == INTEGER_CST
8111 && TREE_CODE (arg0) != INTEGER_CST
8112 && tree_int_cst_sgn (arg1) > 0)
8114 switch (code)
8116 case GE_EXPR:
8117 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8118 return fold (build2 (GT_EXPR, type, arg0, arg1));
8120 case LT_EXPR:
8121 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8122 return fold (build2 (LE_EXPR, type, arg0, arg1));
8124 default:
8125 break;
8129 /* Comparisons with the highest or lowest possible integer of
8130 the specified size will have known values.
8132 This is quite similar to fold_relational_hi_lo; however, my
8133 attempts to share the code have been nothing but trouble.
8134 I give up for now. */
8136 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8138 if (TREE_CODE (arg1) == INTEGER_CST
8139 && ! TREE_CONSTANT_OVERFLOW (arg1)
8140 && width <= HOST_BITS_PER_WIDE_INT
8141 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8142 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8144 unsigned HOST_WIDE_INT signed_max;
8145 unsigned HOST_WIDE_INT max, min;
8147 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8149 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8151 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8152 min = 0;
8154 else
8156 max = signed_max;
8157 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
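/* Editorial note: e.g. for width == 8, signed_max == 127; an unsigned
   type then gets max == 255, min == 0, while a signed one gets
   max == 127 and min == the bit pattern of -128.  */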
8160 if (TREE_INT_CST_HIGH (arg1) == 0
8161 && TREE_INT_CST_LOW (arg1) == max)
8162 switch (code)
8164 case GT_EXPR:
8165 return omit_one_operand (type, integer_zero_node, arg0);
8167 case GE_EXPR:
8168 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8170 case LE_EXPR:
8171 return omit_one_operand (type, integer_one_node, arg0);
8173 case LT_EXPR:
8174 return fold (build2 (NE_EXPR, type, arg0, arg1));
8176 /* The GE_EXPR and LT_EXPR cases above are not normally
8177 reached because of previous transformations. */
8179 default:
8180 break;
8182 else if (TREE_INT_CST_HIGH (arg1) == 0
8183 && TREE_INT_CST_LOW (arg1) == max - 1)
8184 switch (code)
8186 case GT_EXPR:
8187 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8188 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8189 case LE_EXPR:
8190 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8191 return fold (build2 (NE_EXPR, type, arg0, arg1));
8192 default:
8193 break;
8195 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8196 && TREE_INT_CST_LOW (arg1) == min)
8197 switch (code)
8199 case LT_EXPR:
8200 return omit_one_operand (type, integer_zero_node, arg0);
8202 case LE_EXPR:
8203 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8205 case GE_EXPR:
8206 return omit_one_operand (type, integer_one_node, arg0);
8208 case GT_EXPR:
8209 return fold (build2 (NE_EXPR, type, arg0, arg1));
8211 default:
8212 break;
8214 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8215 && TREE_INT_CST_LOW (arg1) == min + 1)
8216 switch (code)
8218 case GE_EXPR:
8219 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8220 return fold (build2 (NE_EXPR, type, arg0, arg1));
8221 case LT_EXPR:
8222 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8223 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8224 default:
8225 break;
8228 else if (!in_gimple_form
8229 && TREE_INT_CST_HIGH (arg1) == 0
8230 && TREE_INT_CST_LOW (arg1) == signed_max
8231 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8232 /* signed_type does not work on pointer types. */
8233 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8235 /* The following case also applies to X < signed_max+1
8236 and X >= signed_max+1 because of previous transformations. */
8237 if (code == LE_EXPR || code == GT_EXPR)
8239 tree st0, st1;
8240 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8241 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8242 return fold
8243 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8244 type, fold_convert (st0, arg0),
8245 fold_convert (st1, integer_zero_node)));
8251 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8252 a MINUS_EXPR of a constant, we can convert it into a comparison with
8253 a revised constant as long as no overflow occurs. */
8254 if ((code == EQ_EXPR || code == NE_EXPR)
8255 && TREE_CODE (arg1) == INTEGER_CST
8256 && (TREE_CODE (arg0) == PLUS_EXPR
8257 || TREE_CODE (arg0) == MINUS_EXPR)
8258 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8259 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8260 ? MINUS_EXPR : PLUS_EXPR,
8261 arg1, TREE_OPERAND (arg0, 1), 0))
8262 && ! TREE_CONSTANT_OVERFLOW (tem))
8263 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8265 /* Similarly for a NEGATE_EXPR. */
8266 else if ((code == EQ_EXPR || code == NE_EXPR)
8267 && TREE_CODE (arg0) == NEGATE_EXPR
8268 && TREE_CODE (arg1) == INTEGER_CST
8269 && 0 != (tem = negate_expr (arg1))
8270 && TREE_CODE (tem) == INTEGER_CST
8271 && ! TREE_CONSTANT_OVERFLOW (tem))
8272 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8274 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8275 for !=. Don't do this for ordered comparisons due to overflow. */
8276 else if ((code == NE_EXPR || code == EQ_EXPR)
8277 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8278 return fold (build2 (code, type,
8279 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8281 /* If we are widening one operand of an integer comparison,
8282 see if the other operand is similarly being widened. Perhaps we
8283 can do the comparison in the narrower type. */
8284 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8285 && TREE_CODE (arg0) == NOP_EXPR
8286 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8287 && (code == EQ_EXPR || code == NE_EXPR
8288 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8289 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8290 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8291 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8292 || (TREE_CODE (t1) == INTEGER_CST
8293 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8294 return fold (build2 (code, type, tem,
8295 fold_convert (TREE_TYPE (tem), t1)));
8297 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8298 constant, we can simplify it. */
8299 else if (TREE_CODE (arg1) == INTEGER_CST
8300 && (TREE_CODE (arg0) == MIN_EXPR
8301 || TREE_CODE (arg0) == MAX_EXPR)
8302 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8303 return optimize_minmax_comparison (t);
8305 /* If we are comparing an ABS_EXPR with a constant, we can
8306 convert all the cases into explicit comparisons, but they may
8307 well not be faster than doing the ABS and one comparison.
8308 But ABS (X) <= C is a range comparison, which becomes a subtraction
8309 and a comparison, and is probably faster. */
8310 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8311 && TREE_CODE (arg0) == ABS_EXPR
8312 && ! TREE_SIDE_EFFECTS (arg0)
8313 && (0 != (tem = negate_expr (arg1)))
8314 && TREE_CODE (tem) == INTEGER_CST
8315 && ! TREE_CONSTANT_OVERFLOW (tem))
8316 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8317 build2 (GE_EXPR, type,
8318 TREE_OPERAND (arg0, 0), tem),
8319 build2 (LE_EXPR, type,
8320 TREE_OPERAND (arg0, 0), arg1)));
8322 /* If this is an EQ or NE comparison with zero and ARG0 is
8323 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8324 two operations, but the latter can be done in one less insn
8325 on machines that have only two-operand insns or on which a
8326 constant cannot be the first operand. */
8327 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8328 && TREE_CODE (arg0) == BIT_AND_EXPR)
8330 tree arg00 = TREE_OPERAND (arg0, 0);
8331 tree arg01 = TREE_OPERAND (arg0, 1);
8332 if (TREE_CODE (arg00) == LSHIFT_EXPR
8333 && integer_onep (TREE_OPERAND (arg00, 0)))
8334 return
8335 fold (build2 (code, type,
8336 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8337 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8338 arg01, TREE_OPERAND (arg00, 1)),
8339 fold_convert (TREE_TYPE (arg0),
8340 integer_one_node)),
8341 arg1));
8342 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8343 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8344 return
8345 fold (build2 (code, type,
8346 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8347 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8348 arg00, TREE_OPERAND (arg01, 1)),
8349 fold_convert (TREE_TYPE (arg0),
8350 integer_one_node)),
8351 arg1));
8354 /* If this is an NE or EQ comparison of zero against the result of a
8355 signed MOD operation whose second operand is a power of 2, make
8356 the MOD operation unsigned since it is simpler and equivalent. */
8357 if ((code == NE_EXPR || code == EQ_EXPR)
8358 && integer_zerop (arg1)
8359 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8360 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8361 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8362 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8363 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8364 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8366 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8367 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8368 fold_convert (newtype,
8369 TREE_OPERAND (arg0, 0)),
8370 fold_convert (newtype,
8371 TREE_OPERAND (arg0, 1))));
8373 return fold (build2 (code, type, newmod,
8374 fold_convert (newtype, arg1)));
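#if 0
/* Editorial sketch (not compiled): whether a remainder by a power of
   two is zero depends only on the low bits, so the signedness of the
   MOD is irrelevant when comparing against zero.  */
int x = -8;
int s = (x % 4) == 0;                  /* 1 */
int u = ((unsigned) x % 4u) == 0;      /* 1; the unsigned MOD agrees */
#endif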
8377 /* If this is an NE comparison of zero with an AND of one, remove the
8378 comparison since the AND will give the correct value. */
8379 if (code == NE_EXPR && integer_zerop (arg1)
8380 && TREE_CODE (arg0) == BIT_AND_EXPR
8381 && integer_onep (TREE_OPERAND (arg0, 1)))
8382 return fold_convert (type, arg0);
8384 /* If we have (A & C) == C where C is a power of 2, convert this into
8385 (A & C) != 0. Similarly for NE_EXPR. */
8386 if ((code == EQ_EXPR || code == NE_EXPR)
8387 && TREE_CODE (arg0) == BIT_AND_EXPR
8388 && integer_pow2p (TREE_OPERAND (arg0, 1))
8389 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8390 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8391 arg0, fold_convert (TREE_TYPE (arg0),
8392 integer_zero_node)));
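#if 0
/* Editorial sketch (not compiled): with a single-bit mask the AND can
   only yield 0 or C, so equality with C is the same as nonzero.  */
int a = 0x1c;
int t1 = (a & 8) == 8;   /* 1 */
int t2 = (a & 8) != 0;   /* 1; the equivalent test */
#endif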
8394 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8395 2, then fold the expression into shifts and logical operations. */
8396 tem = fold_single_bit_test (code, arg0, arg1, type);
8397 if (tem)
8398 return tem;
8400 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8401 Similarly for NE_EXPR. */
8402 if ((code == EQ_EXPR || code == NE_EXPR)
8403 && TREE_CODE (arg0) == BIT_AND_EXPR
8404 && TREE_CODE (arg1) == INTEGER_CST
8405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8407 tree dandnotc
8408 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8409 arg1, build1 (BIT_NOT_EXPR,
8410 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8411 TREE_OPERAND (arg0, 1))));
8412 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8413 if (integer_nonzerop (dandnotc))
8414 return omit_one_operand (type, rslt, arg0);
8417 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8418 Similarly for NE_EXPR. */
8419 if ((code == EQ_EXPR || code == NE_EXPR)
8420 && TREE_CODE (arg0) == BIT_IOR_EXPR
8421 && TREE_CODE (arg1) == INTEGER_CST
8422 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8424 tree candnotd
8425 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8426 TREE_OPERAND (arg0, 1),
8427 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8428 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8429 if (integer_nonzerop (candnotd))
8430 return omit_one_operand (type, rslt, arg0);
8433 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8434 and similarly for >= into !=. */
8435 if ((code == LT_EXPR || code == GE_EXPR)
8436 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8437 && TREE_CODE (arg1) == LSHIFT_EXPR
8438 && integer_onep (TREE_OPERAND (arg1, 0)))
8439 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8440 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8441 TREE_OPERAND (arg1, 1)),
8442 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8444 else if ((code == LT_EXPR || code == GE_EXPR)
8445 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8446 && (TREE_CODE (arg1) == NOP_EXPR
8447 || TREE_CODE (arg1) == CONVERT_EXPR)
8448 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8449 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8450 return
8451 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8452 fold_convert (TREE_TYPE (arg0),
8453 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8454 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8455 1))),
8456 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8458 /* Simplify comparison of something with itself. (For IEEE
8459 floating-point, we can only do some of these simplifications.) */
8460 if (operand_equal_p (arg0, arg1, 0))
8462 switch (code)
8464 case EQ_EXPR:
8465 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8466 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8467 return constant_boolean_node (1, type);
8468 break;
8470 case GE_EXPR:
8471 case LE_EXPR:
8472 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8473 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8474 return constant_boolean_node (1, type);
8475 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8477 case NE_EXPR:
8478 /* For NE, we can only do this simplification if integer
8479 or we don't honor IEEE floating point NaNs. */
8480 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8481 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8482 break;
8483 /* ... fall through ... */
8484 case GT_EXPR:
8485 case LT_EXPR:
8486 return constant_boolean_node (0, type);
8487 default:
8488 gcc_unreachable ();
8492 /* If we are comparing an expression that just has comparisons
8493 of two integer values, arithmetic expressions of those comparisons,
8494 and constants, we can simplify it. There are only three cases
8495 to check: the two values can either be equal, the first can be
8496 greater, or the second can be greater. Fold the expression for
8497 those three values. Since each value must be 0 or 1, we have
8498 eight possibilities, each of which corresponds to the constant 0
8499 or 1 or one of the six possible comparisons.
8501 This handles common cases like (a > b) == 0 but also handles
8502 expressions like ((x > y) - (y > x)) > 0, which supposedly
8503 occur in macroized code. */
8505 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8507 tree cval1 = 0, cval2 = 0;
8508 int save_p = 0;
8510 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8511 /* Don't handle degenerate cases here; they should already
8512 have been handled anyway. */
8513 && cval1 != 0 && cval2 != 0
8514 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8515 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8516 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8517 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8518 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8519 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8520 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8522 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8523 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8525 /* We can't just pass T to eval_subst in case cval1 or cval2
8526 was the same as ARG1. */
8528 tree high_result
8529 = fold (build2 (code, type,
8530 eval_subst (arg0, cval1, maxval,
8531 cval2, minval),
8532 arg1));
8533 tree equal_result
8534 = fold (build2 (code, type,
8535 eval_subst (arg0, cval1, maxval,
8536 cval2, maxval),
8537 arg1));
8538 tree low_result
8539 = fold (build2 (code, type,
8540 eval_subst (arg0, cval1, minval,
8541 cval2, maxval),
8542 arg1));
8544 /* All three of these results should be 0 or 1. Confirm they
8545 are. Then use those values to select the proper code
8546 to use. */
8548 if ((integer_zerop (high_result)
8549 || integer_onep (high_result))
8550 && (integer_zerop (equal_result)
8551 || integer_onep (equal_result))
8552 && (integer_zerop (low_result)
8553 || integer_onep (low_result)))
8555 /* Make a 3-bit mask with the high-order bit being the
8556 value for `>', the next for `=', and the low for `<'. */
8557 switch ((integer_onep (high_result) * 4)
8558 + (integer_onep (equal_result) * 2)
8559 + integer_onep (low_result))
8561 case 0:
8562 /* Always false. */
8563 return omit_one_operand (type, integer_zero_node, arg0);
8564 case 1:
8565 code = LT_EXPR;
8566 break;
8567 case 2:
8568 code = EQ_EXPR;
8569 break;
8570 case 3:
8571 code = LE_EXPR;
8572 break;
8573 case 4:
8574 code = GT_EXPR;
8575 break;
8576 case 5:
8577 code = NE_EXPR;
8578 break;
8579 case 6:
8580 code = GE_EXPR;
8581 break;
8582 case 7:
8583 /* Always true. */
8584 return omit_one_operand (type, integer_one_node, arg0);
8587 tem = build2 (code, type, cval1, cval2);
8588 if (save_p)
8589 return save_expr (tem);
8590 else
8591 return fold (tem);
8596 /* If this is a comparison of a field, we may be able to simplify it. */
8597 if (((TREE_CODE (arg0) == COMPONENT_REF
8598 && lang_hooks.can_use_bit_fields_p ())
8599 || TREE_CODE (arg0) == BIT_FIELD_REF)
8600 && (code == EQ_EXPR || code == NE_EXPR)
8601 /* Handle the constant case even without -O
8602 to make sure the warnings are given. */
8603 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8605 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8606 if (t1)
8607 return t1;
8610 /* If this is a comparison of complex values and either or both sides
8611 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8612 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8613 This may prevent needless evaluations. */
8614 if ((code == EQ_EXPR || code == NE_EXPR)
8615 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8616 && (TREE_CODE (arg0) == COMPLEX_EXPR
8617 || TREE_CODE (arg1) == COMPLEX_EXPR
8618 || TREE_CODE (arg0) == COMPLEX_CST
8619 || TREE_CODE (arg1) == COMPLEX_CST))
8621 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8622 tree real0, imag0, real1, imag1;
8624 arg0 = save_expr (arg0);
8625 arg1 = save_expr (arg1);
8626 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8627 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8628 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8629 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8631 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8632 : TRUTH_ORIF_EXPR),
8633 type,
8634 fold (build2 (code, type, real0, real1)),
8635 fold (build2 (code, type, imag0, imag1))));
8638 /* Optimize comparisons of strlen vs zero to a compare of the
8639 first character of the string vs zero. To wit,
8640 strlen(ptr) == 0 => *ptr == 0
8641 strlen(ptr) != 0 => *ptr != 0
8642 Other cases should reduce to one of these two (or a constant)
8643 due to the return value of strlen being unsigned. */
8644 if ((code == EQ_EXPR || code == NE_EXPR)
8645 && integer_zerop (arg1)
8646 && TREE_CODE (arg0) == CALL_EXPR)
8648 tree fndecl = get_callee_fndecl (arg0);
8649 tree arglist;
8651 if (fndecl
8652 && DECL_BUILT_IN (fndecl)
8653 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8654 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8655 && (arglist = TREE_OPERAND (arg0, 1))
8656 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8657 && ! TREE_CHAIN (arglist))
8658 return fold (build2 (code, type,
8659 build1 (INDIRECT_REF, char_type_node,
8660 TREE_VALUE (arglist)),
8661 fold_convert (char_type_node,
8662 integer_zero_node)));
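#if 0
/* Editorial sketch (not compiled, assumes <string.h>): strlen (p) == 0
   exactly when the first character is the terminating NUL, so the call
   is replaced by a single load.  */
const char *p = "";
int by_len  = strlen (p) == 0;   /* 1 */
int by_char = *p == 0;           /* 1; no call needed */
#endif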
8665 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8666 into a single range test. */
8667 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8668 && TREE_CODE (arg1) == INTEGER_CST
8669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8670 && !integer_zerop (TREE_OPERAND (arg0, 1))
8671 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8672 && !TREE_OVERFLOW (arg1))
8674 t1 = fold_div_compare (code, type, arg0, arg1);
8675 if (t1 != NULL_TREE)
8676 return t1;
8679 if ((code == EQ_EXPR || code == NE_EXPR)
8680 && !TREE_SIDE_EFFECTS (arg0)
8681 && integer_zerop (arg1)
8682 && tree_expr_nonzero_p (arg0))
8683 return constant_boolean_node (code == NE_EXPR, type);
8685 t1 = fold_relational_const (code, type, arg0, arg1);
8686 return t1 == NULL_TREE ? t : t1;
8688 case UNORDERED_EXPR:
8689 case ORDERED_EXPR:
8690 case UNLT_EXPR:
8691 case UNLE_EXPR:
8692 case UNGT_EXPR:
8693 case UNGE_EXPR:
8694 case UNEQ_EXPR:
8695 case LTGT_EXPR:
8696 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8698 t1 = fold_relational_const (code, type, arg0, arg1);
8699 if (t1 != NULL_TREE)
8700 return t1;
8703 /* If the first operand is NaN, the result is constant. */
8704 if (TREE_CODE (arg0) == REAL_CST
8705 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8706 && (code != LTGT_EXPR || ! flag_trapping_math))
8708 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8709 ? integer_zero_node
8710 : integer_one_node;
8711 return omit_one_operand (type, t1, arg1);
8714 /* If the second operand is NaN, the result is constant. */
8715 if (TREE_CODE (arg1) == REAL_CST
8716 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8717 && (code != LTGT_EXPR || ! flag_trapping_math))
8719 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8720 ? integer_zero_node
8721 : integer_one_node;
8722 return omit_one_operand (type, t1, arg0);
8725 /* Simplify unordered comparison of something with itself. */
8726 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8727 && operand_equal_p (arg0, arg1, 0))
8728 return constant_boolean_node (1, type);
8730 if (code == LTGT_EXPR
8731 && !flag_trapping_math
8732 && operand_equal_p (arg0, arg1, 0))
8733 return constant_boolean_node (0, type);
8735 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8737 tree targ0 = strip_float_extensions (arg0);
8738 tree targ1 = strip_float_extensions (arg1);
8739 tree newtype = TREE_TYPE (targ0);
8741 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8742 newtype = TREE_TYPE (targ1);
8744 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8745 return fold (build2 (code, type, fold_convert (newtype, targ0),
8746 fold_convert (newtype, targ1)));
8749 return t;
8751 case COND_EXPR:
8752 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8753 so all simple results must be passed through pedantic_non_lvalue. */
8754 if (TREE_CODE (arg0) == INTEGER_CST)
8756 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8757 /* Only optimize constant conditions when the selected branch
8758 has the same type as the COND_EXPR. This avoids optimizing
8759 away "c ? x : throw", where the throw has a void type. */
8760 if (! VOID_TYPE_P (TREE_TYPE (tem))
8761 || VOID_TYPE_P (type))
8762 return pedantic_non_lvalue (tem);
8763 return t;
8765 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8766 return pedantic_omit_one_operand (type, arg1, arg0);
8768 /* If we have A op B ? A : C, we may be able to convert this to a
8769 simpler expression, depending on the operation and the values
8770 of B and C. Signed zeros prevent all of these transformations,
8771 for reasons given above each one.
8773 Also try swapping the arguments and inverting the conditional. */
8774 if (COMPARISON_CLASS_P (arg0)
8775 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8776 arg1, TREE_OPERAND (arg0, 1))
8777 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8779 tem = fold_cond_expr_with_comparison (type, arg0,
8780 TREE_OPERAND (t, 1),
8781 TREE_OPERAND (t, 2));
8782 if (tem)
8783 return tem;
8786 if (COMPARISON_CLASS_P (arg0)
8787 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8788 TREE_OPERAND (t, 2),
8789 TREE_OPERAND (arg0, 1))
8790 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8792 tem = invert_truthvalue (arg0);
8793 if (COMPARISON_CLASS_P (tem))
8795 tem = fold_cond_expr_with_comparison (type, tem,
8796 TREE_OPERAND (t, 2),
8797 TREE_OPERAND (t, 1));
8798 if (tem)
8799 return tem;
8803 /* If the second operand is simpler than the third, swap them
8804 since that produces better jump optimization results. */
8805 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8806 TREE_OPERAND (t, 2), false))
8808 /* See if this can be inverted. If it can't, possibly because
8809 it was a floating-point inequality comparison, don't do
8810 anything. */
8811 tem = invert_truthvalue (arg0);
8813 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8814 return fold (build3 (code, type, tem,
8815 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8818 /* Convert A ? 1 : 0 to simply A. */
8819 if (integer_onep (TREE_OPERAND (t, 1))
8820 && integer_zerop (TREE_OPERAND (t, 2))
8821 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8822 call to fold will try to move the conversion inside
8823 a COND, which will recurse. In that case, the COND_EXPR
8824 is probably the best choice, so leave it alone. */
8825 && type == TREE_TYPE (arg0))
8826 return pedantic_non_lvalue (arg0);
8828 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8829 over COND_EXPR in cases such as floating point comparisons. */
8830 if (integer_zerop (TREE_OPERAND (t, 1))
8831 && integer_onep (TREE_OPERAND (t, 2))
8832 && truth_value_p (TREE_CODE (arg0)))
8833 return pedantic_non_lvalue (fold_convert (type,
8834 invert_truthvalue (arg0)));
8836 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8837 if (TREE_CODE (arg0) == LT_EXPR
8838 && integer_zerop (TREE_OPERAND (arg0, 1))
8839 && integer_zerop (TREE_OPERAND (t, 2))
8840 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8841 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8842 TREE_TYPE (tem), tem, arg1)));
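/* For example, with a 32-bit int A, "A < 0 ? 0x80000000 : 0" selects
   exactly the sign bit of A, so it folds to "A & 0x80000000";
   sign_bit_p is what verifies that the selected constant really is
   the sign bit of A's type.  */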
8844 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8845 already handled above. */
8846 if (TREE_CODE (arg0) == BIT_AND_EXPR
8847 && integer_onep (TREE_OPERAND (arg0, 1))
8848 && integer_zerop (TREE_OPERAND (t, 2))
8849 && integer_pow2p (arg1))
8851 tree tem = TREE_OPERAND (arg0, 0);
8852 STRIP_NOPS (tem);
8853 if (TREE_CODE (tem) == RSHIFT_EXPR
8854 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
8855 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8856 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8857 return fold (build2 (BIT_AND_EXPR, type,
8858 TREE_OPERAND (tem, 0), arg1));
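/* E.g. "(A >> 3) & 1 ? 8 : 0" tests bit 3 of A and, when it is set,
   yields the matching power of two, so the whole conditional folds to
   "A & 8"; tree_log2 checks that the shift count and the selected
   constant agree.  */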
8861 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8862 is probably obsolete because the first operand should be a
8863 truth value (that's why we have the two cases above), but let's
8864 leave it in until we can confirm this for all front-ends. */
8865 if (integer_zerop (TREE_OPERAND (t, 2))
8866 && TREE_CODE (arg0) == NE_EXPR
8867 && integer_zerop (TREE_OPERAND (arg0, 1))
8868 && integer_pow2p (arg1)
8869 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8870 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8871 arg1, OEP_ONLY_CONST))
8872 return pedantic_non_lvalue (fold_convert (type,
8873 TREE_OPERAND (arg0, 0)));
8875 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8876 if (integer_zerop (TREE_OPERAND (t, 2))
8877 && truth_value_p (TREE_CODE (arg0))
8878 && truth_value_p (TREE_CODE (arg1)))
8879 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8881 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8882 if (integer_onep (TREE_OPERAND (t, 2))
8883 && truth_value_p (TREE_CODE (arg0))
8884 && truth_value_p (TREE_CODE (arg1)))
8886 /* Only perform transformation if ARG0 is easily inverted. */
8887 tem = invert_truthvalue (arg0);
8888 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8889 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8892 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8893 if (integer_zerop (arg1)
8894 && truth_value_p (TREE_CODE (arg0))
8895 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8897 /* Only perform transformation if ARG0 is easily inverted. */
8898 tem = invert_truthvalue (arg0);
8899 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8900 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8901 TREE_OPERAND (t, 2)));
8904 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8905 if (integer_onep (arg1)
8906 && truth_value_p (TREE_CODE (arg0))
8907 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8908 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8909 TREE_OPERAND (t, 2)));
8911 return t;
8913 case COMPOUND_EXPR:
8914 /* When pedantic, a compound expression can be neither an lvalue
8915 nor an integer constant expression. */
8916 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8917 return t;
8918 /* Don't let (0, 0) be a null pointer constant. */
8919 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8920 : fold_convert (type, arg1);
8921 return pedantic_non_lvalue (tem);
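/* For instance "(0, 7)" folds here to 7 converted to the result type,
   while "(0, 0)" is wrapped in a NOP_EXPR so that the result is still
   a zero of the right type but no longer a literal 0 that could act
   as a null pointer constant.  */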
8923 case COMPLEX_EXPR:
8924 if (wins)
8925 return build_complex (type, arg0, arg1);
8926 return t;
8928 case REALPART_EXPR:
8929 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8930 return t;
8931 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8932 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8933 TREE_OPERAND (arg0, 1));
8934 else if (TREE_CODE (arg0) == COMPLEX_CST)
8935 return TREE_REALPART (arg0);
8936 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8937 return fold (build2 (TREE_CODE (arg0), type,
8938 fold (build1 (REALPART_EXPR, type,
8939 TREE_OPERAND (arg0, 0))),
8940 fold (build1 (REALPART_EXPR, type,
8941 TREE_OPERAND (arg0, 1)))));
8942 return t;
8944 case IMAGPART_EXPR:
8945 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8946 return fold_convert (type, integer_zero_node);
8947 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8948 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8949 TREE_OPERAND (arg0, 0));
8950 else if (TREE_CODE (arg0) == COMPLEX_CST)
8951 return TREE_IMAGPART (arg0);
8952 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8953 return fold (build2 (TREE_CODE (arg0), type,
8954 fold (build1 (IMAGPART_EXPR, type,
8955 TREE_OPERAND (arg0, 0))),
8956 fold (build1 (IMAGPART_EXPR, type,
8957 TREE_OPERAND (arg0, 1)))));
8958 return t;
8960 case CALL_EXPR:
8961 /* Check for a built-in function. */
8962 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8963 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8964 == FUNCTION_DECL)
8965 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8967 tree tmp = fold_builtin (t, false);
8968 if (tmp)
8969 return tmp;
8971 return t;
8973 default:
8974 return t;
8975 } /* switch (code) */
8978 #ifdef ENABLE_FOLD_CHECKING
8979 #undef fold
8981 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8982 static void fold_check_failed (tree, tree);
8983 void print_fold_checksum (tree);
8985 /* When --enable-checking=fold, compute a digest of expr before
8986 and after the actual fold call to verify that fold did not
8987 accidentally modify the original expr. */
8989 tree
8990 fold (tree expr)
8992 tree ret;
8993 struct md5_ctx ctx;
8994 unsigned char checksum_before[16], checksum_after[16];
8995 htab_t ht;
8997 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8998 md5_init_ctx (&ctx);
8999 fold_checksum_tree (expr, &ctx, ht);
9000 md5_finish_ctx (&ctx, checksum_before);
9001 htab_empty (ht);
9003 ret = fold_1 (expr);
9005 md5_init_ctx (&ctx);
9006 fold_checksum_tree (expr, &ctx, ht);
9007 md5_finish_ctx (&ctx, checksum_after);
9008 htab_delete (ht);
9010 if (memcmp (checksum_before, checksum_after, 16))
9011 fold_check_failed (expr, ret);
9013 return ret;
9016 void
9017 print_fold_checksum (tree expr)
9019 struct md5_ctx ctx;
9020 unsigned char checksum[16], cnt;
9021 htab_t ht;
9023 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9024 md5_init_ctx (&ctx);
9025 fold_checksum_tree (expr, &ctx, ht);
9026 md5_finish_ctx (&ctx, checksum);
9027 htab_delete (ht);
9028 for (cnt = 0; cnt < 16; ++cnt)
9029 fprintf (stderr, "%02x", checksum[cnt]);
9030 putc ('\n', stderr);
9033 static void
9034 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9036 internal_error ("fold check: original tree changed by fold");
9039 static void
9040 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9042 void **slot;
9043 enum tree_code code;
9044 char buf[sizeof (struct tree_decl)];
9045 int i, len;
9047 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9048 <= sizeof (struct tree_decl))
9049 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9050 if (expr == NULL)
9051 return;
9052 slot = htab_find_slot (ht, expr, INSERT);
9053 if (*slot != NULL)
9054 return;
9055 *slot = expr;
9056 code = TREE_CODE (expr);
9057 if (TREE_CODE_CLASS (code) == tcc_declaration
9058 && DECL_ASSEMBLER_NAME_SET_P (expr))
9060 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9061 memcpy (buf, expr, tree_size (expr));
9062 expr = (tree) buf;
9063 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9065 else if (TREE_CODE_CLASS (code) == tcc_type
9066 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9067 || TYPE_CACHED_VALUES_P (expr)))
9069 /* Allow these fields to be modified. */
9070 memcpy (buf, expr, tree_size (expr));
9071 expr = (tree) buf;
9072 TYPE_POINTER_TO (expr) = NULL;
9073 TYPE_REFERENCE_TO (expr) = NULL;
9074 TYPE_CACHED_VALUES_P (expr) = 0;
9075 TYPE_CACHED_VALUES (expr) = NULL;
9077 md5_process_bytes (expr, tree_size (expr), ctx);
9078 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9079 if (TREE_CODE_CLASS (code) != tcc_type
9080 && TREE_CODE_CLASS (code) != tcc_declaration)
9081 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9082 switch (TREE_CODE_CLASS (code))
9084 case tcc_constant:
9085 switch (code)
9087 case STRING_CST:
9088 md5_process_bytes (TREE_STRING_POINTER (expr),
9089 TREE_STRING_LENGTH (expr), ctx);
9090 break;
9091 case COMPLEX_CST:
9092 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9093 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9094 break;
9095 case VECTOR_CST:
9096 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9097 break;
9098 default:
9099 break;
9101 break;
9102 case tcc_exceptional:
9103 switch (code)
9105 case TREE_LIST:
9106 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9107 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9108 break;
9109 case TREE_VEC:
9110 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9111 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9112 break;
9113 default:
9114 break;
9116 break;
9117 case tcc_expression:
9118 case tcc_reference:
9119 case tcc_comparison:
9120 case tcc_unary:
9121 case tcc_binary:
9122 case tcc_statement:
9123 len = first_rtl_op (code);
9124 for (i = 0; i < len; ++i)
9125 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9126 break;
9127 case tcc_declaration:
9128 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9129 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9130 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9131 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9132 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9133 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9134 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9135 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9136 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9137 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9138 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9139 break;
9140 case tcc_type:
9141 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9142 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9143 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9144 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9145 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9146 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9147 if (INTEGRAL_TYPE_P (expr)
9148 || SCALAR_FLOAT_TYPE_P (expr))
9150 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9151 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9153 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9154 if (TREE_CODE (expr) == RECORD_TYPE
9155 || TREE_CODE (expr) == UNION_TYPE
9156 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9157 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9158 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9159 break;
9160 default:
9161 break;
9165 #endif
9167 /* Perform constant folding and related simplification of initializer
9168 expression EXPR. This behaves identically to "fold" but ignores
9169 potential run-time traps and exceptions that fold must preserve. */
9171 tree
9172 fold_initializer (tree expr)
9174 int saved_signaling_nans = flag_signaling_nans;
9175 int saved_trapping_math = flag_trapping_math;
9176 int saved_trapv = flag_trapv;
9177 tree result;
9179 flag_signaling_nans = 0;
9180 flag_trapping_math = 0;
9181 flag_trapv = 0;
9183 result = fold (expr);
9185 flag_signaling_nans = saved_signaling_nans;
9186 flag_trapping_math = saved_trapping_math;
9187 flag_trapv = saved_trapv;
9189 return result;
9192 /* Determine if first argument is a multiple of second argument. Return 0 if
9193 it is not, or we cannot easily determine it to be.
9195 An example of the sort of thing we care about (at this point; this routine
9196 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9197 fold cases do now) is discovering that
9199 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9201 is a multiple of
9203 SAVE_EXPR (J * 8)
9205 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9207 This code also handles discovering that
9209 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9211 is a multiple of 8 so we don't have to worry about dealing with a
9212 possible remainder.
9214 Note that we *look* inside a SAVE_EXPR only to determine how it was
9215 calculated; it is not safe for fold to do much of anything else with the
9216 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9217 at run time. For example, the latter example above *cannot* be implemented
9218 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9219 evaluation time of the original SAVE_EXPR is not necessarily the same at
9220 the time the new expression is evaluated. The only optimization of this
9221 sort that would be valid is changing
9223 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9225 divided by 8 to
9227 SAVE_EXPR (I) * SAVE_EXPR (J)
9229 (where the same SAVE_EXPR (J) is used in the original and the
9230 transformed version). */
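/* A sketch of how the recursion below answers such queries, assuming
   an int-typed expression:

     multiple_of_p (int, J * 8 + 16, 8)
       PLUS_EXPR:    both "J * 8" and "16" must be multiples of 8;
       MULT_EXPR:    the operand "8" is BOTTOM itself, so it matches;
       INTEGER_CST:  const_binop (TRUNC_MOD_EXPR, 16, 8) is zero.

   The call therefore returns 1 without knowing anything about J.  */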
9232 static int
9233 multiple_of_p (tree type, tree top, tree bottom)
9235 if (operand_equal_p (top, bottom, 0))
9236 return 1;
9238 if (TREE_CODE (type) != INTEGER_TYPE)
9239 return 0;
9241 switch (TREE_CODE (top))
9243 case MULT_EXPR:
9244 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9245 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9247 case PLUS_EXPR:
9248 case MINUS_EXPR:
9249 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9250 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9252 case LSHIFT_EXPR:
9253 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9255 tree op1, t1;
9257 op1 = TREE_OPERAND (top, 1);
9258 /* const_binop may not detect overflow correctly,
9259 so check for it explicitly here. */
9260 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9261 > TREE_INT_CST_LOW (op1)
9262 && TREE_INT_CST_HIGH (op1) == 0
9263 && 0 != (t1 = fold_convert (type,
9264 const_binop (LSHIFT_EXPR,
9265 size_one_node,
9266 op1, 0)))
9267 && ! TREE_OVERFLOW (t1))
9268 return multiple_of_p (type, t1, bottom);
9270 return 0;
9272 case NOP_EXPR:
9273 /* Can't handle conversions from non-integral or wider integral type. */
9274 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9275 || (TYPE_PRECISION (type)
9276 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9277 return 0;
9279 /* ... fall through ... */
9281 case SAVE_EXPR:
9282 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9284 case INTEGER_CST:
9285 if (TREE_CODE (bottom) != INTEGER_CST
9286 || (TYPE_UNSIGNED (type)
9287 && (tree_int_cst_sgn (top) < 0
9288 || tree_int_cst_sgn (bottom) < 0)))
9289 return 0;
9290 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9291 top, bottom, 0));
9293 default:
9294 return 0;
9298 /* Return true if `t' is known to be non-negative. */
9300 int
9301 tree_expr_nonnegative_p (tree t)
9303 switch (TREE_CODE (t))
9305 case ABS_EXPR:
9306 return 1;
9308 case INTEGER_CST:
9309 return tree_int_cst_sgn (t) >= 0;
9311 case REAL_CST:
9312 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9314 case PLUS_EXPR:
9315 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9316 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9317 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9319 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9320 both unsigned and at least 2 bits shorter than the result. */
9321 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9322 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9323 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9325 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9326 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9327 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9328 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9330 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9331 TYPE_PRECISION (inner2)) + 1;
9332 return prec < TYPE_PRECISION (TREE_TYPE (t));
9335 break;
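/* Concretely: if x and y are unsigned chars widened to a 32-bit int,
   prec is MAX (8, 8) + 1 == 9 < 32, and indeed x + y can be at most
   255 + 255 == 510, which can never reach the sign bit of the wider
   type.  */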
9337 case MULT_EXPR:
9338 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9340 /* x * x for floating point x is always non-negative. */
9341 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9342 return 1;
9343 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9344 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9347 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9348 both unsigned and their total bits is shorter than the result. */
9349 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9350 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9351 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9353 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9354 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9355 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9356 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9357 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9358 < TYPE_PRECISION (TREE_TYPE (t));
9360 return 0;
9362 case TRUNC_DIV_EXPR:
9363 case CEIL_DIV_EXPR:
9364 case FLOOR_DIV_EXPR:
9365 case ROUND_DIV_EXPR:
9366 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9367 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9369 case TRUNC_MOD_EXPR:
9370 case CEIL_MOD_EXPR:
9371 case FLOOR_MOD_EXPR:
9372 case ROUND_MOD_EXPR:
9373 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9375 case RDIV_EXPR:
9376 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9377 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9379 case BIT_AND_EXPR:
9380 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9381 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9382 case BIT_IOR_EXPR:
9383 case BIT_XOR_EXPR:
9384 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9385 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9387 case NOP_EXPR:
9389 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9390 tree outer_type = TREE_TYPE (t);
9392 if (TREE_CODE (outer_type) == REAL_TYPE)
9394 if (TREE_CODE (inner_type) == REAL_TYPE)
9395 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9396 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9398 if (TYPE_UNSIGNED (inner_type))
9399 return 1;
9400 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9403 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9405 if (TREE_CODE (inner_type) == REAL_TYPE)
9406 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9407 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9408 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9409 && TYPE_UNSIGNED (inner_type);
9412 break;
9414 case COND_EXPR:
9415 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9416 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9417 case COMPOUND_EXPR:
9418 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9419 case MIN_EXPR:
9420 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9421 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9422 case MAX_EXPR:
9423 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9424 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9425 case MODIFY_EXPR:
9426 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9427 case BIND_EXPR:
9428 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9429 case SAVE_EXPR:
9430 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9431 case NON_LVALUE_EXPR:
9432 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9433 case FLOAT_EXPR:
9434 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9436 case TARGET_EXPR:
9438 tree temp = TARGET_EXPR_SLOT (t);
9439 t = TARGET_EXPR_INITIAL (t);
9441 /* If the initializer is non-void, then it's a normal expression
9442 that will be assigned to the slot. */
9443 if (!VOID_TYPE_P (t))
9444 return tree_expr_nonnegative_p (t);
9446 /* Otherwise, the initializer sets the slot in some way. One common
9447 way is an assignment statement at the end of the initializer. */
9448 while (1)
9450 if (TREE_CODE (t) == BIND_EXPR)
9451 t = expr_last (BIND_EXPR_BODY (t));
9452 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9453 || TREE_CODE (t) == TRY_CATCH_EXPR)
9454 t = expr_last (TREE_OPERAND (t, 0));
9455 else if (TREE_CODE (t) == STATEMENT_LIST)
9456 t = expr_last (t);
9457 else
9458 break;
9460 if (TREE_CODE (t) == MODIFY_EXPR
9461 && TREE_OPERAND (t, 0) == temp)
9462 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9464 return 0;
9467 case CALL_EXPR:
9469 tree fndecl = get_callee_fndecl (t);
9470 tree arglist = TREE_OPERAND (t, 1);
9471 if (fndecl
9472 && DECL_BUILT_IN (fndecl)
9473 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9474 switch (DECL_FUNCTION_CODE (fndecl))
9476 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9477 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9478 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9479 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9481 CASE_BUILTIN_F (BUILT_IN_ACOS)
9482 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9483 CASE_BUILTIN_F (BUILT_IN_CABS)
9484 CASE_BUILTIN_F (BUILT_IN_COSH)
9485 CASE_BUILTIN_F (BUILT_IN_ERFC)
9486 CASE_BUILTIN_F (BUILT_IN_EXP)
9487 CASE_BUILTIN_F (BUILT_IN_EXP10)
9488 CASE_BUILTIN_F (BUILT_IN_EXP2)
9489 CASE_BUILTIN_F (BUILT_IN_FABS)
9490 CASE_BUILTIN_F (BUILT_IN_FDIM)
9491 CASE_BUILTIN_F (BUILT_IN_FREXP)
9492 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9493 CASE_BUILTIN_F (BUILT_IN_POW10)
9494 CASE_BUILTIN_I (BUILT_IN_FFS)
9495 CASE_BUILTIN_I (BUILT_IN_PARITY)
9496 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9497 /* Always true. */
9498 return 1;
9500 CASE_BUILTIN_F (BUILT_IN_SQRT)
9501 /* sqrt(-0.0) is -0.0. */
9502 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9503 return 1;
9504 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9506 CASE_BUILTIN_F (BUILT_IN_ASINH)
9507 CASE_BUILTIN_F (BUILT_IN_ATAN)
9508 CASE_BUILTIN_F (BUILT_IN_ATANH)
9509 CASE_BUILTIN_F (BUILT_IN_CBRT)
9510 CASE_BUILTIN_F (BUILT_IN_CEIL)
9511 CASE_BUILTIN_F (BUILT_IN_ERF)
9512 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9513 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9514 CASE_BUILTIN_F (BUILT_IN_FMOD)
9515 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9516 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9517 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9518 CASE_BUILTIN_F (BUILT_IN_LRINT)
9519 CASE_BUILTIN_F (BUILT_IN_LROUND)
9520 CASE_BUILTIN_F (BUILT_IN_MODF)
9521 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9522 CASE_BUILTIN_F (BUILT_IN_POW)
9523 CASE_BUILTIN_F (BUILT_IN_RINT)
9524 CASE_BUILTIN_F (BUILT_IN_ROUND)
9525 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9526 CASE_BUILTIN_F (BUILT_IN_SINH)
9527 CASE_BUILTIN_F (BUILT_IN_TANH)
9528 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9529 /* True if the 1st argument is nonnegative. */
9530 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9532 CASE_BUILTIN_F (BUILT_IN_FMAX)
9533 /* True if either the 1st or 2nd argument is nonnegative. */
9534 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9535 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9537 CASE_BUILTIN_F (BUILT_IN_FMIN)
9538 /* True if both the 1st and 2nd arguments are nonnegative. */
9539 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9540 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9542 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9543 /* True if the 2nd argument is nonnegative. */
9544 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9546 default:
9547 break;
9548 #undef CASE_BUILTIN_F
9549 #undef CASE_BUILTIN_I
9553 /* ... fall through ... */
9555 default:
9556 if (truth_value_p (TREE_CODE (t)))
9557 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9558 return 1;
9561 /* We don't know the sign of `t', so be conservative and return false. */
9562 return 0;
9565 /* Return true when T is an address and is known to be nonzero.
9566 For floating point we further ensure that T is not denormal.
9567 Similar logic is present in nonzero_address in rtlanal.c. */
9569 static bool
9570 tree_expr_nonzero_p (tree t)
9572 tree type = TREE_TYPE (t);
9574 /* Doing something useful for floating point would need more work. */
9575 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9576 return false;
9578 switch (TREE_CODE (t))
9580 case ABS_EXPR:
9581 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9582 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9584 case INTEGER_CST:
9585 /* We used to test for !integer_zerop here. This does not work correctly
9586 if TREE_CONSTANT_OVERFLOW (t). */
9587 return (TREE_INT_CST_LOW (t) != 0
9588 || TREE_INT_CST_HIGH (t) != 0);
9590 case PLUS_EXPR:
9591 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9593 /* When negative values are possible, it is hard to
9594 say anything definite. */
9595 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9596 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9597 return false;
9598 /* One of the operands must be positive and the other non-negative. */
9599 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9600 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9602 break;
9604 case MULT_EXPR:
9605 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9607 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9608 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9610 break;
9612 case NOP_EXPR:
9614 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9615 tree outer_type = TREE_TYPE (t);
9617 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9618 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9620 break;
9622 case ADDR_EXPR:
9624 tree base = get_base_address (TREE_OPERAND (t, 0));
9626 if (!base)
9627 return false;
9629 /* Weak declarations may link to NULL. */
9630 if (DECL_P (base))
9631 return !DECL_WEAK (base);
9633 /* Constants are never weak. */
9634 if (CONSTANT_CLASS_P (base))
9635 return true;
9637 return false;
9640 case COND_EXPR:
9641 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9642 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9644 case MIN_EXPR:
9645 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9646 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9648 case MAX_EXPR:
9649 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9651 /* When both operands are nonzero, then MAX must be too. */
9652 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9653 return true;
9655 /* MAX where operand 0 is positive is positive. */
9656 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9658 /* MAX where operand 1 is positive is positive. */
9659 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9660 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9661 return true;
9662 break;
9664 case COMPOUND_EXPR:
9665 case MODIFY_EXPR:
9666 case BIND_EXPR:
9667 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9669 case SAVE_EXPR:
9670 case NON_LVALUE_EXPR:
9671 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9673 case BIT_IOR_EXPR:
9674 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9675 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9677 default:
9678 break;
9680 return false;
9683 /* See if we are applying CODE, a relational operator, to the highest
9684 or lowest possible integer of TYPE. If so, then the result is a
9685 compile-time constant. */
9687 static tree
9688 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9689 tree *op1_p)
9691 tree op0 = *op0_p;
9692 tree op1 = *op1_p;
9693 enum tree_code code = *code_p;
9694 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9696 if (TREE_CODE (op1) == INTEGER_CST
9697 && ! TREE_CONSTANT_OVERFLOW (op1)
9698 && width <= HOST_BITS_PER_WIDE_INT
9699 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9700 || POINTER_TYPE_P (TREE_TYPE (op1))))
9702 unsigned HOST_WIDE_INT signed_max;
9703 unsigned HOST_WIDE_INT max, min;
9705 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9707 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9709 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9710 min = 0;
9712 else
9714 max = signed_max;
9715 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9718 if (TREE_INT_CST_HIGH (op1) == 0
9719 && TREE_INT_CST_LOW (op1) == max)
9720 switch (code)
9722 case GT_EXPR:
9723 return omit_one_operand (type, integer_zero_node, op0);
9725 case GE_EXPR:
9726 *code_p = EQ_EXPR;
9727 break;
9728 case LE_EXPR:
9729 return omit_one_operand (type, integer_one_node, op0);
9731 case LT_EXPR:
9732 *code_p = NE_EXPR;
9733 break;
9735 /* The GE_EXPR and LT_EXPR cases above are not normally
9736 reached because of previous transformations. */
9738 default:
9739 break;
9741 else if (TREE_INT_CST_HIGH (op1) == 0
9742 && TREE_INT_CST_LOW (op1) == max - 1)
9743 switch (code)
9745 case GT_EXPR:
9746 *code_p = EQ_EXPR;
9747 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9748 break;
9749 case LE_EXPR:
9750 *code_p = NE_EXPR;
9751 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9752 break;
9753 default:
9754 break;
9756 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9757 && TREE_INT_CST_LOW (op1) == min)
9758 switch (code)
9760 case LT_EXPR:
9761 return omit_one_operand (type, integer_zero_node, op0);
9763 case LE_EXPR:
9764 *code_p = EQ_EXPR;
9765 break;
9767 case GE_EXPR:
9768 return omit_one_operand (type, integer_one_node, op0);
9770 case GT_EXPR:
9771 *code_p = NE_EXPR;
9772 break;
9774 default:
9775 break;
9777 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9778 && TREE_INT_CST_LOW (op1) == min + 1)
9779 switch (code)
9781 case GE_EXPR:
9782 *code_p = NE_EXPR;
9783 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9784 break;
9785 case LT_EXPR:
9786 *code_p = EQ_EXPR;
9787 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9788 break;
9789 default:
9790 break;
9793 else if (TREE_INT_CST_HIGH (op1) == 0
9794 && TREE_INT_CST_LOW (op1) == signed_max
9795 && TYPE_UNSIGNED (TREE_TYPE (op1))
9796 /* signed_type does not work on pointer types. */
9797 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9799 /* The following case also applies to X < signed_max+1
9800 and X >= signed_max+1 because of previous transformations. */
9801 if (code == LE_EXPR || code == GT_EXPR)
9803 tree st0, st1, exp, retval;
9804 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9805 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9807 exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
9808 type,
9809 fold_convert (st0, op0),
9810 fold_convert (st1, integer_zero_node));
9812 retval
9813 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9814 TREE_TYPE (exp),
9815 TREE_OPERAND (exp, 0),
9816 TREE_OPERAND (exp, 1));
9818 /* If we are in gimple form, then returning EXP would create
9819 non-gimple expressions. Clearing it is safe and ensures
9820 we do not allow a non-gimple expression to escape. */
9821 if (in_gimple_form)
9822 exp = NULL;
9824 return (retval ? retval : exp);
9829 return NULL_TREE;
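/* Worked examples of the rewrites above, for unsigned char X
   (min 0, max 255):

     X <= 255  =>  1            X < 0   =>  0
     X > 255   =>  0            X >= 0  =>  1
     X >= 255  =>  X == 255     X <= 0  =>  X == 0
     X < 255   =>  X != 255     X > 0   =>  X != 0

   The max-1 and min+1 cases likewise collapse to equality tests
   against the adjusted constant.  */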
9833 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9834 attempt to fold the expression to a constant without modifying TYPE,
9835 OP0 or OP1.
9837 If the expression could be simplified to a constant, then return
9838 the constant. If the expression would not be simplified to a
9839 constant, then return NULL_TREE.
9841 Note this is primarily designed to be called after gimplification
9842 of the tree structures and when at least one operand is a constant.
9843 As a result of those simplifying assumptions this routine is far
9844 simpler than the generic fold routine. */
9846 tree
9847 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9848 tree op0, tree op1)
9850 int wins = 1;
9851 tree subop0;
9852 tree subop1;
9853 tree tem;
9855 /* If this is a commutative operation, and ARG0 is a constant, move it
9856 to ARG1 to reduce the number of tests below. */
9857 if (commutative_tree_code (code)
9858 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9860 tem = op0;
9861 op0 = op1;
9862 op1 = tem;
9865 /* If either operand is a complex type, extract its real component. */
9866 if (TREE_CODE (op0) == COMPLEX_CST)
9867 subop0 = TREE_REALPART (op0);
9868 else
9869 subop0 = op0;
9871 if (TREE_CODE (op1) == COMPLEX_CST)
9872 subop1 = TREE_REALPART (op1);
9873 else
9874 subop1 = op1;
9876 /* Note if either argument is not a real or integer constant.
9877 With a few exceptions, simplification is limited to cases
9878 where both arguments are constants. */
9879 if ((TREE_CODE (subop0) != INTEGER_CST
9880 && TREE_CODE (subop0) != REAL_CST)
9881 || (TREE_CODE (subop1) != INTEGER_CST
9882 && TREE_CODE (subop1) != REAL_CST))
9883 wins = 0;
9885 switch (code)
9887 case PLUS_EXPR:
9888 /* (plus (address) (const_int)) is a constant. */
9889 if (TREE_CODE (op0) == PLUS_EXPR
9890 && TREE_CODE (op1) == INTEGER_CST
9891 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9892 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9893 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9894 == ADDR_EXPR)))
9895 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9897 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9898 const_binop (PLUS_EXPR, op1,
9899 TREE_OPERAND (op0, 1), 0));
9901 case BIT_XOR_EXPR:
9903 binary:
9904 if (!wins)
9905 return NULL_TREE;
9907 /* Both arguments are constants. Simplify. */
9908 tem = const_binop (code, op0, op1, 0);
9909 if (tem != NULL_TREE)
9911 /* The return value should always have the same type as
9912 the original expression. */
9913 if (TREE_TYPE (tem) != type)
9914 tem = fold_convert (type, tem);
9916 return tem;
9918 return NULL_TREE;
9920 case MINUS_EXPR:
9921 /* Fold &x - &x. This can happen from &x.foo - &x.
9922 This is unsafe for certain floats even in non-IEEE formats.
9923 In IEEE, it is unsafe because it does wrong for NaNs.
9924 Also note that operand_equal_p is always false if an
9925 operand is volatile. */
9926 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9927 return fold_convert (type, integer_zero_node);
9929 goto binary;
9931 case MULT_EXPR:
9932 case BIT_AND_EXPR:
9933 /* Special case multiplication or bitwise AND where one argument
9934 is zero. */
9935 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9936 return omit_one_operand (type, op1, op0);
9937 else
9938 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9939 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9940 && real_zerop (op1))
9941 return omit_one_operand (type, op1, op0);
9943 goto binary;
9945 case BIT_IOR_EXPR:
9946 /* Special case when we know the result will be all ones. */
9947 if (integer_all_onesp (op1))
9948 return omit_one_operand (type, op1, op0);
9950 goto binary;
9952 case TRUNC_DIV_EXPR:
9953 case ROUND_DIV_EXPR:
9954 case FLOOR_DIV_EXPR:
9955 case CEIL_DIV_EXPR:
9956 case EXACT_DIV_EXPR:
9957 case TRUNC_MOD_EXPR:
9958 case ROUND_MOD_EXPR:
9959 case FLOOR_MOD_EXPR:
9960 case CEIL_MOD_EXPR:
9961 case RDIV_EXPR:
9962 /* Division by zero is undefined. */
9963 if (integer_zerop (op1))
9964 return NULL_TREE;
9966 if (TREE_CODE (op1) == REAL_CST
9967 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9968 && real_zerop (op1))
9969 return NULL_TREE;
9971 goto binary;
9973 case MIN_EXPR:
9974 if (INTEGRAL_TYPE_P (type)
9975 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9976 return omit_one_operand (type, op1, op0);
9978 goto binary;
9980 case MAX_EXPR:
9981 if (INTEGRAL_TYPE_P (type)
9982 && TYPE_MAX_VALUE (type)
9983 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9984 return omit_one_operand (type, op1, op0);
9986 goto binary;
9988 case RSHIFT_EXPR:
9989 /* Optimize -1 >> x for arithmetic right shifts. */
9990 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9991 return omit_one_operand (type, op0, op1);
9992 /* ... fall through ... */
9994 case LSHIFT_EXPR:
9995 if (integer_zerop (op0))
9996 return omit_one_operand (type, op0, op1);
9998 /* Since a negative shift count is not well-defined, don't
9999 try to compute it in the compiler. */
10000 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10001 return NULL_TREE;
10003 goto binary;
10005 case LROTATE_EXPR:
10006 case RROTATE_EXPR:
10007 /* -1 rotated either direction by any amount is still -1. */
10008 if (integer_all_onesp (op0))
10009 return omit_one_operand (type, op0, op1);
10011 /* 0 rotated either direction by any amount is still zero. */
10012 if (integer_zerop (op0))
10013 return omit_one_operand (type, op0, op1);
10015 goto binary;
10017 case COMPLEX_EXPR:
10018 if (wins)
10019 return build_complex (type, op0, op1);
10020 return NULL_TREE;
10022 case LT_EXPR:
10023 case LE_EXPR:
10024 case GT_EXPR:
10025 case GE_EXPR:
10026 case EQ_EXPR:
10027 case NE_EXPR:
10028 /* If one arg is a real or integer constant, put it last. */
10029 if ((TREE_CODE (op0) == INTEGER_CST
10030 && TREE_CODE (op1) != INTEGER_CST)
10031 || (TREE_CODE (op0) == REAL_CST
10032 && TREE_CODE (op1) != REAL_CST))
10034 tree temp;
10036 temp = op0;
10037 op0 = op1;
10038 op1 = temp;
10039 code = swap_tree_comparison (code);
10042 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10043 This transformation affects the cases which are handled in later
10044 optimizations involving comparisons with non-negative constants. */
10045 if (TREE_CODE (op1) == INTEGER_CST
10046 && TREE_CODE (op0) != INTEGER_CST
10047 && tree_int_cst_sgn (op1) > 0)
10049 switch (code)
10051 case GE_EXPR:
10052 code = GT_EXPR;
10053 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10054 break;
10056 case LT_EXPR:
10057 code = LE_EXPR;
10058 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10059 break;
10061 default:
10062 break;
10066 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10067 if (tem)
10068 return tem;
10070 /* Fall through. */
10072 case ORDERED_EXPR:
10073 case UNORDERED_EXPR:
10074 case UNLT_EXPR:
10075 case UNLE_EXPR:
10076 case UNGT_EXPR:
10077 case UNGE_EXPR:
10078 case UNEQ_EXPR:
10079 case LTGT_EXPR:
10080 if (!wins)
10081 return NULL_TREE;
10083 return fold_relational_const (code, type, op0, op1);
10085 case RANGE_EXPR:
10086 /* This could probably be handled. */
10087 return NULL_TREE;
10089 case TRUTH_AND_EXPR:
10090 /* If second arg is constant zero, result is zero, but first arg
10091 must be evaluated. */
10092 if (integer_zerop (op1))
10093 return omit_one_operand (type, op1, op0);
10094 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10095 case will be handled here. */
10096 if (integer_zerop (op0))
10097 return omit_one_operand (type, op0, op1);
10098 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10099 return constant_boolean_node (true, type);
10100 return NULL_TREE;
10102 case TRUTH_OR_EXPR:
10103 /* If second arg is constant true, result is true, but we must
10104 evaluate first arg. */
10105 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10106 return omit_one_operand (type, op1, op0);
10107 /* Likewise for first arg, but note this only occurs here for
10108 TRUTH_OR_EXPR. */
10109 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10110 return omit_one_operand (type, op0, op1);
10111 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10112 return constant_boolean_node (false, type);
10113 return NULL_TREE;
10115 case TRUTH_XOR_EXPR:
10116 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10118 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10119 return constant_boolean_node (x, type);
10121 return NULL_TREE;
10123 default:
10124 return NULL_TREE;
10128 /* Given the components of a unary expression CODE, TYPE and OP0,
10129 attempt to fold the expression to a constant without modifying
10130 TYPE or OP0.
10132 If the expression could be simplified to a constant, then return
10133 the constant. If the expression would not be simplified to a
10134 constant, then return NULL_TREE.
10136 Note this is primarily designed to be called after gimplification
10137 of the tree structures and when op0 is a constant. As a result
10138 of those simplifying assumptions this routine is far simpler than
10139 the generic fold routine. */
10141 tree
10142 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10143 tree op0)
10145 /* Make sure we have a suitable constant argument. */
10146 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10148 tree subop;
10150 if (TREE_CODE (op0) == COMPLEX_CST)
10151 subop = TREE_REALPART (op0);
10152 else
10153 subop = op0;
10155 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10156 return NULL_TREE;
10159 switch (code)
10161 case NOP_EXPR:
10162 case FLOAT_EXPR:
10163 case CONVERT_EXPR:
10164 case FIX_TRUNC_EXPR:
10165 case FIX_FLOOR_EXPR:
10166 case FIX_CEIL_EXPR:
10167 return fold_convert_const (code, type, op0);
10169 case NEGATE_EXPR:
10170 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10171 return fold_negate_const (op0, type);
10172 else
10173 return NULL_TREE;
10175 case ABS_EXPR:
10176 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10177 return fold_abs_const (op0, type);
10178 else
10179 return NULL_TREE;
10181 case BIT_NOT_EXPR:
10182 if (TREE_CODE (op0) == INTEGER_CST)
10183 return fold_not_const (op0, type);
10184 else
10185 return NULL_TREE;
10187 case REALPART_EXPR:
10188 if (TREE_CODE (op0) == COMPLEX_CST)
10189 return TREE_REALPART (op0);
10190 else
10191 return NULL_TREE;
10193 case IMAGPART_EXPR:
10194 if (TREE_CODE (op0) == COMPLEX_CST)
10195 return TREE_IMAGPART (op0);
10196 else
10197 return NULL_TREE;
10199 case CONJ_EXPR:
10200 if (TREE_CODE (op0) == COMPLEX_CST
10201 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10202 return build_complex (type, TREE_REALPART (op0),
10203 negate_expr (TREE_IMAGPART (op0)));
10204 return NULL_TREE;
10206 default:
10207 return NULL_TREE;
10211 /* If EXP represents referencing an element in a constant string
10212 (either via pointer arithmetic or array indexing), return the
10213 tree representing the value accessed, otherwise return NULL. */
10215 tree
10216 fold_read_from_constant_string (tree exp)
10218 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10220 tree exp1 = TREE_OPERAND (exp, 0);
10221 tree index;
10222 tree string;
10224 if (TREE_CODE (exp) == INDIRECT_REF)
10225 string = string_constant (exp1, &index);
10226 else
10228 tree low_bound = array_ref_low_bound (exp);
10229 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10231 /* Optimize the special case of a zero lower bound.
10233 We convert the low_bound to sizetype to avoid some problems
10234 with constant folding. (E.g. suppose the lower bound is 1,
10235 and its mode is QI. Without the conversion, (ARRAY
10236 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10237 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10238 if (! integer_zerop (low_bound))
10239 index = size_diffop (index, fold_convert (sizetype, low_bound));
10241 string = exp1;
10244 if (string
10245 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10246 && TREE_CODE (string) == STRING_CST
10247 && TREE_CODE (index) == INTEGER_CST
10248 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10249 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10250 == MODE_INT)
10251 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10252 return fold_convert (TREE_TYPE (exp),
10253 build_int_cst (NULL_TREE,
10254 (TREE_STRING_POINTER (string)
10255 [TREE_INT_CST_LOW (index)])));
10257 return NULL;
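/* For example, given the constant string "abc", both "abc"[1] and
   *("abc" + 1) should be folded by this routine to the character
   constant 'b', provided the element type is a single-byte integer
   mode as the checks above require.  */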
10260 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10261 an integer constant or real constant.
10263 TYPE is the type of the result. */
10265 static tree
10266 fold_negate_const (tree arg0, tree type)
10268 tree t = NULL_TREE;
10270 switch (TREE_CODE (arg0))
10272 case INTEGER_CST:
10274 unsigned HOST_WIDE_INT low;
10275 HOST_WIDE_INT high;
10276 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10277 TREE_INT_CST_HIGH (arg0),
10278 &low, &high);
10279 t = build_int_cst_wide (type, low, high);
10280 t = force_fit_type (t, 1,
10281 (overflow | TREE_OVERFLOW (arg0))
10282 && !TYPE_UNSIGNED (type),
10283 TREE_CONSTANT_OVERFLOW (arg0));
10284 break;
10287 case REAL_CST:
10288 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10289 break;
10291 default:
10292 gcc_unreachable ();
10295 return t;
10298 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10299 an integer constant or real constant.
10301 TYPE is the type of the result. */
10303 tree
10304 fold_abs_const (tree arg0, tree type)
10306 tree t = NULL_TREE;
10308 switch (TREE_CODE (arg0))
10310 case INTEGER_CST:
10311 /* If the value is unsigned, then the absolute value is
10312 the same as the ordinary value. */
10313 if (TYPE_UNSIGNED (type))
10314 t = arg0;
10315 /* Similarly, if the value is non-negative. */
10316 else if (INT_CST_LT (integer_minus_one_node, arg0))
10317 t = arg0;
10318 /* If the value is negative, then the absolute value is
10319 its negation. */
10320 else
10322 unsigned HOST_WIDE_INT low;
10323 HOST_WIDE_INT high;
10324 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10325 TREE_INT_CST_HIGH (arg0),
10326 &low, &high);
10327 t = build_int_cst_wide (type, low, high);
10328 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10329 TREE_CONSTANT_OVERFLOW (arg0));
10331 break;
10333 case REAL_CST:
10334 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10335 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10336 else
10337 t = arg0;
10338 break;
10340 default:
10341 gcc_unreachable ();
10344 return t;
10347 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10348 constant. TYPE is the type of the result. */
10350 static tree
10351 fold_not_const (tree arg0, tree type)
10353 tree t = NULL_TREE;
10355 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
10357 t = build_int_cst_wide (type,
10358 ~ TREE_INT_CST_LOW (arg0),
10359 ~ TREE_INT_CST_HIGH (arg0));
10360 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10361 TREE_CONSTANT_OVERFLOW (arg0));
10363 return t;
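/* A quick check of the double-word complement above: for arg0 == 5
   (low == 5, high == 0) the result has low == ~5 and high == ~0,
   which force_fit_type then fits to the precision of TYPE -- giving
   -6 for a signed type, as expected for ~5 in two's complement.  */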
10366 /* Given CODE, a relational operator, the target type, TYPE and two
10367 constant operands OP0 and OP1, return the result of the
10368 relational operation. If the result is not a compile time
10369 constant, then return NULL_TREE. */
10371 static tree
10372 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10374 int result, invert;
10376 /* From here on, the only cases we handle are when the result is
10377 known to be a constant. */
10379 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10381 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10382 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10384 /* Handle the cases where either operand is a NaN. */
10385 if (real_isnan (c0) || real_isnan (c1))
10387 switch (code)
10389 case EQ_EXPR:
10390 case ORDERED_EXPR:
10391 result = 0;
10392 break;
10394 case NE_EXPR:
10395 case UNORDERED_EXPR:
10396 case UNLT_EXPR:
10397 case UNLE_EXPR:
10398 case UNGT_EXPR:
10399 case UNGE_EXPR:
10400 case UNEQ_EXPR:
10401 result = 1;
10402 break;
10404 case LT_EXPR:
10405 case LE_EXPR:
10406 case GT_EXPR:
10407 case GE_EXPR:
10408 case LTGT_EXPR:
10409 if (flag_trapping_math)
10410 return NULL_TREE;
10411 result = 0;
10412 break;
10414 default:
10415 gcc_unreachable ();
10418 return constant_boolean_node (result, type);
10421 return constant_boolean_node (real_compare (code, c0, c1), type);
10424 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10426 To compute GT, swap the arguments and do LT.
10427 To compute GE, do LT and invert the result.
10428 To compute LE, swap the arguments, do LT and invert the result.
10429 To compute NE, do EQ and invert the result.
10431 Therefore, the code below must handle only EQ and LT. */
10433 if (code == LE_EXPR || code == GT_EXPR)
10435 tree tem = op0;
10436 op0 = op1;
10437 op1 = tem;
10438 code = swap_tree_comparison (code);
10441 /* Note that it is safe to invert for real values here because we
10442 have already handled the one case where it matters. */
10444 invert = 0;
10445 if (code == NE_EXPR || code == GE_EXPR)
10447 invert = 1;
10448 code = invert_tree_comparison (code, false);
10451 /* Compute a result for LT or EQ if args permit;
10452 otherwise return NULL_TREE. */
10453 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10455 if (code == EQ_EXPR)
10456 result = tree_int_cst_equal (op0, op1);
10457 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10458 result = INT_CST_LT_UNSIGNED (op0, op1);
10459 else
10460 result = INT_CST_LT (op0, op1);
10462 else
10463 return NULL_TREE;
10465 if (invert)
10466 result ^= 1;
10467 return constant_boolean_node (result, type);
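/* So, for instance, folding "2 > 3" swaps the operands and the code
   to get "3 < 2", computes INT_CST_LT (3, 2) == 0, and returns the
   boolean node for false; "2 >= 3" instead goes through LT with the
   result inverted.  */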
10470 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10471 avoid confusing the gimplify process. */
10473 tree
10474 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10476 /* The size of the object is not relevant when talking about its address. */
10477 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10478 t = TREE_OPERAND (t, 0);
10480 if (TREE_CODE (t) == INDIRECT_REF)
10482 t = TREE_OPERAND (t, 0);
10483 if (TREE_TYPE (t) != ptrtype)
10484 t = build1 (NOP_EXPR, ptrtype, t);
10486 else
10488 tree base = t;
10490 while (handled_component_p (base)
10491 || TREE_CODE (base) == REALPART_EXPR
10492 || TREE_CODE (base) == IMAGPART_EXPR)
10493 base = TREE_OPERAND (base, 0);
10494 if (DECL_P (base))
10495 TREE_ADDRESSABLE (base) = 1;
10497 t = build1 (ADDR_EXPR, ptrtype, t);
10500 return t;
10503 tree
10504 build_fold_addr_expr (tree t)
10506 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10509 /* Builds an expression for an indirection through T, simplifying some
10510 cases. */
10512 tree
10513 build_fold_indirect_ref (tree t)
10515 tree type = TREE_TYPE (TREE_TYPE (t));
10516 tree sub = t;
10517 tree subtype;
10519 STRIP_NOPS (sub);
10520 if (TREE_CODE (sub) == ADDR_EXPR)
10522 tree op = TREE_OPERAND (sub, 0);
10523 tree optype = TREE_TYPE (op);
10524 /* *&p => p */
10525 if (lang_hooks.types_compatible_p (type, optype))
10526 return op;
10527 /* *(foo *)&fooarray => fooarray[0] */
10528 else if (TREE_CODE (optype) == ARRAY_TYPE
10529 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10530 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10533 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10534 subtype = TREE_TYPE (sub);
10535 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10536 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10538 sub = build_fold_indirect_ref (sub);
10539 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10542 return build1 (INDIRECT_REF, type, t);
10545 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10546 whose result is ignored. The type of the returned tree need not be
10547 the same as the original expression. */
10549 tree
10550 fold_ignored_result (tree t)
10552 if (!TREE_SIDE_EFFECTS (t))
10553 return integer_zero_node;
10555 for (;;)
10556 switch (TREE_CODE_CLASS (TREE_CODE (t)))
10558 case tcc_unary:
10559 t = TREE_OPERAND (t, 0);
10560 break;
10562 case tcc_binary:
10563 case tcc_comparison:
10564 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10565 t = TREE_OPERAND (t, 0);
10566 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10567 t = TREE_OPERAND (t, 1);
10568 else
10569 return t;
10570 break;
10572 case tcc_expression:
10573 switch (TREE_CODE (t))
10575 case COMPOUND_EXPR:
10576 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10577 return t;
10578 t = TREE_OPERAND (t, 0);
10579 break;
10581 case COND_EXPR:
10582 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10583 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10584 return t;
10585 t = TREE_OPERAND (t, 0);
10586 break;
10588 default:
10589 return t;
10591 break;
10593 default:
10594 return t;
10598 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
10599 This can only be applied to objects of a sizetype. */
10601 tree
10602 round_up (tree value, int divisor)
10604 tree div = NULL_TREE;
10606 gcc_assert (divisor > 0);
10607 if (divisor == 1)
10608 return value;
10610 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10611 have to do anything. Only do this when we are not given a const,
10612 because in that case, this check is more expensive than just
10613 doing it. */
10614 if (TREE_CODE (value) != INTEGER_CST)
10616 div = build_int_cst (TREE_TYPE (value), divisor);
10618 if (multiple_of_p (TREE_TYPE (value), value, div))
10619 return value;
10622 /* If divisor is a power of two, simplify this to bit manipulation. */
10623 if (divisor == (divisor & -divisor))
10625 tree t;
10627 t = build_int_cst (TREE_TYPE (value), divisor - 1);
10628 value = size_binop (PLUS_EXPR, value, t);
10629 t = build_int_cst (TREE_TYPE (value), -divisor);
10630 value = size_binop (BIT_AND_EXPR, value, t);
10632 else
10634 if (!div)
10635 div = build_int_cst (TREE_TYPE (value), divisor);
10636 value = size_binop (CEIL_DIV_EXPR, value, div);
10637 value = size_binop (MULT_EXPR, value, div);
10640 return value;
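/* With a power-of-two divisor this reduces to the familiar bit trick:
   round_up (VALUE, 8) computes (VALUE + 7) & -8, so e.g. a VALUE of 37
   becomes 40.  A non-power-of-two divisor falls back to CEIL_DIV_EXPR
   followed by MULT_EXPR.  */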
10643 /* Likewise, but round down. */
10645 tree
10646 round_down (tree value, int divisor)
10648 tree div = NULL_TREE;
10650 gcc_assert (divisor > 0);
10651 if (divisor == 1)
10652 return value;
10654 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10655 have to do anything. Only do this when we are not given a const,
10656 because in that case, this check is more expensive than just
10657 doing it. */
10658 if (TREE_CODE (value) != INTEGER_CST)
10660 div = build_int_cst (TREE_TYPE (value), divisor);
10662 if (multiple_of_p (TREE_TYPE (value), value, div))
10663 return value;
10666 /* If divisor is a power of two, simplify this to bit manipulation. */
10667 if (divisor == (divisor & -divisor))
10669 tree t;
10671 t = build_int_cst (TREE_TYPE (value), -divisor);
10672 value = size_binop (BIT_AND_EXPR, value, t);
10674 else
10676 if (!div)
10677 div = build_int_cst (TREE_TYPE (value), divisor);
10678 value = size_binop (FLOOR_DIV_EXPR, value, div);
10679 value = size_binop (MULT_EXPR, value, div);
10682 return value;
10685 /* Returns true if addresses of E1 and E2 differ by a constant, false
10686 otherwise. If they do, &E1 - &E2 is stored in *DIFF. */
10688 bool
10689 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
10691 tree core1, core2;
10692 HOST_WIDE_INT bitsize1, bitsize2;
10693 HOST_WIDE_INT bitpos1, bitpos2;
10694 tree toffset1, toffset2, tdiff, type;
10695 enum machine_mode mode1, mode2;
10696 int unsignedp1, unsignedp2, volatilep1, volatilep2;
10698 core1 = get_inner_reference (e1, &bitsize1, &bitpos1, &toffset1, &mode1,
10699 &unsignedp1, &volatilep1);
10700 core2 = get_inner_reference (e2, &bitsize2, &bitpos2, &toffset2, &mode2,
10701 &unsignedp2, &volatilep2);
10703 if (bitpos1 % BITS_PER_UNIT != 0
10704 || bitpos2 % BITS_PER_UNIT != 0
10705 || !operand_equal_p (core1, core2, 0))
10706 return false;
10708 if (toffset1 && toffset2)
10710 type = TREE_TYPE (toffset1);
10711 if (type != TREE_TYPE (toffset2))
10712 toffset2 = fold_convert (type, toffset2);
10714 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
10715 if (!host_integerp (tdiff, 0))
10716 return false;
10718 *diff = tree_low_cst (tdiff, 0);
10720 else if (toffset1 || toffset2)
10722 /* If only one of the offsets is non-constant, the difference cannot
10723 be a constant. */
10724 return false;
10726 else
10727 *diff = 0;
10729 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
10730 return true;
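/* Usage sketch: for "struct { int a; int b; } s;", the references s.a
   and s.b share the same core after get_inner_reference, both offsets
   are constant, and the call stores the byte difference (here
   sizeof (int)) in *DIFF and returns true.  If either reference
   involves a variable offset, the offsets' difference must fold to a
   host integer or the call returns false.  */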