/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

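/* With 8-bit values for illustration: 0x7f + 0x01 wraps to 0x80, so A
   and B share a sign bit (~(a ^ b) has the sign bit set) while A and
   SUM differ (a ^ sum has it set), making the whole expression negative,
   i.e. nonzero.  A minimal sketch below (illustrative only, not part of
   this file's interfaces; it assumes the usual wrapping host
   arithmetic):  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT b = a;                  /* same sign as A */
  HOST_WIDE_INT sum = a + b;            /* wraps into the sign bit */
  gcc_assert (OVERFLOW_SUM_SIGN (a, b, sum));   /* overflow detected */
}
#endif
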
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

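/* As a worked example with HOST_BITS_PER_WIDE_INT == 32 (so BASE is
   1 << 16): the pair LOW = 0x12345678, HI = 0x9abcdef0 encodes as
   words[0] = 0x5678, words[1] = 0x1234, words[2] = 0xdef0,
   words[3] = 0x9abc, and decode reassembles each half as
   words[2k] + words[2k+1] * BASE.  Keeping every word below BASE
   guarantees that a product of two words, plus a carry, still fits in
   one unsigned HOST_WIDE_INT.  */
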
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

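/* A minimal sketch of a typical use (illustrative only; the parameter
   name is made up for the example): narrowing 0x1ff to an 8-bit
   unsigned type masks the value down to 0xff.  With OVERFLOWABLE > 0
   the overflow flags would only be set for a signed or sizetype
   result, so here the truncated constant comes back clean.  */
#if 0
static tree
force_fit_type_example (tree uchar_type /* hypothetical 8-bit unsigned type */)
{
  tree t = build_int_cst_wide (uchar_type, 0x1ff, 0);
  return force_fit_type (t, 1, false, false);   /* yields 0xff in UCHAR_TYPE */
}
#endif
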
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

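/* The carry out of the low word is recovered without needing a wider
   type: unsigned overflow in L = L1 + L2 wraps, so L < L1 exactly when
   a carry occurred.  Illustrative only:  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  /* All-ones low word + 1 carries into the high word: 0:~0 + 0:1 = 1:0.  */
  int ovf = add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lv, &hv);
  gcc_assert (lv == 0 && hv == 1 && !ovf);
}
#endif
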
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

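/* This is the two's complement identity -X == ~X + 1 applied per word:
   when the low word is nonzero, negating it absorbs the +1 and the high
   word only needs a bitwise complement; when the low word is zero, the
   +1 ripples all the way up, so the high word is negated directly and
   the only possible overflow is negating the most negative value, where
   *HV and H1 end up sharing the sign bit.  */
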
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

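/* The split shift in the word-straddling arm above is deliberate:
   writing L1 >> (HOST_BITS_PER_WIDE_INT - COUNT) directly would shift
   by the full word width when COUNT == 0, which C leaves undefined.
   Shifting by HOST_BITS_PER_WIDE_INT - COUNT - 1 and then by 1 more
   stays in the defined range and still discards every bit when
   COUNT == 0.  rshift_double below uses the mirror-image idiom.  */
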
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

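/* Both rotates use the usual identity: an N-bit rotate left by C is
   (X << C) | (X >> (N - C)) computed with logical shifts, with the
   count first reduced modulo PREC so that, e.g., rotating a 32-bit
   value by 33 behaves as rotating by 1.  */
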
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {               /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;                /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

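/* A concrete example of the rounding adjustments: for -7 / 2 the trial
   quotient and remainder are -3 and -1 (truncation toward zero).
   TRUNC_DIV_EXPR keeps -3; FLOOR_DIV_EXPR sees a negative ratio with a
   nonzero remainder and decrements to -4; CEIL_DIV_EXPR leaves -3
   alone; ROUND_DIV_EXPR compares 2 * |rem| = 2 against |den| = 2 and,
   since 2 >= 2, rounds away from zero to -4.  The true remainder is
   then recomputed from the adjusted quotient, e.g.
   -7 - (-4 * 2) = 1 in the FLOOR case.  */
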
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

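/* The single excluded value is the most negative one: in a two's
   complement type of precision N, the range is [-2^(N-1), 2^(N-1) - 1],
   so -(-2^(N-1)) == 2^(N-1) does not fit.  For a 32-bit int that is
   -2147483648, whose low bits equal 1 << 31, exactly the pattern the
   final comparison rejects.  */
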
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

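/* The RSHIFT_EXPR case is worth spelling out: for a 32-bit int,
   (int) x >> 31 is the sign mask, i.e. 0 or -1, so its negation is 0
   or 1; (unsigned) x >> 31 produces exactly that 0 or 1 directly,
   saving the negation.  The same argument, run in the other direction,
   justifies the rewrite when the original type is unsigned and NTYPE
   is its signed counterpart.  */
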
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

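/* For example, splitting IN = x - 4 with CODE == PLUS_EXPR and NEGATE_P
   false: the literal 4 was subtracted, so it lands in *MINUS_LITP
   rather than *LITP, *CONP stays null, and x comes back as the variable
   part.  Callers split both operands of an addition this way and then
   recombine the pieces with associate_trees below.  */
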
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

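/* A minimal usage sketch (illustrative only; the function name is made
   up): folding 6 * 7 in a given integer type.  With NOTRUNC == 0 the
   result goes through force_fit_type, so it is truncated to the type's
   precision and the overflow bits are set if anything was lost.  */
#if 0
static tree
int_const_binop_example (tree type)
{
  tree a = build_int_cst (type, 6);
  tree b = build_int_cst (type, 7);
  return int_const_binop (MULT_EXPR, a, b, 0);  /* 42 in TYPE */
}
#endif
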
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

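/* The RDIV_EXPR case above is the textbook formula

     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),

   obtained by multiplying numerator and denominator by the conjugate
   r2 - i2*i; T1 and T2 are the two numerator components and MAGSQUARED
   the real denominator.  */
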
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

1905 /* Convert expression ARG to type TYPE. Used by the middle-end for
1906 simple conversions in preference to calling the front-end's convert. */
1908 tree
1909 fold_convert (tree type, tree arg)
1911 tree orig = TREE_TYPE (arg);
1912 tree tem;
1914 if (type == orig)
1915 return arg;
1917 if (TREE_CODE (arg) == ERROR_MARK
1918 || TREE_CODE (type) == ERROR_MARK
1919 || TREE_CODE (orig) == ERROR_MARK)
1920 return error_mark_node;
1922 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1923 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1924 TYPE_MAIN_VARIANT (orig)))
1925 return fold_build1 (NOP_EXPR, type, arg);
1927 switch (TREE_CODE (type))
1929 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1930 case POINTER_TYPE: case REFERENCE_TYPE:
1931 case OFFSET_TYPE:
1932 if (TREE_CODE (arg) == INTEGER_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 return tem;
1938 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1939 || TREE_CODE (orig) == OFFSET_TYPE)
1940 return fold_build1 (NOP_EXPR, type, arg);
1941 if (TREE_CODE (orig) == COMPLEX_TYPE)
1943 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert (type, tem);
1946 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1947 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1948 return fold_build1 (NOP_EXPR, type, arg);
1950 case REAL_TYPE:
1951 if (TREE_CODE (arg) == INTEGER_CST)
1953 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1954 if (tem != NULL_TREE)
1955 return tem;
1957 else if (TREE_CODE (arg) == REAL_CST)
1959 tem = fold_convert_const (NOP_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1961 return tem;
1964 switch (TREE_CODE (orig))
1966 case INTEGER_TYPE: case CHAR_TYPE:
1967 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1968 case POINTER_TYPE: case REFERENCE_TYPE:
1969 return fold_build1 (FLOAT_EXPR, type, arg);
1971 case REAL_TYPE:
1972 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1973 type, arg);
1975 case COMPLEX_TYPE:
1976 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1977 return fold_convert (type, tem);
1979 default:
1980 gcc_unreachable ();
1983 case COMPLEX_TYPE:
1984 switch (TREE_CODE (orig))
1986 case INTEGER_TYPE: case CHAR_TYPE:
1987 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1988 case POINTER_TYPE: case REFERENCE_TYPE:
1989 case REAL_TYPE:
1990 return build2 (COMPLEX_EXPR, type,
1991 fold_convert (TREE_TYPE (type), arg),
1992 fold_convert (TREE_TYPE (type), integer_zero_node));
1993 case COMPLEX_TYPE:
1995 tree rpart, ipart;
1997 if (TREE_CODE (arg) == COMPLEX_EXPR)
1999 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2000 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2001 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2004 arg = save_expr (arg);
2005 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2006 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2007 rpart = fold_convert (TREE_TYPE (type), rpart);
2008 ipart = fold_convert (TREE_TYPE (type), ipart);
2009 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2012 default:
2013 gcc_unreachable ();
2016 case VECTOR_TYPE:
2017 if (integer_zerop (arg))
2018 return build_zero_vector (type);
2019 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2020 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == VECTOR_TYPE);
2022 return fold_build1 (NOP_EXPR, type, arg);
2024 case VOID_TYPE:
2025 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2027 default:
2028 gcc_unreachable ();
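/* Editorial sketch, not part of GCC: the COMPLEX_TYPE -> REAL_TYPE rule
   above (take REALPART_EXPR, then convert) matches C99's own conversion
   semantics, where converting a complex value to a real type discards
   the imaginary part.  demo_complex_to_real is an invented name.  */
#include <assert.h>
#include <complex.h>

static void
demo_complex_to_real (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  double r = (double) z;      /* C discards the imaginary part...  */
  assert (r == creal (z));    /* ...i.e. it takes the real part.  */
}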
2032 /* Return false if expr can be assumed not to be an lvalue, true
2033 otherwise. */
2035 static bool
2036 maybe_lvalue_p (tree x)
2038 /* We only need to wrap lvalue tree codes. */
2039 switch (TREE_CODE (x))
2041 case VAR_DECL:
2042 case PARM_DECL:
2043 case RESULT_DECL:
2044 case LABEL_DECL:
2045 case FUNCTION_DECL:
2046 case SSA_NAME:
2048 case COMPONENT_REF:
2049 case INDIRECT_REF:
2050 case ALIGN_INDIRECT_REF:
2051 case MISALIGNED_INDIRECT_REF:
2052 case ARRAY_REF:
2053 case ARRAY_RANGE_REF:
2054 case BIT_FIELD_REF:
2055 case OBJ_TYPE_REF:
2057 case REALPART_EXPR:
2058 case IMAGPART_EXPR:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2061 case SAVE_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2064 case COMPOUND_EXPR:
2065 case MODIFY_EXPR:
2066 case TARGET_EXPR:
2067 case COND_EXPR:
2068 case BIND_EXPR:
2069 case MIN_EXPR:
2070 case MAX_EXPR:
2071 break;
2073 default:
2074 /* Assume the worst for front-end tree codes. */
2075 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2076 break;
2077 return false;
2080 return true;
2083 /* Return an expr equal to X but certainly not valid as an lvalue. */
2085 tree
2086 non_lvalue (tree x)
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2089 us. */
2090 if (in_gimple_form)
2091 return x;
2093 if (! maybe_lvalue_p (x))
2094 return x;
2095 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2101 int pedantic_lvalues;
2103 /* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2106 static tree
2107 pedantic_non_lvalue (tree x)
2109 if (pedantic_lvalues)
2110 return non_lvalue (x);
2111 else
2112 return x;
2115 /* Given a tree comparison code, return the code that is the logical inverse
2116 of the given code. It is not safe to do this for floating-point
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a flag saying
2118 whether NaNs must be honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2120 enum tree_code
2121 invert_tree_comparison (enum tree_code code, bool honor_nans)
2123 if (honor_nans && flag_trapping_math)
2124 return ERROR_MARK;
2126 switch (code)
2128 case EQ_EXPR:
2129 return NE_EXPR;
2130 case NE_EXPR:
2131 return EQ_EXPR;
2132 case GT_EXPR:
2133 return honor_nans ? UNLE_EXPR : LE_EXPR;
2134 case GE_EXPR:
2135 return honor_nans ? UNLT_EXPR : LT_EXPR;
2136 case LT_EXPR:
2137 return honor_nans ? UNGE_EXPR : GE_EXPR;
2138 case LE_EXPR:
2139 return honor_nans ? UNGT_EXPR : GT_EXPR;
2140 case LTGT_EXPR:
2141 return UNEQ_EXPR;
2142 case UNEQ_EXPR:
2143 return LTGT_EXPR;
2144 case UNGT_EXPR:
2145 return LE_EXPR;
2146 case UNGE_EXPR:
2147 return LT_EXPR;
2148 case UNLT_EXPR:
2149 return GE_EXPR;
2150 case UNLE_EXPR:
2151 return GT_EXPR;
2152 case ORDERED_EXPR:
2153 return UNORDERED_EXPR;
2154 case UNORDERED_EXPR:
2155 return ORDERED_EXPR;
2156 default:
2157 gcc_unreachable ();
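/* Editorial sketch, not part of GCC: why GT_EXPR inverts to UNLE_EXPR
   rather than LE_EXPR when NaNs are honored.  With a NaN operand,
   !(a > b) is true while (a <= b) is false, so the correct inverse is
   "unordered or less-or-equal", shown here with the C99 comparison
   macros.  demo_invert_gt is an invented name.  */
#include <assert.h>
#include <math.h>

static void
demo_invert_gt (void)
{
  double a = NAN, b = 1.0;
  assert (!(a > b));                  /* the inverse of GT must be true */
  assert (!(a <= b));                 /* plain LE is the wrong inverse */
  assert (isunordered (a, b) || islessequal (a, b));  /* UNLE is right */
}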
2161 /* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
2164 enum tree_code
2165 swap_tree_comparison (enum tree_code code)
2167 switch (code)
2169 case EQ_EXPR:
2170 case NE_EXPR:
2171 case ORDERED_EXPR:
2172 case UNORDERED_EXPR:
2173 case LTGT_EXPR:
2174 case UNEQ_EXPR:
2175 return code;
2176 case GT_EXPR:
2177 return LT_EXPR;
2178 case GE_EXPR:
2179 return LE_EXPR;
2180 case LT_EXPR:
2181 return GT_EXPR;
2182 case LE_EXPR:
2183 return GE_EXPR;
2184 case UNGT_EXPR:
2185 return UNLT_EXPR;
2186 case UNGE_EXPR:
2187 return UNLE_EXPR;
2188 case UNLT_EXPR:
2189 return UNGT_EXPR;
2190 case UNLE_EXPR:
2191 return UNGE_EXPR;
2192 default:
2193 gcc_unreachable ();
2198 /* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2202 static enum comparison_code
2203 comparison_to_compcode (enum tree_code code)
2205 switch (code)
2207 case LT_EXPR:
2208 return COMPCODE_LT;
2209 case EQ_EXPR:
2210 return COMPCODE_EQ;
2211 case LE_EXPR:
2212 return COMPCODE_LE;
2213 case GT_EXPR:
2214 return COMPCODE_GT;
2215 case NE_EXPR:
2216 return COMPCODE_NE;
2217 case GE_EXPR:
2218 return COMPCODE_GE;
2219 case ORDERED_EXPR:
2220 return COMPCODE_ORD;
2221 case UNORDERED_EXPR:
2222 return COMPCODE_UNORD;
2223 case UNLT_EXPR:
2224 return COMPCODE_UNLT;
2225 case UNEQ_EXPR:
2226 return COMPCODE_UNEQ;
2227 case UNLE_EXPR:
2228 return COMPCODE_UNLE;
2229 case UNGT_EXPR:
2230 return COMPCODE_UNGT;
2231 case LTGT_EXPR:
2232 return COMPCODE_LTGT;
2233 case UNGE_EXPR:
2234 return COMPCODE_UNGE;
2235 default:
2236 gcc_unreachable ();
2240 /* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2244 static enum tree_code
2245 compcode_to_comparison (enum comparison_code code)
2247 switch (code)
2249 case COMPCODE_LT:
2250 return LT_EXPR;
2251 case COMPCODE_EQ:
2252 return EQ_EXPR;
2253 case COMPCODE_LE:
2254 return LE_EXPR;
2255 case COMPCODE_GT:
2256 return GT_EXPR;
2257 case COMPCODE_NE:
2258 return NE_EXPR;
2259 case COMPCODE_GE:
2260 return GE_EXPR;
2261 case COMPCODE_ORD:
2262 return ORDERED_EXPR;
2263 case COMPCODE_UNORD:
2264 return UNORDERED_EXPR;
2265 case COMPCODE_UNLT:
2266 return UNLT_EXPR;
2267 case COMPCODE_UNEQ:
2268 return UNEQ_EXPR;
2269 case COMPCODE_UNLE:
2270 return UNLE_EXPR;
2271 case COMPCODE_UNGT:
2272 return UNGT_EXPR;
2273 case COMPCODE_LTGT:
2274 return LTGT_EXPR;
2275 case COMPCODE_UNGE:
2276 return UNGE_EXPR;
2277 default:
2278 gcc_unreachable ();
2282 /* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
2288 tree
2289 combine_comparisons (enum tree_code code, enum tree_code lcode,
2290 enum tree_code rcode, tree truth_type,
2291 tree ll_arg, tree lr_arg)
2293 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2294 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2295 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2296 enum comparison_code compcode;
2298 switch (code)
2300 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2301 compcode = lcompcode & rcompcode;
2302 break;
2304 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2305 compcode = lcompcode | rcompcode;
2306 break;
2308 default:
2309 return NULL_TREE;
2312 if (!honor_nans)
2314 /* Eliminate unordered comparisons, as well as LTGT and ORD
2315 which are not used unless the mode has NaNs. */
2316 compcode &= ~COMPCODE_UNORD;
2317 if (compcode == COMPCODE_LTGT)
2318 compcode = COMPCODE_NE;
2319 else if (compcode == COMPCODE_ORD)
2320 compcode = COMPCODE_TRUE;
2322 else if (flag_trapping_math)
2324 /* Check that the original operation and the optimized ones will trap
2325 under the same condition. */
2326 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2327 && (lcompcode != COMPCODE_EQ)
2328 && (lcompcode != COMPCODE_ORD);
2329 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2330 && (rcompcode != COMPCODE_EQ)
2331 && (rcompcode != COMPCODE_ORD);
2332 bool trap = (compcode & COMPCODE_UNORD) == 0
2333 && (compcode != COMPCODE_EQ)
2334 && (compcode != COMPCODE_ORD);
2336 /* In a short-circuited boolean expression the LHS might be
2337 such that the RHS, if evaluated, will never trap. For
2338 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2339 if neither x nor y is NaN. (This is a mixed blessing: for
2340 example, the expression above will never trap, hence
2341 optimizing it to x < y would be invalid). */
2342 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2343 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2344 rtrap = false;
2346 /* If the comparison was short-circuited, and only the RHS
2347 trapped, we may now generate a spurious trap. */
2348 if (rtrap && !ltrap
2349 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2350 return NULL_TREE;
2352 /* If we changed the conditions that cause a trap, we lose. */
2353 if ((ltrap || rtrap) != trap)
2354 return NULL_TREE;
2357 if (compcode == COMPCODE_TRUE)
2358 return constant_boolean_node (true, truth_type);
2359 else if (compcode == COMPCODE_FALSE)
2360 return constant_boolean_node (false, truth_type);
2361 else
2362 return fold_build2 (compcode_to_comparison (compcode),
2363 truth_type, ll_arg, lr_arg);
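/* Editorial sketch, not part of GCC: the AND/OR of compcodes above is
   plain bit arithmetic.  With one bit each for LT, EQ, GT and UNORD, as
   in the encoding comparison_to_compcode produces, (x < y) || (x == y)
   combines to LT|EQ, i.e. LE, and (x < y) && (x == y) combines to 0,
   i.e. always false.  The demo_* enum is a local stand-in.  */
#include <assert.h>

enum demo_compcode { DEMO_LT = 1, DEMO_EQ = 2, DEMO_GT = 4, DEMO_UNORD = 8 };

static void
demo_combine_compcodes (void)
{
  assert ((DEMO_LT | DEMO_EQ) == 3);          /* LT or EQ: LE */
  assert ((DEMO_LT & DEMO_EQ) == 0);          /* LT and EQ: always false */
  /* Without NaNs, stripping the UNORD bit from NE leaves LTGT, which
     the code above then canonicalizes back to NE.  */
  assert (((DEMO_LT | DEMO_GT | DEMO_UNORD) & ~DEMO_UNORD)
          == (DEMO_LT | DEMO_GT));
}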
2366 /* Return nonzero if CODE is a tree code that represents a truth value. */
2368 static int
2369 truth_value_p (enum tree_code code)
2371 return (TREE_CODE_CLASS (code) == tcc_comparison
2372 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2373 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2374 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2377 /* Return nonzero if two operands (typically of the same tree node)
2378 are necessarily equal. If either argument has side-effects this
2379 function returns zero. FLAGS modifies behavior as follows:
2381 If OEP_ONLY_CONST is set, only return nonzero for constants.
2382 This function tests whether the operands are indistinguishable;
2383 it does not test whether they are equal using C's == operation.
2384 The distinction is important for IEEE floating point, because
2385 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2386 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2388 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2389 even though it may hold multiple values during a function.
2390 This is because a GCC tree node guarantees that nothing else is
2391 executed between the evaluation of its "operands" (which may often
2392 be evaluated in arbitrary order). Hence if the operands themselves
2393 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2394 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2395 unset means assuming isochronic (or instantaneous) tree equivalence.
2396 Unless comparing arbitrary expression trees, such as from different
2397 statements, this flag can usually be left unset.
2399 If OEP_PURE_SAME is set, then pure functions with identical arguments
2400 are considered the same. It is used when the caller has other ways
2401 to ensure that global memory is unchanged in between. */
2403 int
2404 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2406 /* If either is ERROR_MARK, they aren't equal. */
2407 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2408 return 0;
2410 /* If the two types don't have the same signedness, then we can't consider
2411 them equal. We must check this before the STRIP_NOPS calls
2412 because they may change the signedness of the arguments. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2414 return 0;
2416 STRIP_NOPS (arg0);
2417 STRIP_NOPS (arg1);
2419 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2420 /* This is needed for conversions and for COMPONENT_REF.
2421 Might as well play it safe and always test this. */
2422 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2423 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2424 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2425 return 0;
2427 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2428 We don't care about side effects in that case because the SAVE_EXPR
2429 takes care of that for us. In all other cases, two expressions are
2430 equal if they have no side effects. If we have two identical
2431 expressions with side effects that should be treated the same due
2432 to the only side effects being identical SAVE_EXPR's, that will
2433 be detected in the recursive calls below. */
2434 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2435 && (TREE_CODE (arg0) == SAVE_EXPR
2436 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2437 return 1;
2439 /* Next handle constant cases, those for which we can return 1 even
2440 if ONLY_CONST is set. */
2441 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2442 switch (TREE_CODE (arg0))
2444 case INTEGER_CST:
2445 return (! TREE_CONSTANT_OVERFLOW (arg0)
2446 && ! TREE_CONSTANT_OVERFLOW (arg1)
2447 && tree_int_cst_equal (arg0, arg1));
2449 case REAL_CST:
2450 return (! TREE_CONSTANT_OVERFLOW (arg0)
2451 && ! TREE_CONSTANT_OVERFLOW (arg1)
2452 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2453 TREE_REAL_CST (arg1)));
2455 case VECTOR_CST:
2457 tree v1, v2;
2459 if (TREE_CONSTANT_OVERFLOW (arg0)
2460 || TREE_CONSTANT_OVERFLOW (arg1))
2461 return 0;
2463 v1 = TREE_VECTOR_CST_ELTS (arg0);
2464 v2 = TREE_VECTOR_CST_ELTS (arg1);
2465 while (v1 && v2)
2467 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2468 flags))
2469 return 0;
2470 v1 = TREE_CHAIN (v1);
2471 v2 = TREE_CHAIN (v2);
2474 return v1 == v2;
2477 case COMPLEX_CST:
2478 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2479 flags)
2480 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2481 flags));
2483 case STRING_CST:
2484 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2485 && ! memcmp (TREE_STRING_POINTER (arg0),
2486 TREE_STRING_POINTER (arg1),
2487 TREE_STRING_LENGTH (arg0)));
2489 case ADDR_EXPR:
2490 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2491 0);
2492 default:
2493 break;
2496 if (flags & OEP_ONLY_CONST)
2497 return 0;
2499 /* Define macros to test an operand from arg0 and arg1 for equality and a
2500 variant that allows null and views null as being different from any
2501 non-null value. In the latter case, if either is null, they both
2502 must be; otherwise, do the normal comparison. */
2503 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2504 TREE_OPERAND (arg1, N), flags)
2506 #define OP_SAME_WITH_NULL(N) \
2507 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2508 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2510 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2512 case tcc_unary:
2513 /* Two conversions are equal only if signedness and modes match. */
2514 switch (TREE_CODE (arg0))
2516 case NOP_EXPR:
2517 case CONVERT_EXPR:
2518 case FIX_CEIL_EXPR:
2519 case FIX_TRUNC_EXPR:
2520 case FIX_FLOOR_EXPR:
2521 case FIX_ROUND_EXPR:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2524 return 0;
2525 break;
2526 default:
2527 break;
2530 return OP_SAME (0);
2533 case tcc_comparison:
2534 case tcc_binary:
2535 if (OP_SAME (0) && OP_SAME (1))
2536 return 1;
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0))
2540 && operand_equal_p (TREE_OPERAND (arg0, 0),
2541 TREE_OPERAND (arg1, 1), flags)
2542 && operand_equal_p (TREE_OPERAND (arg0, 1),
2543 TREE_OPERAND (arg1, 0), flags));
2545 case tcc_reference:
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1))
2550 return 0;
2552 switch (TREE_CODE (arg0))
2554 case INDIRECT_REF:
2555 case ALIGN_INDIRECT_REF:
2556 case MISALIGNED_INDIRECT_REF:
2557 case REALPART_EXPR:
2558 case IMAGPART_EXPR:
2559 return OP_SAME (0);
2561 case ARRAY_REF:
2562 case ARRAY_RANGE_REF:
2563 /* Operands 2 and 3 may be null. */
2564 return (OP_SAME (0)
2565 && OP_SAME (1)
2566 && OP_SAME_WITH_NULL (2)
2567 && OP_SAME_WITH_NULL (3));
2569 case COMPONENT_REF:
2570 /* Handle operand 2 the same as for ARRAY_REF. */
2571 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2573 case BIT_FIELD_REF:
2574 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2576 default:
2577 return 0;
2580 case tcc_expression:
2581 switch (TREE_CODE (arg0))
2583 case ADDR_EXPR:
2584 case TRUTH_NOT_EXPR:
2585 return OP_SAME (0);
2587 case TRUTH_ANDIF_EXPR:
2588 case TRUTH_ORIF_EXPR:
2589 return OP_SAME (0) && OP_SAME (1);
2591 case TRUTH_AND_EXPR:
2592 case TRUTH_OR_EXPR:
2593 case TRUTH_XOR_EXPR:
2594 if (OP_SAME (0) && OP_SAME (1))
2595 return 1;
2597 /* Otherwise take into account this is a commutative operation. */
2598 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2599 TREE_OPERAND (arg1, 1), flags)
2600 && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 TREE_OPERAND (arg1, 0), flags));
2603 case CALL_EXPR:
2604 /* If the CALL_EXPRs call different functions, then they
2605 clearly cannot be equal. */
2606 if (!OP_SAME (0))
2607 return 0;
2610 unsigned int cef = call_expr_flags (arg0);
2611 if (flags & OEP_PURE_SAME)
2612 cef &= ECF_CONST | ECF_PURE;
2613 else
2614 cef &= ECF_CONST;
2615 if (!cef)
2616 return 0;
2619 /* Now see if all the arguments are the same. operand_equal_p
2620 does not handle TREE_LIST, so we walk the operands here
2621 feeding them to operand_equal_p. */
2622 arg0 = TREE_OPERAND (arg0, 1);
2623 arg1 = TREE_OPERAND (arg1, 1);
2624 while (arg0 && arg1)
2626 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2627 flags))
2628 return 0;
2630 arg0 = TREE_CHAIN (arg0);
2631 arg1 = TREE_CHAIN (arg1);
2634 /* If we get here and both argument lists are exhausted
2635 then the CALL_EXPRs are equal. */
2636 return ! (arg0 || arg1);
2638 default:
2639 return 0;
2642 case tcc_declaration:
2643 /* Consider __builtin_sqrt equal to sqrt. */
2644 return (TREE_CODE (arg0) == FUNCTION_DECL
2645 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2646 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2647 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2649 default:
2650 return 0;
2653 #undef OP_SAME
2654 #undef OP_SAME_WITH_NULL
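/* Editorial sketch, not part of GCC: the IEEE caveats stated before
   operand_equal_p, checked directly.  -0.0 == 0.0 under C's == even
   though the values are distinguishable, and a NaN compares unequal to
   itself; this is why the function tests indistinguishability rather
   than ==.  demo_ieee_equality is an invented name.  */
#include <assert.h>
#include <math.h>

static void
demo_ieee_equality (void)
{
  double pz = 0.0, nz = -0.0;
  assert (pz == nz);                        /* equal under ==  */
  assert (signbit (pz) != signbit (nz));    /* yet distinguishable */

  double n = NAN;
  assert (n != n);                          /* NaN != NaN under == */
}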
2657 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2658 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2660 When in doubt, return 0. */
2662 static int
2663 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2665 int unsignedp1, unsignedpo;
2666 tree primarg0, primarg1, primother;
2667 unsigned int correct_width;
2669 if (operand_equal_p (arg0, arg1, 0))
2670 return 1;
2672 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2673 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2674 return 0;
2676 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2677 and see if the inner values are the same. This removes any
2678 signedness comparison, which doesn't matter here. */
2679 primarg0 = arg0, primarg1 = arg1;
2680 STRIP_NOPS (primarg0);
2681 STRIP_NOPS (primarg1);
2682 if (operand_equal_p (primarg0, primarg1, 0))
2683 return 1;
2685 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2686 actual comparison operand, ARG0.
2688 First throw away any conversions to wider types
2689 already present in the operands. */
2691 primarg1 = get_narrower (arg1, &unsignedp1);
2692 primother = get_narrower (other, &unsignedpo);
2694 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2695 if (unsignedp1 == unsignedpo
2696 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2697 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2699 tree type = TREE_TYPE (arg0);
2701 /* Make sure shorter operand is extended the right way
2702 to match the longer operand. */
2703 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2704 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2706 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2707 return 1;
2710 return 0;
2713 /* See if ARG is an expression that is either a comparison or is performing
2714 arithmetic on comparisons. The comparisons must only be comparing
2715 two different values, which will be stored in *CVAL1 and *CVAL2; if
2716 they are nonzero it means that some operands have already been found.
2717 No variables may be used anywhere else in the expression except in the
2718 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2719 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2721 If this is true, return 1. Otherwise, return zero. */
2723 static int
2724 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2726 enum tree_code code = TREE_CODE (arg);
2727 enum tree_code_class class = TREE_CODE_CLASS (code);
2729 /* We can handle some of the tcc_expression cases here. */
2730 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2731 class = tcc_unary;
2732 else if (class == tcc_expression
2733 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2734 || code == COMPOUND_EXPR))
2735 class = tcc_binary;
2737 else if (class == tcc_expression && code == SAVE_EXPR
2738 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2740 /* If we've already found a CVAL1 or CVAL2, this expression is
2741 too complex to handle. */
2742 if (*cval1 || *cval2)
2743 return 0;
2745 class = tcc_unary;
2746 *save_p = 1;
2749 switch (class)
2751 case tcc_unary:
2752 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2754 case tcc_binary:
2755 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2756 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2757 cval1, cval2, save_p));
2759 case tcc_constant:
2760 return 1;
2762 case tcc_expression:
2763 if (code == COND_EXPR)
2764 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2765 cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p)
2768 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2769 cval1, cval2, save_p));
2770 return 0;
2772 case tcc_comparison:
2773 /* First see if we can handle the first operand, then the second. For
2774 the second operand, we know *CVAL1 can't be zero. It must be that
2775 one side of the comparison is each of the values; test for the
2776 case where this isn't true by failing if the two operands
2777 are the same. */
2779 if (operand_equal_p (TREE_OPERAND (arg, 0),
2780 TREE_OPERAND (arg, 1), 0))
2781 return 0;
2783 if (*cval1 == 0)
2784 *cval1 = TREE_OPERAND (arg, 0);
2785 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2786 ;
2787 else if (*cval2 == 0)
2788 *cval2 = TREE_OPERAND (arg, 0);
2789 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2790 ;
2791 else
2792 return 0;
2794 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2795 ;
2796 else if (*cval2 == 0)
2797 *cval2 = TREE_OPERAND (arg, 1);
2798 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2799 ;
2800 else
2801 return 0;
2803 return 1;
2805 default:
2806 return 0;
2810 /* ARG is a tree that is known to contain just arithmetic operations and
2811 comparisons. Evaluate the operations in the tree substituting NEW0 for
2812 any occurrence of OLD0 as an operand of a comparison and likewise for
2813 NEW1 and OLD1. */
2815 static tree
2816 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2818 tree type = TREE_TYPE (arg);
2819 enum tree_code code = TREE_CODE (arg);
2820 enum tree_code_class class = TREE_CODE_CLASS (code);
2822 /* We can handle some of the tcc_expression cases here. */
2823 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2824 class = tcc_unary;
2825 else if (class == tcc_expression
2826 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2827 class = tcc_binary;
2829 switch (class)
2831 case tcc_unary:
2832 return fold_build1 (code, type,
2833 eval_subst (TREE_OPERAND (arg, 0),
2834 old0, new0, old1, new1));
2836 case tcc_binary:
2837 return fold_build2 (code, type,
2838 eval_subst (TREE_OPERAND (arg, 0),
2839 old0, new0, old1, new1),
2840 eval_subst (TREE_OPERAND (arg, 1),
2841 old0, new0, old1, new1));
2843 case tcc_expression:
2844 switch (code)
2846 case SAVE_EXPR:
2847 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2849 case COMPOUND_EXPR:
2850 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2852 case COND_EXPR:
2853 return fold_build3 (code, type,
2854 eval_subst (TREE_OPERAND (arg, 0),
2855 old0, new0, old1, new1),
2856 eval_subst (TREE_OPERAND (arg, 1),
2857 old0, new0, old1, new1),
2858 eval_subst (TREE_OPERAND (arg, 2),
2859 old0, new0, old1, new1));
2860 default:
2861 break;
2863 /* Fall through - ??? */
2865 case tcc_comparison:
2867 tree arg0 = TREE_OPERAND (arg, 0);
2868 tree arg1 = TREE_OPERAND (arg, 1);
2870 /* We need to check both for exact equality and tree equality. The
2871 former will be true if the operand has a side-effect. In that
2872 case, we know the operand occurred exactly once. */
2874 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2875 arg0 = new0;
2876 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2877 arg0 = new1;
2879 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2880 arg1 = new0;
2881 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2882 arg1 = new1;
2884 return fold_build2 (code, type, arg0, arg1);
2887 default:
2888 return arg;
2892 /* Return a tree for the case when the result of an expression is RESULT
2893 converted to TYPE and OMITTED was previously an operand of the expression
2894 but is now not needed (e.g., we folded OMITTED * 0).
2896 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2897 the conversion of RESULT to TYPE. */
2899 tree
2900 omit_one_operand (tree type, tree result, tree omitted)
2902 tree t = fold_convert (type, result);
2904 if (TREE_SIDE_EFFECTS (omitted))
2905 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2907 return non_lvalue (t);
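/* Editorial sketch, not part of GCC: the COMPOUND_EXPR built above is
   the tree form of C's comma operator.  Folding f () * 0 down to 0 must
   still evaluate f () for its side effects, which "(f (), 0)" does.
   The names below are invented for illustration.  */
#include <assert.h>

static int demo_call_count;

static int
demo_f (void)
{
  return ++demo_call_count;
}

static void
demo_omit_one_operand (void)
{
  int r = (demo_f (), 0);     /* folded form: evaluate demo_f, yield 0 */
  assert (r == 0 && demo_call_count == 1);
}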
2910 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2912 static tree
2913 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2915 tree t = fold_convert (type, result);
2917 if (TREE_SIDE_EFFECTS (omitted))
2918 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2920 return pedantic_non_lvalue (t);
2923 /* Return a tree for the case when the result of an expression is RESULT
2924 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2925 of the expression but are now not needed.
2927 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2928 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2929 evaluated before OMITTED2. Otherwise, if neither has side effects,
2930 just do the conversion of RESULT to TYPE. */
2932 tree
2933 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2935 tree t = fold_convert (type, result);
2937 if (TREE_SIDE_EFFECTS (omitted2))
2938 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2939 if (TREE_SIDE_EFFECTS (omitted1))
2940 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2942 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2946 /* Return a simplified tree node for the truth-negation of ARG. This
2947 never alters ARG itself. We assume that ARG is an operation that
2948 returns a truth value (0 or 1).
2950 FIXME: one would think we would fold the result, but it causes
2951 problems with the dominator optimizer. */
2952 tree
2953 invert_truthvalue (tree arg)
2955 tree type = TREE_TYPE (arg);
2956 enum tree_code code = TREE_CODE (arg);
2958 if (code == ERROR_MARK)
2959 return arg;
2961 /* If this is a comparison, we can simply invert it, except for
2962 floating-point non-equality comparisons, in which case we just
2963 enclose a TRUTH_NOT_EXPR around what we have. */
2965 if (TREE_CODE_CLASS (code) == tcc_comparison)
2967 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2968 if (FLOAT_TYPE_P (op_type)
2969 && flag_trapping_math
2970 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2971 && code != NE_EXPR && code != EQ_EXPR)
2972 return build1 (TRUTH_NOT_EXPR, type, arg);
2973 else
2975 code = invert_tree_comparison (code,
2976 HONOR_NANS (TYPE_MODE (op_type)));
2977 if (code == ERROR_MARK)
2978 return build1 (TRUTH_NOT_EXPR, type, arg);
2979 else
2980 return build2 (code, type,
2981 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2985 switch (code)
2987 case INTEGER_CST:
2988 return constant_boolean_node (integer_zerop (arg), type);
2990 case TRUTH_AND_EXPR:
2991 return build2 (TRUTH_OR_EXPR, type,
2992 invert_truthvalue (TREE_OPERAND (arg, 0)),
2993 invert_truthvalue (TREE_OPERAND (arg, 1)));
2995 case TRUTH_OR_EXPR:
2996 return build2 (TRUTH_AND_EXPR, type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)),
2998 invert_truthvalue (TREE_OPERAND (arg, 1)));
3000 case TRUTH_XOR_EXPR:
3001 /* Here we can invert either operand. We invert the first operand
3002 unless the second operand is a TRUTH_NOT_EXPR in which case our
3003 result is the XOR of the first operand with the inside of the
3004 negation of the second operand. */
3006 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3007 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3008 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3009 else
3010 return build2 (TRUTH_XOR_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)),
3012 TREE_OPERAND (arg, 1));
3014 case TRUTH_ANDIF_EXPR:
3015 return build2 (TRUTH_ORIF_EXPR, type,
3016 invert_truthvalue (TREE_OPERAND (arg, 0)),
3017 invert_truthvalue (TREE_OPERAND (arg, 1)));
3019 case TRUTH_ORIF_EXPR:
3020 return build2 (TRUTH_ANDIF_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 invert_truthvalue (TREE_OPERAND (arg, 1)));
3024 case TRUTH_NOT_EXPR:
3025 return TREE_OPERAND (arg, 0);
3027 case COND_EXPR:
3028 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3029 invert_truthvalue (TREE_OPERAND (arg, 1)),
3030 invert_truthvalue (TREE_OPERAND (arg, 2)));
3032 case COMPOUND_EXPR:
3033 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3034 invert_truthvalue (TREE_OPERAND (arg, 1)));
3036 case NON_LVALUE_EXPR:
3037 return invert_truthvalue (TREE_OPERAND (arg, 0));
3039 case NOP_EXPR:
3040 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3041 break;
3043 case CONVERT_EXPR:
3044 case FLOAT_EXPR:
3045 return build1 (TREE_CODE (arg), type,
3046 invert_truthvalue (TREE_OPERAND (arg, 0)));
3048 case BIT_AND_EXPR:
3049 if (!integer_onep (TREE_OPERAND (arg, 1)))
3050 break;
3051 return build2 (EQ_EXPR, type, arg,
3052 fold_convert (type, integer_zero_node));
3054 case SAVE_EXPR:
3055 return build1 (TRUTH_NOT_EXPR, type, arg);
3057 case CLEANUP_POINT_EXPR:
3058 return build1 (CLEANUP_POINT_EXPR, type,
3059 invert_truthvalue (TREE_OPERAND (arg, 0)));
3061 default:
3062 break;
3064 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3065 return build1 (TRUTH_NOT_EXPR, type, arg);
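/* Editorial sketch, not part of GCC: the TRUTH_XOR_EXPR rule above --
   an XOR is negated by negating just one operand -- checked over all
   truth-value inputs.  demo_invert_xor is an invented name.  */
#include <assert.h>

static void
demo_invert_xor (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      assert ((!(a ^ b)) == ((!a) ^ b));
}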
3068 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3069 operands are another bit-wise operation with a common input. If so,
3070 distribute the bit operations to save an operation and possibly two if
3071 constants are involved. For example, convert
3072 (A | B) & (A | C) into A | (B & C)
3073 Further simplification will occur if B and C are constants.
3075 If this optimization cannot be done, 0 will be returned. */
3077 static tree
3078 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3080 tree common;
3081 tree left, right;
3083 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3084 || TREE_CODE (arg0) == code
3085 || (TREE_CODE (arg0) != BIT_AND_EXPR
3086 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3087 return 0;
3089 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3091 common = TREE_OPERAND (arg0, 0);
3092 left = TREE_OPERAND (arg0, 1);
3093 right = TREE_OPERAND (arg1, 1);
3095 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3097 common = TREE_OPERAND (arg0, 0);
3098 left = TREE_OPERAND (arg0, 1);
3099 right = TREE_OPERAND (arg1, 0);
3101 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3103 common = TREE_OPERAND (arg0, 1);
3104 left = TREE_OPERAND (arg0, 0);
3105 right = TREE_OPERAND (arg1, 1);
3107 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3109 common = TREE_OPERAND (arg0, 1);
3110 left = TREE_OPERAND (arg0, 0);
3111 right = TREE_OPERAND (arg1, 0);
3113 else
3114 return 0;
3116 return fold_build2 (TREE_CODE (arg0), type, common,
3117 fold_build2 (code, type, left, right));
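/* Editorial sketch, not part of GCC: the distribution identity used by
   distribute_bit_expr, (A | B) & (A | C) == A | (B & C), together with
   its dual, checked on a couple of arbitrary bit patterns.  It holds
   for all inputs, since | and & distribute over each other.  */
#include <assert.h>

static void
demo_distribute_bits (void)
{
  unsigned tests[2][3] = { { 0xf0u, 0x0fu, 0x33u }, { 0u, ~0u, 0x5au } };
  for (int i = 0; i < 2; i++)
    {
      unsigned a = tests[i][0], b = tests[i][1], c = tests[i][2];
      assert (((a | b) & (a | c)) == (a | (b & c)));
      assert (((a & b) | (a & c)) == (a & (b | c)));
    }
}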
3120 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3121 with code CODE. This optimization is unsafe. */
3122 static tree
3123 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3125 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3126 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3128 /* (A / C) +- (B / C) -> (A +- B) / C. */
3129 if (mul0 == mul1
3130 && operand_equal_p (TREE_OPERAND (arg0, 1),
3131 TREE_OPERAND (arg1, 1), 0))
3132 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3133 fold_build2 (code, type,
3134 TREE_OPERAND (arg0, 0),
3135 TREE_OPERAND (arg1, 0)),
3136 TREE_OPERAND (arg0, 1));
3138 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3139 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3140 TREE_OPERAND (arg1, 0), 0)
3141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3142 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3144 REAL_VALUE_TYPE r0, r1;
3145 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3146 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3147 if (!mul0)
3148 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3149 if (!mul1)
3150 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3151 real_arithmetic (&r0, code, &r0, &r1);
3152 return fold_build2 (MULT_EXPR, type,
3153 TREE_OPERAND (arg0, 0),
3154 build_real (type, r0));
3157 return NULL_TREE;
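/* Editorial sketch, not part of GCC: why the comment above calls this
   transformation unsafe.  A/C + B/C rounds twice while (A + B)/C rounds
   once, so the results can differ in the last bit.  The assertion below
   assumes IEEE double arithmetic with round-to-nearest.  */
#include <assert.h>

static void
demo_rdiv_unsafe (void)
{
  double a = 1.0, b = 2.0, c = 10.0;
  double lhs = a / c + b / c;   /* 0.1 + 0.2 -> 0.30000000000000004... */
  double rhs = (a + b) / c;     /* 3.0 / 10.0 -> nearest double to 0.3 */
  assert (lhs != rhs);          /* the "same" value, computed two ways */
}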
3160 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3161 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3163 static tree
3164 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3165 int unsignedp)
3167 tree result;
3169 if (bitpos == 0)
3171 tree size = TYPE_SIZE (TREE_TYPE (inner));
3172 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3173 || POINTER_TYPE_P (TREE_TYPE (inner)))
3174 && host_integerp (size, 0)
3175 && tree_low_cst (size, 0) == bitsize)
3176 return fold_convert (type, inner);
3179 result = build3 (BIT_FIELD_REF, type, inner,
3180 size_int (bitsize), bitsize_int (bitpos));
3182 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3184 return result;
3187 /* Optimize a bit-field compare.
3189 There are two cases: First is a compare against a constant and the
3190 second is a comparison of two items where the fields are at the same
3191 bit position relative to the start of a chunk (byte, halfword, word)
3192 large enough to contain it. In these cases we can avoid the shift
3193 implicit in bitfield extractions.
3195 For constants, we emit a compare of the shifted constant with the
3196 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3197 compared. For two fields at the same position, we do the ANDs with the
3198 similar mask and compare the result of the ANDs.
3200 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3201 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3202 are the left and right operands of the comparison, respectively.
3204 If the optimization described above can be done, we return the resulting
3205 tree. Otherwise we return zero. */
3207 static tree
3208 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3209 tree lhs, tree rhs)
3211 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3212 tree type = TREE_TYPE (lhs);
3213 tree signed_type, unsigned_type;
3214 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3215 enum machine_mode lmode, rmode, nmode;
3216 int lunsignedp, runsignedp;
3217 int lvolatilep = 0, rvolatilep = 0;
3218 tree linner, rinner = NULL_TREE;
3219 tree mask;
3220 tree offset;
3222 /* Get all the information about the extractions being done. If the bit size
3223 is the same as the size of the underlying object, we aren't doing an
3224 extraction at all and so can do nothing. We also don't want to
3225 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3226 then will no longer be able to replace it. */
3227 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3228 &lunsignedp, &lvolatilep, false);
3229 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3230 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3231 return 0;
3233 if (!const_p)
3235 /* If this is not a constant, we can only do something if bit positions,
3236 sizes, and signedness are the same. */
3237 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3238 &runsignedp, &rvolatilep, false);
3240 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3241 || lunsignedp != runsignedp || offset != 0
3242 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3243 return 0;
3246 /* See if we can find a mode to refer to this field. We should be able to,
3247 but fail if we can't. */
3248 nmode = get_best_mode (lbitsize, lbitpos,
3249 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3250 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3251 TYPE_ALIGN (TREE_TYPE (rinner))),
3252 word_mode, lvolatilep || rvolatilep);
3253 if (nmode == VOIDmode)
3254 return 0;
3256 /* Set signed and unsigned types of the precision of this mode for the
3257 shifts below. */
3258 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3259 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3261 /* Compute the bit position and size for the new reference and our offset
3262 within it. If the new reference is the same size as the original, we
3263 won't optimize anything, so return zero. */
3264 nbitsize = GET_MODE_BITSIZE (nmode);
3265 nbitpos = lbitpos & ~ (nbitsize - 1);
3266 lbitpos -= nbitpos;
3267 if (nbitsize == lbitsize)
3268 return 0;
3270 if (BYTES_BIG_ENDIAN)
3271 lbitpos = nbitsize - lbitsize - lbitpos;
3273 /* Make the mask to be used against the extracted field. */
3274 mask = build_int_cst (unsigned_type, -1);
3275 mask = force_fit_type (mask, 0, false, false);
3276 mask = fold_convert (unsigned_type, mask);
3277 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3278 mask = const_binop (RSHIFT_EXPR, mask,
3279 size_int (nbitsize - lbitsize - lbitpos), 0);
3281 if (! const_p)
3282 /* If not comparing with constant, just rework the comparison
3283 and return. */
3284 return build2 (code, compare_type,
3285 build2 (BIT_AND_EXPR, unsigned_type,
3286 make_bit_field_ref (linner, unsigned_type,
3287 nbitsize, nbitpos, 1),
3288 mask),
3289 build2 (BIT_AND_EXPR, unsigned_type,
3290 make_bit_field_ref (rinner, unsigned_type,
3291 nbitsize, nbitpos, 1),
3292 mask));
3294 /* Otherwise, we are handling the constant case. See if the constant is too
3295 big for the field. Warn and return a tree for 0 (false) if so. We do
3296 this not only for its own sake, but to avoid having to test for this
3297 error case below. If we didn't, we might generate wrong code.
3299 For unsigned fields, the constant shifted right by the field length should
3300 be all zero. For signed fields, the high-order bits should agree with
3301 the sign bit. */
3303 if (lunsignedp)
3305 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3306 fold_convert (unsigned_type, rhs),
3307 size_int (lbitsize), 0)))
3309 warning (0, "comparison is always %d due to width of bit-field",
3310 code == NE_EXPR);
3311 return constant_boolean_node (code == NE_EXPR, compare_type);
3314 else
3316 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3317 size_int (lbitsize - 1), 0);
3318 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3320 warning (0, "comparison is always %d due to width of bit-field",
3321 code == NE_EXPR);
3322 return constant_boolean_node (code == NE_EXPR, compare_type);
3326 /* Single-bit compares should always be against zero. */
3327 if (lbitsize == 1 && ! integer_zerop (rhs))
3329 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3330 rhs = fold_convert (type, integer_zero_node);
3333 /* Make a new bitfield reference, shift the constant over the
3334 appropriate number of bits and mask it with the computed mask
3335 (in case this was a signed field). If we changed it, make a new one. */
3336 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3337 if (lvolatilep)
3339 TREE_SIDE_EFFECTS (lhs) = 1;
3340 TREE_THIS_VOLATILE (lhs) = 1;
3343 rhs = fold (const_binop (BIT_AND_EXPR,
3344 const_binop (LSHIFT_EXPR,
3345 fold_convert (unsigned_type, rhs),
3346 size_int (lbitpos), 0),
3347 mask, 0));
3349 return build2 (code, compare_type,
3350 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3351 rhs);
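/* Editorial sketch, not part of GCC: the mask arithmetic used above,
   for a hypothetical 8-bit field at bit position 3 of a 32-bit unit
   (little-endian bit numbering).  Shifting all-ones left by
   nbitsize - lbitsize and then right by nbitsize - lbitsize - lbitpos
   leaves exactly lbitsize one bits starting at bit lbitpos.  */
#include <assert.h>
#include <stdint.h>

static void
demo_bit_field_mask (void)
{
  const int nbitsize = 32, lbitsize = 8, lbitpos = 3;
  uint32_t mask = UINT32_MAX;
  mask <<= nbitsize - lbitsize;             /* 0xff000000 */
  mask >>= nbitsize - lbitsize - lbitpos;   /* 0x000007f8 */
  assert (mask == ((UINT32_MAX >> (nbitsize - lbitsize)) << lbitpos));
}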
3354 /* Subroutine for fold_truthop: decode a field reference.
3356 If EXP is a comparison reference, we return the innermost reference.
3358 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3359 set to the starting bit number.
3361 If the innermost field can be completely contained in a mode-sized
3362 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3364 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3365 otherwise it is not changed.
3367 *PUNSIGNEDP is set to the signedness of the field.
3369 *PMASK is set to the mask used. This is either contained in a
3370 BIT_AND_EXPR or derived from the width of the field.
3372 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3374 Return 0 if this is not a component reference or is one that we can't
3375 do anything with. */
3377 static tree
3378 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3379 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3380 int *punsignedp, int *pvolatilep,
3381 tree *pmask, tree *pand_mask)
3383 tree outer_type = 0;
3384 tree and_mask = 0;
3385 tree mask, inner, offset;
3386 tree unsigned_type;
3387 unsigned int precision;
3389 /* All the optimizations using this function assume integer fields.
3390 There are problems with FP fields since the type_for_size call
3391 below can fail for, e.g., XFmode. */
3392 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3393 return 0;
3395 /* We are interested in the bare arrangement of bits, so strip everything
3396 that doesn't affect the machine mode. However, record the type of the
3397 outermost expression if it may matter below. */
3398 if (TREE_CODE (exp) == NOP_EXPR
3399 || TREE_CODE (exp) == CONVERT_EXPR
3400 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3401 outer_type = TREE_TYPE (exp);
3402 STRIP_NOPS (exp);
3404 if (TREE_CODE (exp) == BIT_AND_EXPR)
3406 and_mask = TREE_OPERAND (exp, 1);
3407 exp = TREE_OPERAND (exp, 0);
3408 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3409 if (TREE_CODE (and_mask) != INTEGER_CST)
3410 return 0;
3413 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3414 punsignedp, pvolatilep, false);
3415 if ((inner == exp && and_mask == 0)
3416 || *pbitsize < 0 || offset != 0
3417 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3418 return 0;
3420 /* If the number of bits in the reference is the same as the bitsize of
3421 the outer type, then the outer type gives the signedness. Otherwise
3422 (in case of a small bitfield) the signedness is unchanged. */
3423 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3424 *punsignedp = TYPE_UNSIGNED (outer_type);
3426 /* Compute the mask to access the bitfield. */
3427 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3428 precision = TYPE_PRECISION (unsigned_type);
3430 mask = build_int_cst (unsigned_type, -1);
3431 mask = force_fit_type (mask, 0, false, false);
3433 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3434 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3436 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3437 if (and_mask != 0)
3438 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3439 fold_convert (unsigned_type, and_mask), mask);
3441 *pmask = mask;
3442 *pand_mask = and_mask;
3443 return inner;
3446 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3447 bit positions. */
3449 static int
3450 all_ones_mask_p (tree mask, int size)
3452 tree type = TREE_TYPE (mask);
3453 unsigned int precision = TYPE_PRECISION (type);
3454 tree tmask;
3456 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3457 tmask = force_fit_type (tmask, 0, false, false);
3459 return
3460 tree_int_cst_equal (mask,
3461 const_binop (RSHIFT_EXPR,
3462 const_binop (LSHIFT_EXPR, tmask,
3463 size_int (precision - size),
3464 0),
3465 size_int (precision - size), 0));
3468 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3469 represents the sign bit of EXP's type. If EXP represents a sign
3470 or zero extension, also test VAL against the unextended type.
3471 The return value is the (sub)expression whose sign bit is VAL,
3472 or NULL_TREE otherwise. */
3474 static tree
3475 sign_bit_p (tree exp, tree val)
3477 unsigned HOST_WIDE_INT mask_lo, lo;
3478 HOST_WIDE_INT mask_hi, hi;
3479 int width;
3480 tree t;
3482 /* Tree EXP must have an integral type. */
3483 t = TREE_TYPE (exp);
3484 if (! INTEGRAL_TYPE_P (t))
3485 return NULL_TREE;
3487 /* Tree VAL must be an integer constant. */
3488 if (TREE_CODE (val) != INTEGER_CST
3489 || TREE_CONSTANT_OVERFLOW (val))
3490 return NULL_TREE;
3492 width = TYPE_PRECISION (t);
3493 if (width > HOST_BITS_PER_WIDE_INT)
3495 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3496 lo = 0;
3498 mask_hi = ((unsigned HOST_WIDE_INT) -1
3499 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3500 mask_lo = -1;
3502 else
3504 hi = 0;
3505 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3507 mask_hi = 0;
3508 mask_lo = ((unsigned HOST_WIDE_INT) -1
3509 >> (HOST_BITS_PER_WIDE_INT - width));
3512 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3513 treat VAL as if it were unsigned. */
3514 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3515 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3516 return exp;
3518 /* Handle extension from a narrower type. */
3519 if (TREE_CODE (exp) == NOP_EXPR
3520 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3521 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3523 return NULL_TREE;
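/* Editorial sketch, not part of GCC: the two-word arithmetic above, for
   a hypothetical 72-bit width split across 64-bit words (the role the
   HOST_WIDE_INT pair plays).  The sign bit then lives in the high word
   at position width - 64 - 1, and mask_hi keeps only the bits the type
   actually occupies there.  */
#include <assert.h>
#include <stdint.h>

static void
demo_sign_bit_words (void)
{
  const int width = 72, word_bits = 64;
  uint64_t hi = (uint64_t) 1 << (width - word_bits - 1);      /* bit 7 */
  uint64_t mask_hi = UINT64_MAX >> (2 * word_bits - width);   /* low 8 bits */
  assert (hi == 0x80 && mask_hi == 0xff);
}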
3526 /* Subroutine for fold_truthop: determine if an operand is simple enough
3527 to be evaluated unconditionally. */
3529 static int
3530 simple_operand_p (tree exp)
3532 /* Strip any conversions that don't change the machine mode. */
3533 STRIP_NOPS (exp);
3535 return (CONSTANT_CLASS_P (exp)
3536 || TREE_CODE (exp) == SSA_NAME
3537 || (DECL_P (exp)
3538 && ! TREE_ADDRESSABLE (exp)
3539 && ! TREE_THIS_VOLATILE (exp)
3540 && ! DECL_NONLOCAL (exp)
3541 /* Don't regard global variables as simple. They may be
3542 allocated in ways unknown to the compiler (shared memory,
3543 #pragma weak, etc). */
3544 && ! TREE_PUBLIC (exp)
3545 && ! DECL_EXTERNAL (exp)
3546 /* Loading a static variable is unduly expensive, but global
3547 registers aren't expensive. */
3548 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3551 /* The following functions are subroutines to fold_range_test and allow it to
3552 try to change a logical combination of comparisons into a range test.
3554 For example, both
3555 X == 2 || X == 3 || X == 4 || X == 5
3556 and
3557 X >= 2 && X <= 5
3558 are converted to
3559 (unsigned) (X - 2) <= 3
3561 We describe each set of comparisons as being either inside or outside
3562 a range, using a variable named like IN_P, and then describe the
3563 range with a lower and upper bound. If one of the bounds is omitted,
3564 it represents either the highest or lowest value of the type.
3566 In the comments below, we represent a range by two numbers in brackets
3567 preceded by a "+" to designate being inside that range, or a "-" to
3568 designate being outside that range, so the condition can be inverted by
3569 flipping the prefix. An omitted bound is represented by a "-". For
3570 example, "- [-, 10]" means being outside the range starting at the lowest
3571 possible value and ending at 10, in other words, being greater than 10.
3572 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3573 always false.
3575 We set up things so that the missing bounds are handled in a consistent
3576 manner so neither a missing bound nor "true" and "false" need to be
3577 handled using a special case. */
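/* Editorial sketch, not part of GCC: the single-comparison range test
   described above, checked exhaustively for small X.  Subtracting the
   low bound makes the range start at zero, and the unsigned comparison
   then rejects values below the low bound because they wrap around to
   huge values.  demo_range_test is an invented name.  */
#include <assert.h>

static void
demo_range_test (void)
{
  for (int x = -20; x <= 20; x++)
    {
      int by_chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int by_range = ((unsigned) (x - 2) <= 3u);
      assert (by_chain == by_range);
    }
}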
3579 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3580 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3581 and UPPER1_P are nonzero if the respective argument is an upper bound
3582 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3583 must be specified for a comparison. ARG1 will be converted to ARG0's
3584 type if both are specified. */
3586 static tree
3587 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3588 tree arg1, int upper1_p)
3590 tree tem;
3591 int result;
3592 int sgn0, sgn1;
3594 /* If neither arg represents infinity, do the normal operation.
3595 Else, if not a comparison, return infinity. Else handle the special
3596 comparison rules. Note that most of the cases below won't occur, but
3597 are handled for consistency. */
3599 if (arg0 != 0 && arg1 != 0)
3601 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3602 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3603 STRIP_NOPS (tem);
3604 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3607 if (TREE_CODE_CLASS (code) != tcc_comparison)
3608 return 0;
3610 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3611 for neither. In real maths, we cannot assume open ended ranges are
3612 the same. But, this is computer arithmetic, where numbers are finite.
3613 We can therefore make the transformation of any unbounded range with
3614 the value Z, Z being greater than any representable number. This permits
3615 us to treat unbounded ranges as equal. */
3616 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3617 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3618 switch (code)
3620 case EQ_EXPR:
3621 result = sgn0 == sgn1;
3622 break;
3623 case NE_EXPR:
3624 result = sgn0 != sgn1;
3625 break;
3626 case LT_EXPR:
3627 result = sgn0 < sgn1;
3628 break;
3629 case LE_EXPR:
3630 result = sgn0 <= sgn1;
3631 break;
3632 case GT_EXPR:
3633 result = sgn0 > sgn1;
3634 break;
3635 case GE_EXPR:
3636 result = sgn0 >= sgn1;
3637 break;
3638 default:
3639 gcc_unreachable ();
3642 return constant_boolean_node (result, type);
3645 /* Given EXP, a logical expression, set the range it is testing into
3646 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3647 actually being tested. *PLOW and *PHIGH will be made of the same type
3648 as the returned expression. If EXP is not a comparison, we will most
3649 likely not be returning a useful value and range. */
3651 static tree
3652 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3654 enum tree_code code;
3655 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3656 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3657 int in_p, n_in_p;
3658 tree low, high, n_low, n_high;
3660 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3661 and see if we can refine the range. Some of the cases below may not
3662 happen, but it doesn't seem worth worrying about this. We "continue"
3663 the outer loop when we've changed something; otherwise we "break"
3664 the switch, which will "break" the while. */
3666 in_p = 0;
3667 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_CODE_LENGTH (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P, since the incoming range represents
             being not equal to zero; "out" leaves IN_P alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c]  */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -]  */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -]  */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c]  */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  fold_convert (arg0_type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low
                 bound minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR: case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
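          /* For example, with an 8-bit unsigned type, parsing X + 10
             against the range + [5, 250] gives n_low = 251 and
             n_high = 240; normalizing below yields the bounds [241, 250]
             with IN_P inverted, i.e. X is outside [241, 250].  */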
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;
        case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type to exp's
             signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
                                             fold_convert (arg0_type,
                                                           high_positive),
                                             fold_convert (arg0_type,
                                                           integer_one_node));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */
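/* For example, asked for the "in" test of the range ['0', '9'] on a
   character EXP, this builds the equivalent of
   (unsigned) (EXP - '0') <= 9 rather than a pair of comparisons.  */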
static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
                        fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
                        fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
                        fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
                              fold_convert (etype, integer_zero_node));
        }
    }
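  /* The general case below rewrites LOW <= EXP && EXP <= HIGH as the
     single unsigned test (unsigned) (EXP - LOW) <= HIGH - LOW, by
     checking EXP - LOW against the range [0, HIGH - LOW]; e.g.
     1 <= c && c <= 10 becomes (unsigned) (c - 1) <= 9.  */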
  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold_build2 (MINUS_EXPR, etype, exp, low),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */
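/* For example, merging the two "in" ranges + ['0', -] and + [-, '9']
   (the two halves of c >= '0' && c <= '9') yields the single range
   + ['0', '9'].  */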
static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might also be adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */
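/* For example, x > 0 ? x : -x becomes ABS_EXPR <x>, and
   x < y ? x : y becomes MIN_EXPR <x, y> when NaNs and signed
   zeros need not be honored; see the case analysis below.  */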
static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
          /* In the case that A is of the form X-Y, '-A' (arg2) may
             have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert (arg1_type, arg1);
        return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert (type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
        return fold_convert (type, integer_zero_node);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || strcmp (lang_hooks.name, "GNU C++") != 0
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert (type, arg01);
        return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
                                                   type, arg1, arg2));
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
                                                   type, arg1, arg2));
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
                                                   type, arg1, arg2));
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
                                                   type, arg1, arg2));
        break;
      case NE_EXPR:
        break;
      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
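/* For example, ch >= '0' && ch <= '9' is turned into two ranges by
   make_range, combined into + ['0', '9'] by merge_ranges, and then
   emitted by build_range_check as one unsigned comparison.  */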
static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where branches are expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2 (code == TRUTH_ANDIF_EXPR
                       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build2 (code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, lhs, rhs);
        }
    }

  return 0;
}
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
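/* E.g., for P == 8 in a 32-bit type, the sign-extended constant -1
   (0xffffffff) comes back as 0xff, its extra bits clear, whereas the
   zero-extended constant 0xff comes back as 0xffffffff.  */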
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by
   this function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
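/* For instance, when fields a and b of *p live in the same word, the
   two loads and compares in p->a == 2 && p->b == 4 collapse into a
   single load, one mask, and one compare against a merged constant.  */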
static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    fold_convert (TREE_TYPE (lhs), integer_zero_node));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    fold_convert (TREE_TYPE (rhs), integer_zero_node));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
        return build2 (NE_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
                       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
        return build2 (EQ_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
                       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
        return build2 (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* The inner operation on the lhs of each comparison must be the same
     if we are to be able to do anything.  Then see if we have constants.
     If not, the same must be true for the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
                         size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
                         size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1 (BIT_NOT_EXPR,
                                                     lntype, ll_mask),
                                        0)))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1 (BIT_NOT_EXPR,
                                                     lntype, rl_mask),
                                        0)))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for them.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
                             size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2 (wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert (rntype, lhs);
                  ll_mask = fold_convert (rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert (lntype, rhs);
                  lr_mask = fold_convert (lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2 (wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
                 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize a comparison of a MIN_EXPR or MAX_EXPR (OP0) with a constant
   (OP1).  CODE is the comparison code and TYPE the type of the result.  */
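/* For example, a comparison such as MIN (X, 10) < 5 reduces to X < 5:
   the LT case below is handled by inverting to GE, splitting that into
   EQ and GT, and recursing.  */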
static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        /* FIXME: We should be able to invert code without building a
           scratch tree node, but doing so would require us to
           duplicate a part of invert_truthvalue here.  */
        tree tem = invert_truthvalue (build2 (code, type, op0, op1));
        tem = optimize_minmax_comparison (TREE_CODE (tem),
                                          TREE_TYPE (tem),
                                          TREE_OPERAND (tem, 0),
                                          TREE_OPERAND (tem, 1));
        return invert_truthvalue (tem);
      }

    case GE_EXPR:
      return
        fold_build2 (TRUTH_ORIF_EXPR, type,
                     optimize_minmax_comparison
                     (EQ_EXPR, type, arg0, comp_const),
                     optimize_minmax_comparison
                     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5  */
        return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and is unsigned, and its type is smaller than ctype,
             then we cannot pass through as widening.  */
          && ((TYPE_UNSIGNED (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (GET_MODE_SIZE (TYPE_MODE (ctype))
                   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (GET_MODE_SIZE (TYPE_MODE (type))
                  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && ! TREE_CONSTANT_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*lang_hooks.types.signed_type) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1, 0)))
          && ! TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype, fold_convert (ctype, op0), t1),
                               c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c), 0);
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && ! flag_wrapv))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code
          && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
                                     fold_convert (ctype, c), 0))
          && ! TREE_OVERFLOW (t1))
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with an
         operation of either CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ! flag_wrapv
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                fold_convert (ctype,
                                              const_binop (TRUNC_DIV_EXPR,
                                                           op1, c, 0)));
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
            return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                fold_convert (ctype,
                                              const_binop (TRUNC_DIV_EXPR,
                                                           c, op1, 0)));
        }
      break;

    default:
      break;
    }

  return 0;
}
5487 /* Return a node which has the indicated constant VALUE (either 0 or
5488 1), and is of the indicated TYPE. */
5490 tree
5491 constant_boolean_node (int value, tree type)
5493 if (type == integer_type_node)
5494 return value ? integer_one_node : integer_zero_node;
5495 else if (type == boolean_type_node)
5496 return value ? boolean_true_node : boolean_false_node;
5497 else
5498 return build_int_cst (type, value);
5502 /* Return true if expr looks like an ARRAY_REF and set base and
5503 offset to the appropriate trees. If there is no offset,
5504 offset is set to NULL_TREE. Base will be canonicalized to
5505 something you can get the element type from using
5506 TREE_TYPE (TREE_TYPE (base)). */
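/* For example, for EXPR = &a[i] we return BASE = a and OFFSET = i,
and for EXPR = p + 4, where P is a pointer variable, we return
BASE = p and OFFSET = 4. (Illustrative instances of the canonical
forms handled below.) */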
5508 static bool
5509 extract_array_ref (tree expr, tree *base, tree *offset)
5511 /* One canonical form is a PLUS_EXPR with the first
5512 argument being an ADDR_EXPR with a possible NOP_EXPR
5513 attached. */
5514 if (TREE_CODE (expr) == PLUS_EXPR)
5516 tree op0 = TREE_OPERAND (expr, 0);
5517 tree inner_base, dummy1;
5518 /* Strip NOP_EXPRs here because the C frontends and/or
5519 folders may present us with (int *)&x.a + 4B. */
5520 STRIP_NOPS (op0);
5521 if (extract_array_ref (op0, &inner_base, &dummy1))
5523 *base = inner_base;
5524 if (dummy1 == NULL_TREE)
5525 *offset = TREE_OPERAND (expr, 1);
5526 else
5527 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5528 dummy1, TREE_OPERAND (expr, 1));
5529 return true;
5532 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5533 which we transform into an ADDR_EXPR with appropriate
5534 offset. For other arguments to the ADDR_EXPR we assume
5535 zero offset and as such do not care about the ADDR_EXPR
5536 type and strip possible nops from it. */
5537 else if (TREE_CODE (expr) == ADDR_EXPR)
5539 tree op0 = TREE_OPERAND (expr, 0);
5540 if (TREE_CODE (op0) == ARRAY_REF)
5542 *base = TREE_OPERAND (op0, 0);
5543 *offset = TREE_OPERAND (op0, 1);
5545 else
5547 /* Handle array-to-pointer decay as &a. */
5548 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5549 *base = TREE_OPERAND (expr, 0);
5550 else
5551 *base = expr;
5552 *offset = NULL_TREE;
5554 return true;
5556 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5557 else if (SSA_VAR_P (expr)
5558 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5560 *base = expr;
5561 *offset = NULL_TREE;
5562 return true;
5565 return false;
5569 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5570 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5571 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5572 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5573 COND is the first argument to CODE; otherwise (as in the example
5574 given here), it is the second argument. TYPE is the type of the
5575 original expression. Return NULL_TREE if no simplification is
5576 possible. */
5578 static tree
5579 fold_binary_op_with_conditional_arg (enum tree_code code,
5580 tree type, tree op0, tree op1,
5581 tree cond, tree arg, int cond_first_p)
5583 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5584 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5585 tree test, true_value, false_value;
5586 tree lhs = NULL_TREE;
5587 tree rhs = NULL_TREE;
5589 /* This transformation is only worthwhile if we don't have to wrap
5590 arg in a SAVE_EXPR, and the operation can be simplified on at least
5591 one of the branches once it is pushed inside the COND_EXPR. */
5592 if (!TREE_CONSTANT (arg))
5593 return NULL_TREE;
5595 if (TREE_CODE (cond) == COND_EXPR)
5597 test = TREE_OPERAND (cond, 0);
5598 true_value = TREE_OPERAND (cond, 1);
5599 false_value = TREE_OPERAND (cond, 2);
5600 /* If this operand throws an exception (and hence has void
5601 type), it does not make sense to try to perform a logical
5602 or arithmetic operation involving it. */
5603 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5604 lhs = true_value;
5605 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5606 rhs = false_value;
5608 else
5610 tree testtype = TREE_TYPE (cond);
5611 test = cond;
5612 true_value = constant_boolean_node (true, testtype);
5613 false_value = constant_boolean_node (false, testtype);
5616 arg = fold_convert (arg_type, arg);
5617 if (lhs == 0)
5619 true_value = fold_convert (cond_type, true_value);
5620 if (cond_first_p)
5621 lhs = fold_build2 (code, type, true_value, arg);
5622 else
5623 lhs = fold_build2 (code, type, arg, true_value);
5625 if (rhs == 0)
5627 false_value = fold_convert (cond_type, false_value);
5628 if (cond_first_p)
5629 rhs = fold_build2 (code, type, false_value, arg);
5630 else
5631 rhs = fold_build2 (code, type, arg, false_value);
5634 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5635 return fold_convert (type, test);
5639 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5641 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5642 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5643 ADDEND is the same as X.
5645 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5646 and finite. The problematic cases are when X is zero, and its mode
5647 has signed zeros. In the case of rounding towards -infinity,
5648 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5649 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5651 static bool
5652 fold_real_zero_addition_p (tree type, tree addend, int negate)
5654 if (!real_zerop (addend))
5655 return false;
5657 /* Don't allow the fold with -fsignaling-nans. */
5658 if (HONOR_SNANS (TYPE_MODE (type)))
5659 return false;
5661 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5662 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5663 return true;
5665 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5666 if (TREE_CODE (addend) == REAL_CST
5667 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5668 negate = !negate;
5670 /* The mode has signed zeros, and we have to honor their sign.
5671 In this situation, there is only one case we can return true for.
5672 X - 0 is the same as X unless rounding towards -infinity is
5673 supported. */
5674 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5677 /* Subroutine of fold() that checks comparisons of built-in math
5678 functions against real constants.
5680 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5681 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5682 is the type of the result and ARG0 and ARG1 are the operands of the
5683 comparison. ARG1 must be a TREE_REAL_CST.
5685 The function returns the constant folded tree if a simplification
5686 can be made, and NULL_TREE otherwise. */
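/* For example, sqrt(x) < 2.0 folds to x < 4.0 when NaNs need not be
honored, and sqrt(x) > -1.0 folds to x >= 0.0 when they must be.
(Illustrative; the exact guards are spelled out case by case
below.) */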
5688 static tree
5689 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5690 tree type, tree arg0, tree arg1)
5692 REAL_VALUE_TYPE c;
5694 if (BUILTIN_SQRT_P (fcode))
5696 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5697 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5699 c = TREE_REAL_CST (arg1);
5700 if (REAL_VALUE_NEGATIVE (c))
5702 /* sqrt(x) == y, < y and <= y are always false, if y is negative. */
5703 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5704 return omit_one_operand (type, integer_zero_node, arg);
5706 /* sqrt(x) > y is always true, if y is negative and we
5707 don't care about NaNs, i.e. negative values of x. */
5708 if (code == NE_EXPR || !HONOR_NANS (mode))
5709 return omit_one_operand (type, integer_one_node, arg);
5711 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5712 return fold_build2 (GE_EXPR, type, arg,
5713 build_real (TREE_TYPE (arg), dconst0));
5715 else if (code == GT_EXPR || code == GE_EXPR)
5717 REAL_VALUE_TYPE c2;
5719 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5720 real_convert (&c2, mode, &c2);
5722 if (REAL_VALUE_ISINF (c2))
5724 /* sqrt(x) > y is x == +Inf, when y is very large. */
5725 if (HONOR_INFINITIES (mode))
5726 return fold_build2 (EQ_EXPR, type, arg,
5727 build_real (TREE_TYPE (arg), c2));
5729 /* sqrt(x) > y is always false, when y is very large
5730 and we don't care about infinities. */
5731 return omit_one_operand (type, integer_zero_node, arg);
5734 /* sqrt(x) > c is the same as x > c*c. */
5735 return fold_build2 (code, type, arg,
5736 build_real (TREE_TYPE (arg), c2));
5738 else if (code == LT_EXPR || code == LE_EXPR)
5740 REAL_VALUE_TYPE c2;
5742 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5743 real_convert (&c2, mode, &c2);
5745 if (REAL_VALUE_ISINF (c2))
5747 /* sqrt(x) < y is always true, when y is a very large
5748 value and we don't care about NaNs or Infinities. */
5749 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5750 return omit_one_operand (type, integer_one_node, arg);
5752 /* sqrt(x) < y is x != +Inf when y is very large and we
5753 don't care about NaNs. */
5754 if (! HONOR_NANS (mode))
5755 return fold_build2 (NE_EXPR, type, arg,
5756 build_real (TREE_TYPE (arg), c2));
5758 /* sqrt(x) < y is x >= 0 when y is very large and we
5759 don't care about Infinities. */
5760 if (! HONOR_INFINITIES (mode))
5761 return fold_build2 (GE_EXPR, type, arg,
5762 build_real (TREE_TYPE (arg), dconst0));
5764 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5765 if (lang_hooks.decls.global_bindings_p () != 0
5766 || CONTAINS_PLACEHOLDER_P (arg))
5767 return NULL_TREE;
5769 arg = save_expr (arg);
5770 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5771 fold_build2 (GE_EXPR, type, arg,
5772 build_real (TREE_TYPE (arg),
5773 dconst0)),
5774 fold_build2 (NE_EXPR, type, arg,
5775 build_real (TREE_TYPE (arg),
5776 c2)));
5779 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5780 if (! HONOR_NANS (mode))
5781 return fold_build2 (code, type, arg,
5782 build_real (TREE_TYPE (arg), c2));
5784 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5785 if (lang_hooks.decls.global_bindings_p () == 0
5786 && ! CONTAINS_PLACEHOLDER_P (arg))
5788 arg = save_expr (arg);
5789 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5790 fold_build2 (GE_EXPR, type, arg,
5791 build_real (TREE_TYPE (arg),
5792 dconst0)),
5793 fold_build2 (code, type, arg,
5794 build_real (TREE_TYPE (arg),
5795 c2)));
5800 return NULL_TREE;
5803 /* Subroutine of fold() that optimizes comparisons against Infinities,
5804 either +Inf or -Inf.
5806 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5807 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5808 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5810 The function returns the constant folded tree if a simplification
5811 can be made, and NULL_TREE otherwise. */
5813 static tree
5814 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5816 enum machine_mode mode;
5817 REAL_VALUE_TYPE max;
5818 tree temp;
5819 bool neg;
5821 mode = TYPE_MODE (TREE_TYPE (arg0));
5823 /* For negative infinity swap the sense of the comparison. */
5824 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5825 if (neg)
5826 code = swap_tree_comparison (code);
5828 switch (code)
5830 case GT_EXPR:
5831 /* x > +Inf is always false, if we ignore sNaNs. */
5832 if (HONOR_SNANS (mode))
5833 return NULL_TREE;
5834 return omit_one_operand (type, integer_zero_node, arg0);
5836 case LE_EXPR:
5837 /* x <= +Inf is always true, if we don't care about NaNs. */
5838 if (! HONOR_NANS (mode))
5839 return omit_one_operand (type, integer_one_node, arg0);
5841 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5842 if (lang_hooks.decls.global_bindings_p () == 0
5843 && ! CONTAINS_PLACEHOLDER_P (arg0))
5845 arg0 = save_expr (arg0);
5846 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5848 break;
5850 case EQ_EXPR:
5851 case GE_EXPR:
5852 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5853 real_maxval (&max, neg, mode);
5854 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5855 arg0, build_real (TREE_TYPE (arg0), max));
5857 case LT_EXPR:
5858 /* x < +Inf is always equal to x <= DBL_MAX. */
5859 real_maxval (&max, neg, mode);
5860 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5861 arg0, build_real (TREE_TYPE (arg0), max));
5863 case NE_EXPR:
5864 /* x != +Inf is always equal to !(x > DBL_MAX). */
5865 real_maxval (&max, neg, mode);
5866 if (! HONOR_NANS (mode))
5867 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5868 arg0, build_real (TREE_TYPE (arg0), max));
5870 /* The transformation below creates non-gimple code and thus is
5871 not appropriate if we are in gimple form. */
5872 if (in_gimple_form)
5873 return NULL_TREE;
5875 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5876 arg0, build_real (TREE_TYPE (arg0), max));
5877 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5879 default:
5880 break;
5883 return NULL_TREE;
5886 /* Subroutine of fold() that optimizes comparisons of a division by
5887 a nonzero integer constant against an integer constant, i.e.
5888 X/C1 op C2.
5890 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5891 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5892 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5894 The function returns the constant folded tree if a simplification
5895 can be made, and NULL_TREE otherwise. */
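/* For example, X / 4 == 2 holds exactly for X in [8, 11], so it
folds to the range check 8 <= X && X <= 11; similarly X / 4 < 2
folds to X < 8. (Worked example for a positive divisor:
lo = 2*4 and hi = lo + (4-1).) */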
5897 static tree
5898 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5900 tree prod, tmp, hi, lo;
5901 tree arg00 = TREE_OPERAND (arg0, 0);
5902 tree arg01 = TREE_OPERAND (arg0, 1);
5903 unsigned HOST_WIDE_INT lpart;
5904 HOST_WIDE_INT hpart;
5905 int overflow;
5907 /* We have to do this the hard way to detect unsigned overflow.
5908 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5909 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5910 TREE_INT_CST_HIGH (arg01),
5911 TREE_INT_CST_LOW (arg1),
5912 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5913 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5914 prod = force_fit_type (prod, -1, overflow, false);
5916 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5918 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5919 lo = prod;
5921 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5922 overflow = add_double (TREE_INT_CST_LOW (prod),
5923 TREE_INT_CST_HIGH (prod),
5924 TREE_INT_CST_LOW (tmp),
5925 TREE_INT_CST_HIGH (tmp),
5926 &lpart, &hpart);
5927 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5928 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5929 TREE_CONSTANT_OVERFLOW (prod));
5931 else if (tree_int_cst_sgn (arg01) >= 0)
5933 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5934 switch (tree_int_cst_sgn (arg1))
5936 case -1:
5937 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5938 hi = prod;
5939 break;
5941 case 0:
5942 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5943 hi = tmp;
5944 break;
5946 case 1:
5947 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5948 lo = prod;
5949 break;
5951 default:
5952 gcc_unreachable ();
5955 else
5957 /* A negative divisor reverses the relational operators. */
5958 code = swap_tree_comparison (code);
5960 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5961 switch (tree_int_cst_sgn (arg1))
5963 case -1:
5964 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5965 lo = prod;
5966 break;
5968 case 0:
5969 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5970 lo = tmp;
5971 break;
5973 case 1:
5974 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5975 hi = prod;
5976 break;
5978 default:
5979 gcc_unreachable ();
5983 switch (code)
5985 case EQ_EXPR:
5986 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5987 return omit_one_operand (type, integer_zero_node, arg00);
5988 if (TREE_OVERFLOW (hi))
5989 return fold_build2 (GE_EXPR, type, arg00, lo);
5990 if (TREE_OVERFLOW (lo))
5991 return fold_build2 (LE_EXPR, type, arg00, hi);
5992 return build_range_check (type, arg00, 1, lo, hi);
5994 case NE_EXPR:
5995 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5996 return omit_one_operand (type, integer_one_node, arg00);
5997 if (TREE_OVERFLOW (hi))
5998 return fold_build2 (LT_EXPR, type, arg00, lo);
5999 if (TREE_OVERFLOW (lo))
6000 return fold_build2 (GT_EXPR, type, arg00, hi);
6001 return build_range_check (type, arg00, 0, lo, hi);
6003 case LT_EXPR:
6004 if (TREE_OVERFLOW (lo))
6005 return omit_one_operand (type, integer_zero_node, arg00);
6006 return fold_build2 (LT_EXPR, type, arg00, lo);
6008 case LE_EXPR:
6009 if (TREE_OVERFLOW (hi))
6010 return omit_one_operand (type, integer_one_node, arg00);
6011 return fold_build2 (LE_EXPR, type, arg00, hi);
6013 case GT_EXPR:
6014 if (TREE_OVERFLOW (hi))
6015 return omit_one_operand (type, integer_zero_node, arg00);
6016 return fold_build2 (GT_EXPR, type, arg00, hi);
6018 case GE_EXPR:
6019 if (TREE_OVERFLOW (lo))
6020 return omit_one_operand (type, integer_one_node, arg00);
6021 return fold_build2 (GE_EXPR, type, arg00, lo);
6023 default:
6024 break;
6027 return NULL_TREE;
6031 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6032 equality/inequality test, then return a simplified form of the test
6033 using a sign test. Otherwise return NULL. TYPE is the desired
6034 result type. */
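/* For example, if X has a 32-bit type, (X & 0x80000000) != 0 folds
to (int) X < 0 and (X & 0x80000000) == 0 folds to (int) X >= 0. */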
6036 static tree
6037 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6038 tree result_type)
6040 /* If this is testing a single bit, we can optimize the test. */
6041 if ((code == NE_EXPR || code == EQ_EXPR)
6042 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6043 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6045 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6046 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6047 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6049 if (arg00 != NULL_TREE
6050 /* This is only a win if casting to a signed type is cheap,
6051 i.e. when arg00's type is not a partial mode. */
6052 && TYPE_PRECISION (TREE_TYPE (arg00))
6053 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6055 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6056 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6057 result_type, fold_convert (stype, arg00),
6058 fold_convert (stype, integer_zero_node));
6062 return NULL_TREE;
6065 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6066 equality/inequality test, then return a simplified form of
6067 the test using shifts and logical operations. Otherwise return
6068 NULL. TYPE is the desired result type. */
6070 tree
6071 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6072 tree result_type)
6074 /* If this is testing a single bit, we can optimize the test. */
6075 if ((code == NE_EXPR || code == EQ_EXPR)
6076 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6077 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6079 tree inner = TREE_OPERAND (arg0, 0);
6080 tree type = TREE_TYPE (arg0);
6081 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6082 enum machine_mode operand_mode = TYPE_MODE (type);
6083 int ops_unsigned;
6084 tree signed_type, unsigned_type, intermediate_type;
6085 tree tem;
6087 /* First, see if we can fold the single bit test into a sign-bit
6088 test. */
6089 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6090 result_type);
6091 if (tem)
6092 return tem;
6094 /* Otherwise we have (A & C) != 0 where C is a single bit,
6095 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6096 Similarly for (A & C) == 0. */
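/* For example, (X & 8) != 0 becomes (X >> 3) & 1, and (X & 8) == 0
becomes ((X >> 3) ^ 1) & 1. (Worked instance of the shift, XOR
and AND built below.) */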
6098 /* If INNER is a right shift of a constant and it plus BITNUM does
6099 not overflow, adjust BITNUM and INNER. */
6100 if (TREE_CODE (inner) == RSHIFT_EXPR
6101 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6102 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6103 && bitnum < TYPE_PRECISION (type)
6104 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6105 bitnum - TYPE_PRECISION (type)))
6107 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6108 inner = TREE_OPERAND (inner, 0);
6111 /* If we are going to be able to omit the AND below, we must do our
6112 operations as unsigned. If we must use the AND, we have a choice.
6113 Normally unsigned is faster, but for some machines signed is. */
6114 #ifdef LOAD_EXTEND_OP
6115 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6116 && !flag_syntax_only) ? 0 : 1;
6117 #else
6118 ops_unsigned = 1;
6119 #endif
6121 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6122 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6123 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6124 inner = fold_convert (intermediate_type, inner);
6126 if (bitnum != 0)
6127 inner = build2 (RSHIFT_EXPR, intermediate_type,
6128 inner, size_int (bitnum));
6130 if (code == EQ_EXPR)
6131 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6132 inner, integer_one_node);
6134 /* Put the AND last so it can combine with more things. */
6135 inner = build2 (BIT_AND_EXPR, intermediate_type,
6136 inner, integer_one_node);
6138 /* Make sure to return the proper type. */
6139 inner = fold_convert (result_type, inner);
6141 return inner;
6143 return NULL_TREE;
6146 /* Check whether we are allowed to reorder operands arg0 and arg1,
6147 such that the evaluation of arg1 occurs before arg0. */
6149 static bool
6150 reorder_operands_p (tree arg0, tree arg1)
6152 if (! flag_evaluation_order)
6153 return true;
6154 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6155 return true;
6156 return ! TREE_SIDE_EFFECTS (arg0)
6157 && ! TREE_SIDE_EFFECTS (arg1);
6160 /* Test whether it is preferable to swap two operands, ARG0 and
6161 ARG1, for example because ARG0 is an integer constant and ARG1
6162 isn't. If REORDER is true, only recommend swapping if we can
6163 evaluate the operands in reverse order. */
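/* For example, fold_binary uses this to canonicalize 5 + X into
X + 5, so that later patterns need only look for a constant in
the second operand. */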
6165 bool
6166 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6168 STRIP_SIGN_NOPS (arg0);
6169 STRIP_SIGN_NOPS (arg1);
6171 if (TREE_CODE (arg1) == INTEGER_CST)
6172 return 0;
6173 if (TREE_CODE (arg0) == INTEGER_CST)
6174 return 1;
6176 if (TREE_CODE (arg1) == REAL_CST)
6177 return 0;
6178 if (TREE_CODE (arg0) == REAL_CST)
6179 return 1;
6181 if (TREE_CODE (arg1) == COMPLEX_CST)
6182 return 0;
6183 if (TREE_CODE (arg0) == COMPLEX_CST)
6184 return 1;
6186 if (TREE_CONSTANT (arg1))
6187 return 0;
6188 if (TREE_CONSTANT (arg0))
6189 return 1;
6191 if (optimize_size)
6192 return 0;
6194 if (reorder && flag_evaluation_order
6195 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6196 return 0;
6198 if (DECL_P (arg1))
6199 return 0;
6200 if (DECL_P (arg0))
6201 return 1;
6203 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6204 for commutative and comparison operators. Ensuring a canonical
6205 form allows the optimizers to find additional redundancies without
6206 having to explicitly check for both orderings. */
6207 if (TREE_CODE (arg0) == SSA_NAME
6208 && TREE_CODE (arg1) == SSA_NAME
6209 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6210 return 1;
6212 return 0;
6215 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6216 ARG0 is extended to a wider type. */
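/* For example, if C has type unsigned char, (int) c == 1000 folds
to 0, since 1000 is outside [0, 255], while (int) c == 100 folds
to the comparison c == 100 done in unsigned char. */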
6218 static tree
6219 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6221 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6222 tree arg1_unw;
6223 tree shorter_type, outer_type;
6224 tree min, max;
6225 bool above, below;
6227 if (arg0_unw == arg0)
6228 return NULL_TREE;
6229 shorter_type = TREE_TYPE (arg0_unw);
6231 #ifdef HAVE_canonicalize_funcptr_for_compare
6232 /* Disable this optimization if we're casting a function pointer
6233 type on targets that require function pointer canonicalization. */
6234 if (HAVE_canonicalize_funcptr_for_compare
6235 && TREE_CODE (shorter_type) == POINTER_TYPE
6236 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6237 return NULL_TREE;
6238 #endif
6240 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6241 return NULL_TREE;
6243 arg1_unw = get_unwidened (arg1, shorter_type);
6244 if (!arg1_unw)
6245 return NULL_TREE;
6247 /* If possible, express the comparison in the shorter mode. */
6248 if ((code == EQ_EXPR || code == NE_EXPR
6249 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6250 && (TREE_TYPE (arg1_unw) == shorter_type
6251 || (TREE_CODE (arg1_unw) == INTEGER_CST
6252 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6253 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6254 && int_fits_type_p (arg1_unw, shorter_type))))
6255 return fold_build2 (code, type, arg0_unw,
6256 fold_convert (shorter_type, arg1_unw));
6258 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6259 return NULL_TREE;
6261 /* If we are comparing with an integer that does not fit into the range
6262 of the shorter type, the result is known. */
6263 outer_type = TREE_TYPE (arg1_unw);
6264 min = lower_bound_in_type (outer_type, shorter_type);
6265 max = upper_bound_in_type (outer_type, shorter_type);
6267 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6268 max, arg1_unw));
6269 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6270 arg1_unw, min));
6272 switch (code)
6274 case EQ_EXPR:
6275 if (above || below)
6276 return omit_one_operand (type, integer_zero_node, arg0);
6277 break;
6279 case NE_EXPR:
6280 if (above || below)
6281 return omit_one_operand (type, integer_one_node, arg0);
6282 break;
6284 case LT_EXPR:
6285 case LE_EXPR:
6286 if (above)
6287 return omit_one_operand (type, integer_one_node, arg0);
6288 else if (below)
6289 return omit_one_operand (type, integer_zero_node, arg0);
6291 case GT_EXPR:
6292 case GE_EXPR:
6293 if (above)
6294 return omit_one_operand (type, integer_zero_node, arg0);
6295 else if (below)
6296 return omit_one_operand (type, integer_one_node, arg0);
6298 default:
6299 break;
6302 return NULL_TREE;
6305 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6306 conversion on ARG0 changes only its signedness. */
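/* For example, if U has type unsigned int, (int) u == 5 folds to
u == 5U. (Illustrative worked example.) */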
6308 static tree
6309 fold_sign_changed_comparison (enum tree_code code, tree type,
6310 tree arg0, tree arg1)
6312 tree arg0_inner, tmp;
6313 tree inner_type, outer_type;
6315 if (TREE_CODE (arg0) != NOP_EXPR
6316 && TREE_CODE (arg0) != CONVERT_EXPR)
6317 return NULL_TREE;
6319 outer_type = TREE_TYPE (arg0);
6320 arg0_inner = TREE_OPERAND (arg0, 0);
6321 inner_type = TREE_TYPE (arg0_inner);
6323 #ifdef HAVE_canonicalize_funcptr_for_compare
6324 /* Disable this optimization if we're casting a function pointer
6325 type on targets that require function pointer canonicalization. */
6326 if (HAVE_canonicalize_funcptr_for_compare
6327 && TREE_CODE (inner_type) == POINTER_TYPE
6328 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6329 return NULL_TREE;
6330 #endif
6332 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6333 return NULL_TREE;
6335 if (TREE_CODE (arg1) != INTEGER_CST
6336 && !((TREE_CODE (arg1) == NOP_EXPR
6337 || TREE_CODE (arg1) == CONVERT_EXPR)
6338 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6339 return NULL_TREE;
6341 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6342 && code != NE_EXPR
6343 && code != EQ_EXPR)
6344 return NULL_TREE;
6346 if (TREE_CODE (arg1) == INTEGER_CST)
6348 tmp = build_int_cst_wide (inner_type,
6349 TREE_INT_CST_LOW (arg1),
6350 TREE_INT_CST_HIGH (arg1));
6351 arg1 = force_fit_type (tmp, 0,
6352 TREE_OVERFLOW (arg1),
6353 TREE_CONSTANT_OVERFLOW (arg1));
6355 else
6356 arg1 = fold_convert (inner_type, arg1);
6358 return fold_build2 (code, type, arg0_inner, arg1);
6361 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6362 step of the array. Reconstructs s and delta in the case of s * delta
6363 being an integer constant (and thus already folded).
6364 ADDR is the address. OP1 is the multiplicative expression.
6365 If the function succeeds, the new address expression is returned. Otherwise
6366 NULL_TREE is returned. */
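/* For example, if A is an array of 4-byte integers, &a[i] + j * 4
becomes &a[i + j] and &a[i] + 8 becomes &a[i + 2].
(Illustrative; the element size is taken from the array type.) */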
6368 static tree
6369 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6371 tree s, delta, step;
6372 tree ref = TREE_OPERAND (addr, 0), pref;
6373 tree ret, pos;
6374 tree itype;
6376 /* Canonicalize op1 into a possibly non-constant delta
6377 and an INTEGER_CST s. */
6378 if (TREE_CODE (op1) == MULT_EXPR)
6380 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6382 STRIP_NOPS (arg0);
6383 STRIP_NOPS (arg1);
6385 if (TREE_CODE (arg0) == INTEGER_CST)
6387 s = arg0;
6388 delta = arg1;
6390 else if (TREE_CODE (arg1) == INTEGER_CST)
6392 s = arg1;
6393 delta = arg0;
6395 else
6396 return NULL_TREE;
6398 else if (TREE_CODE (op1) == INTEGER_CST)
6400 delta = op1;
6401 s = NULL_TREE;
6403 else
6405 /* Treat OP1 as DELTA * 1. */
6406 delta = op1;
6407 s = integer_one_node;
6410 for (;; ref = TREE_OPERAND (ref, 0))
6412 if (TREE_CODE (ref) == ARRAY_REF)
6414 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6415 if (! itype)
6416 continue;
6418 step = array_ref_element_size (ref);
6419 if (TREE_CODE (step) != INTEGER_CST)
6420 continue;
6422 if (s)
6424 if (! tree_int_cst_equal (step, s))
6425 continue;
6427 else
6429 /* Check whether DELTA is a multiple of STEP. */
6430 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6431 if (! tmp)
6432 continue;
6433 delta = tmp;
6436 break;
6439 if (!handled_component_p (ref))
6440 return NULL_TREE;
6443 /* We found a suitable array reference. Copy everything up to it
6444 and replace the index. */
6446 pref = TREE_OPERAND (addr, 0);
6447 ret = copy_node (pref);
6448 pos = ret;
6450 while (pref != ref)
6452 pref = TREE_OPERAND (pref, 0);
6453 TREE_OPERAND (pos, 0) = copy_node (pref);
6454 pos = TREE_OPERAND (pos, 0);
6457 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6458 fold_convert (itype,
6459 TREE_OPERAND (pos, 1)),
6460 fold_convert (itype, delta));
6462 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6466 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6467 means A >= Y && A != MAX, but in this case we know that
6468 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6470 static tree
6471 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6473 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6475 if (TREE_CODE (bound) == LT_EXPR)
6476 a = TREE_OPERAND (bound, 0);
6477 else if (TREE_CODE (bound) == GT_EXPR)
6478 a = TREE_OPERAND (bound, 1);
6479 else
6480 return NULL_TREE;
6482 typea = TREE_TYPE (a);
6483 if (!INTEGRAL_TYPE_P (typea)
6484 && !POINTER_TYPE_P (typea))
6485 return NULL_TREE;
6487 if (TREE_CODE (ineq) == LT_EXPR)
6489 a1 = TREE_OPERAND (ineq, 1);
6490 y = TREE_OPERAND (ineq, 0);
6492 else if (TREE_CODE (ineq) == GT_EXPR)
6494 a1 = TREE_OPERAND (ineq, 0);
6495 y = TREE_OPERAND (ineq, 1);
6497 else
6498 return NULL_TREE;
6500 if (TREE_TYPE (a1) != typea)
6501 return NULL_TREE;
6503 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6504 if (!integer_onep (diff))
6505 return NULL_TREE;
6507 return fold_build2 (GE_EXPR, type, a, y);
6510 /* Fold a unary expression of code CODE and type TYPE with operand
6511 OP0. Return the folded expression if folding is successful.
6512 Otherwise, return NULL_TREE. */
6514 tree
6515 fold_unary (enum tree_code code, tree type, tree op0)
6517 tree tem;
6518 tree arg0;
6519 enum tree_code_class kind = TREE_CODE_CLASS (code);
6521 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6522 && TREE_CODE_LENGTH (code) == 1);
6524 arg0 = op0;
6525 if (arg0)
6527 if (code == NOP_EXPR || code == CONVERT_EXPR
6528 || code == FLOAT_EXPR || code == ABS_EXPR)
6530 /* Don't use STRIP_NOPS, because signedness of argument type
6531 matters. */
6532 STRIP_SIGN_NOPS (arg0);
6534 else
6536 /* Strip any conversions that don't change the mode. This
6537 is safe for every expression, except for a comparison
6538 expression because its signedness is derived from its
6539 operands.
6541 Note that this is done as an internal manipulation within
6542 the constant folder, in order to find the simplest
6543 representation of the arguments so that their form can be
6544 studied. In any case, the appropriate type conversions
6545 should be put back in the tree that will get out of the
6546 constant folder. */
6547 STRIP_NOPS (arg0);
6551 if (TREE_CODE_CLASS (code) == tcc_unary)
6553 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6554 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6555 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6556 else if (TREE_CODE (arg0) == COND_EXPR)
6558 tree arg01 = TREE_OPERAND (arg0, 1);
6559 tree arg02 = TREE_OPERAND (arg0, 2);
6560 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6561 arg01 = fold_build1 (code, type, arg01);
6562 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6563 arg02 = fold_build1 (code, type, arg02);
6564 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6565 arg01, arg02);
6567 /* If this was a conversion, and all we did was to move into
6568 inside the COND_EXPR, bring it back out. But leave it if
6569 it is a conversion from integer to integer and the
6570 result precision is no wider than a word since such a
6571 conversion is cheap and may be optimized away by combine,
6572 while it couldn't if it were outside the COND_EXPR. Then return
6573 so we don't get into an infinite recursion loop taking the
6574 conversion out and then back in. */
6576 if ((code == NOP_EXPR || code == CONVERT_EXPR
6577 || code == NON_LVALUE_EXPR)
6578 && TREE_CODE (tem) == COND_EXPR
6579 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6580 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6581 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6582 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6583 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6584 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6585 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6586 && (INTEGRAL_TYPE_P
6587 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6588 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6589 || flag_syntax_only))
6590 tem = build1 (code, type,
6591 build3 (COND_EXPR,
6592 TREE_TYPE (TREE_OPERAND
6593 (TREE_OPERAND (tem, 1), 0)),
6594 TREE_OPERAND (tem, 0),
6595 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6596 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6597 return tem;
6599 else if (COMPARISON_CLASS_P (arg0))
6601 if (TREE_CODE (type) == BOOLEAN_TYPE)
6603 arg0 = copy_node (arg0);
6604 TREE_TYPE (arg0) = type;
6605 return arg0;
6607 else if (TREE_CODE (type) != INTEGER_TYPE)
6608 return fold_build3 (COND_EXPR, type, arg0,
6609 fold_build1 (code, type,
6610 integer_one_node),
6611 fold_build1 (code, type,
6612 integer_zero_node));
6616 switch (code)
6618 case NOP_EXPR:
6619 case FLOAT_EXPR:
6620 case CONVERT_EXPR:
6621 case FIX_TRUNC_EXPR:
6622 case FIX_CEIL_EXPR:
6623 case FIX_FLOOR_EXPR:
6624 case FIX_ROUND_EXPR:
6625 if (TREE_TYPE (op0) == type)
6626 return op0;
6628 /* Handle cases of two conversions in a row. */
6629 if (TREE_CODE (op0) == NOP_EXPR
6630 || TREE_CODE (op0) == CONVERT_EXPR)
6632 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6633 tree inter_type = TREE_TYPE (op0);
6634 int inside_int = INTEGRAL_TYPE_P (inside_type);
6635 int inside_ptr = POINTER_TYPE_P (inside_type);
6636 int inside_float = FLOAT_TYPE_P (inside_type);
6637 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6638 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6639 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6640 int inter_int = INTEGRAL_TYPE_P (inter_type);
6641 int inter_ptr = POINTER_TYPE_P (inter_type);
6642 int inter_float = FLOAT_TYPE_P (inter_type);
6643 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6644 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6645 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6646 int final_int = INTEGRAL_TYPE_P (type);
6647 int final_ptr = POINTER_TYPE_P (type);
6648 int final_float = FLOAT_TYPE_P (type);
6649 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6650 unsigned int final_prec = TYPE_PRECISION (type);
6651 int final_unsignedp = TYPE_UNSIGNED (type);
6653 /* In addition to the cases of two conversions in a row
6654 handled below, if we are converting something to its own
6655 type via an object of identical or wider precision, neither
6656 conversion is needed. */
6657 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6658 && ((inter_int && final_int) || (inter_float && final_float))
6659 && inter_prec >= final_prec)
6660 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6662 /* Likewise, if the intermediate and final types are either both
6663 float or both integer, we don't need the middle conversion if
6664 it is wider than the final type and doesn't change the signedness
6665 (for integers). Avoid this if the final type is a pointer
6666 since then we sometimes need the inner conversion. Likewise if
6667 the outer has a precision not equal to the size of its mode. */
6668 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6669 || (inter_float && inside_float)
6670 || (inter_vec && inside_vec))
6671 && inter_prec >= inside_prec
6672 && (inter_float || inter_vec
6673 || inter_unsignedp == inside_unsignedp)
6674 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6675 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6676 && ! final_ptr
6677 && (! final_vec || inter_prec == inside_prec))
6678 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6680 /* If we have a sign-extension of a zero-extended value, we can
6681 replace that by a single zero-extension. */
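/* For example, (int) (short) c, where C has type unsigned char,
becomes (int) c: the zero-extension to short already guarantees
a nonnegative value. */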
6682 if (inside_int && inter_int && final_int
6683 && inside_prec < inter_prec && inter_prec < final_prec
6684 && inside_unsignedp && !inter_unsignedp)
6685 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6687 /* Two conversions in a row are not needed unless:
6688 - some conversion is floating-point (overstrict for now), or
6689 - some conversion is a vector (overstrict for now), or
6690 - the intermediate type is narrower than both initial and
6691 final, or
6692 - the intermediate type and innermost type differ in signedness,
6693 and the outermost type is wider than the intermediate, or
6694 - the initial type is a pointer type and the precisions of the
6695 intermediate and final types differ, or
6696 - the final type is a pointer type and the precisions of the
6697 initial and intermediate types differ. */
6698 if (! inside_float && ! inter_float && ! final_float
6699 && ! inside_vec && ! inter_vec && ! final_vec
6700 && (inter_prec > inside_prec || inter_prec > final_prec)
6701 && ! (inside_int && inter_int
6702 && inter_unsignedp != inside_unsignedp
6703 && inter_prec < final_prec)
6704 && ((inter_unsignedp && inter_prec > inside_prec)
6705 == (final_unsignedp && final_prec > inter_prec))
6706 && ! (inside_ptr && inter_prec != final_prec)
6707 && ! (final_ptr && inside_prec != inter_prec)
6708 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6709 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6710 && ! final_ptr)
6711 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6714 /* Handle (T *)&A.B.C for A being of type T and B and C
6715 living at offset zero. This occurs frequently in
6716 C++ upcasting and then accessing the base. */
6717 if (TREE_CODE (op0) == ADDR_EXPR
6718 && POINTER_TYPE_P (type)
6719 && handled_component_p (TREE_OPERAND (op0, 0)))
6721 HOST_WIDE_INT bitsize, bitpos;
6722 tree offset;
6723 enum machine_mode mode;
6724 int unsignedp, volatilep;
6725 tree base = TREE_OPERAND (op0, 0);
6726 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6727 &mode, &unsignedp, &volatilep, false);
6728 /* If the reference was to a (constant) zero offset, we can use
6729 the address of the base if it has the same base type
6730 as the result type. */
6731 if (! offset && bitpos == 0
6732 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6733 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6734 return fold_convert (type, build_fold_addr_expr (base));
6737 if (TREE_CODE (op0) == MODIFY_EXPR
6738 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6739 /* Detect assigning a bitfield. */
6740 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6741 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6743 /* Don't leave an assignment inside a conversion
6744 unless assigning a bitfield. */
6745 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6746 /* First do the assignment, then return converted constant. */
6747 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6748 TREE_NO_WARNING (tem) = 1;
6749 TREE_USED (tem) = 1;
6750 return tem;
6753 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6754 constant (if x has signed type, the sign bit cannot be set
6755 in c). This folds extension into the BIT_AND_EXPR. */
6756 if (INTEGRAL_TYPE_P (type)
6757 && TREE_CODE (type) != BOOLEAN_TYPE
6758 && TREE_CODE (op0) == BIT_AND_EXPR
6759 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6761 tree and = op0;
6762 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6763 int change = 0;
6765 if (TYPE_UNSIGNED (TREE_TYPE (and))
6766 || (TYPE_PRECISION (type)
6767 <= TYPE_PRECISION (TREE_TYPE (and))))
6768 change = 1;
6769 else if (TYPE_PRECISION (TREE_TYPE (and1))
6770 <= HOST_BITS_PER_WIDE_INT
6771 && host_integerp (and1, 1))
6773 unsigned HOST_WIDE_INT cst;
6775 cst = tree_low_cst (and1, 1);
6776 cst &= (HOST_WIDE_INT) -1
6777 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6778 change = (cst == 0);
6779 #ifdef LOAD_EXTEND_OP
6780 if (change
6781 && !flag_syntax_only
6782 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6783 == ZERO_EXTEND))
6785 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6786 and0 = fold_convert (uns, and0);
6787 and1 = fold_convert (uns, and1);
6789 #endif
6791 if (change)
6793 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6794 TREE_INT_CST_HIGH (and1));
6795 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6796 TREE_CONSTANT_OVERFLOW (and1));
6797 return fold_build2 (BIT_AND_EXPR, type,
6798 fold_convert (type, and0), tem);
6802 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6803 T2 being pointers to types of the same size. */
6804 if (POINTER_TYPE_P (type)
6805 && BINARY_CLASS_P (arg0)
6806 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6807 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6809 tree arg00 = TREE_OPERAND (arg0, 0);
6810 tree t0 = type;
6811 tree t1 = TREE_TYPE (arg00);
6812 tree tt0 = TREE_TYPE (t0);
6813 tree tt1 = TREE_TYPE (t1);
6814 tree s0 = TYPE_SIZE (tt0);
6815 tree s1 = TYPE_SIZE (tt1);
6817 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6818 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6819 TREE_OPERAND (arg0, 1));
6822 tem = fold_convert_const (code, type, arg0);
6823 return tem ? tem : NULL_TREE;
6825 case VIEW_CONVERT_EXPR:
6826 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6827 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6828 return NULL_TREE;
6830 case NEGATE_EXPR:
6831 if (negate_expr_p (arg0))
6832 return fold_convert (type, negate_expr (arg0));
6833 /* Convert - (~A) to A + 1. */
6834 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6835 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6836 build_int_cst (type, 1));
6837 return NULL_TREE;
6839 case ABS_EXPR:
6840 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6841 return fold_abs_const (arg0, type);
6842 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6843 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6844 /* Convert fabs((double)float) into (double)fabsf(float). */
6845 else if (TREE_CODE (arg0) == NOP_EXPR
6846 && TREE_CODE (type) == REAL_TYPE)
6848 tree targ0 = strip_float_extensions (arg0);
6849 if (targ0 != arg0)
6850 return fold_convert (type, fold_build1 (ABS_EXPR,
6851 TREE_TYPE (targ0),
6852 targ0));
6854 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6855 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6856 return arg0;
6858 /* Strip sign ops from argument. */
6859 if (TREE_CODE (type) == REAL_TYPE)
6861 tem = fold_strip_sign_ops (arg0);
6862 if (tem)
6863 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6865 return NULL_TREE;
6867 case CONJ_EXPR:
6868 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6869 return fold_convert (type, arg0);
6870 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6871 return build2 (COMPLEX_EXPR, type,
6872 TREE_OPERAND (arg0, 0),
6873 negate_expr (TREE_OPERAND (arg0, 1)));
6874 else if (TREE_CODE (arg0) == COMPLEX_CST)
6875 return build_complex (type, TREE_REALPART (arg0),
6876 negate_expr (TREE_IMAGPART (arg0)));
6877 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6878 return fold_build2 (TREE_CODE (arg0), type,
6879 fold_build1 (CONJ_EXPR, type,
6880 TREE_OPERAND (arg0, 0)),
6881 fold_build1 (CONJ_EXPR, type,
6882 TREE_OPERAND (arg0, 1)));
6883 else if (TREE_CODE (arg0) == CONJ_EXPR)
6884 return TREE_OPERAND (arg0, 0);
6885 return NULL_TREE;
6887 case BIT_NOT_EXPR:
6888 if (TREE_CODE (arg0) == INTEGER_CST)
6889 return fold_not_const (arg0, type);
6890 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6891 return TREE_OPERAND (arg0, 0);
6892 /* Convert ~ (-A) to A - 1. */
6893 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6894 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6895 build_int_cst (type, 1));
6896 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6897 else if (INTEGRAL_TYPE_P (type)
6898 && ((TREE_CODE (arg0) == MINUS_EXPR
6899 && integer_onep (TREE_OPERAND (arg0, 1)))
6900 || (TREE_CODE (arg0) == PLUS_EXPR
6901 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6902 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6903 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6904 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6905 && (tem = fold_unary (BIT_NOT_EXPR, type,
6906 fold_convert (type,
6907 TREE_OPERAND (arg0, 0)))))
6908 return fold_build2 (BIT_XOR_EXPR, type, tem,
6909 fold_convert (type, TREE_OPERAND (arg0, 1)));
6910 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6911 && (tem = fold_unary (BIT_NOT_EXPR, type,
6912 fold_convert (type,
6913 TREE_OPERAND (arg0, 1)))))
6914 return fold_build2 (BIT_XOR_EXPR, type,
6915 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6917 return NULL_TREE;
6919 case TRUTH_NOT_EXPR:
6920 /* The argument to invert_truthvalue must have Boolean type. */
6921 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6922 arg0 = fold_convert (boolean_type_node, arg0);
6924 /* Note that the operand of this must be an int
6925 and its values must be 0 or 1.
6926 ("true" is a fixed value perhaps depending on the language,
6927 but we don't handle values other than 1 correctly yet.) */
6928 tem = invert_truthvalue (arg0);
6929 /* Avoid infinite recursion. */
6930 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6931 return NULL_TREE;
6932 return fold_convert (type, tem);
6934 case REALPART_EXPR:
6935 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6936 return NULL_TREE;
6937 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6938 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6939 TREE_OPERAND (arg0, 1));
6940 else if (TREE_CODE (arg0) == COMPLEX_CST)
6941 return TREE_REALPART (arg0);
6942 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6943 return fold_build2 (TREE_CODE (arg0), type,
6944 fold_build1 (REALPART_EXPR, type,
6945 TREE_OPERAND (arg0, 0)),
6946 fold_build1 (REALPART_EXPR, type,
6947 TREE_OPERAND (arg0, 1)));
6948 return NULL_TREE;
6950 case IMAGPART_EXPR:
6951 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6952 return fold_convert (type, integer_zero_node);
6953 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6954 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6955 TREE_OPERAND (arg0, 0));
6956 else if (TREE_CODE (arg0) == COMPLEX_CST)
6957 return TREE_IMAGPART (arg0);
6958 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6959 return fold_build2 (TREE_CODE (arg0), type,
6960 fold_build1 (IMAGPART_EXPR, type,
6961 TREE_OPERAND (arg0, 0)),
6962 fold_build1 (IMAGPART_EXPR, type,
6963 TREE_OPERAND (arg0, 1)));
6964 return NULL_TREE;
6966 default:
6967 return NULL_TREE;
6968 } /* switch (code) */
6971 /* Fold a binary expression of code CODE and type TYPE with operands
6972 OP0 and OP1. Return the folded expression if folding is
6973 successful. Otherwise, return NULL_TREE. */
6975 tree
6976 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
6978 tree t1 = NULL_TREE;
6979 tree tem;
6980 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6981 enum tree_code_class kind = TREE_CODE_CLASS (code);
6983 /* WINS will be nonzero when the switch is done
6984 if all operands are constant. */
6985 int wins = 1;
6987 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6988 && TREE_CODE_LENGTH (code) == 2);
6990 arg0 = op0;
6991 arg1 = op1;
6993 if (arg0)
6995 tree subop;
6997 /* Strip any conversions that don't change the mode. This is
6998 safe for every expression, except for a comparison expression
6999 because its signedness is derived from its operands. So, in
7000 the latter case, only strip conversions that don't change the
7001 signedness.
7003 Note that this is done as an internal manipulation within the
7004 constant folder, in order to find the simplest representation
7005 of the arguments so that their form can be studied. In any
7006 case, the appropriate type conversions should be put back in
7007 the tree that will get out of the constant folder. */
7008 if (kind == tcc_comparison)
7009 STRIP_SIGN_NOPS (arg0);
7010 else
7011 STRIP_NOPS (arg0);
7013 if (TREE_CODE (arg0) == COMPLEX_CST)
7014 subop = TREE_REALPART (arg0);
7015 else
7016 subop = arg0;
7018 if (TREE_CODE (subop) != INTEGER_CST
7019 && TREE_CODE (subop) != REAL_CST)
7020 /* Note that TREE_CONSTANT isn't enough:
7021 static var addresses are constant but we can't
7022 do arithmetic on them. */
7023 wins = 0;
7026 if (arg1)
7028 tree subop;
7030 /* Strip any conversions that don't change the mode. This is
7031 safe for every expression, except for a comparison expression
7032 because its signedness is derived from its operands. So, in
7033 the latter case, only strip conversions that don't change the
7034 signedness.
7036 Note that this is done as an internal manipulation within the
7037 constant folder, in order to find the simplest representation
7038 of the arguments so that their form can be studied. In any
7039 case, the appropriate type conversions should be put back in
7040 the tree that will get out of the constant folder. */
7041 if (kind == tcc_comparison)
7042 STRIP_SIGN_NOPS (arg1);
7043 else
7044 STRIP_NOPS (arg1);
7046 if (TREE_CODE (arg1) == COMPLEX_CST)
7047 subop = TREE_REALPART (arg1);
7048 else
7049 subop = arg1;
7051 if (TREE_CODE (subop) != INTEGER_CST
7052 && TREE_CODE (subop) != REAL_CST)
7053 /* Note that TREE_CONSTANT isn't enough:
7054 static var addresses are constant but we can't
7055 do arithmetic on them. */
7056 wins = 0;
7059 /* If this is a commutative operation, and ARG0 is a constant, move it
7060 to ARG1 to reduce the number of tests below. */
7061 if (commutative_tree_code (code)
7062 && tree_swap_operands_p (arg0, arg1, true))
7063 return fold_build2 (code, type, op1, op0);
7065 /* Now WINS is set as described above,
7066 ARG0 is the first operand of EXPR,
7067 and ARG1 is the second operand (if it has more than one operand).
7069 First check for cases where an arithmetic operation is applied to a
7070 compound, conditional, or comparison operation. Push the arithmetic
7071 operation inside the compound or conditional to see if any folding
7072 can then be done. Convert comparison to conditional for this purpose.
7073 This also optimizes non-constant cases that used to be done in
7074 expand_expr.
7076 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
7077 one of the operands is a comparison and the other is a comparison, a
7078 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7079 code below would make the expression more complex. Change it to a
7080 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7081 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
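/* For example, (X < Y) & (A > B) becomes the TRUTH_AND_EXPR
(X < Y) && (A > B), and (X < Y) != (A > B) becomes the
corresponding TRUTH_XOR_EXPR. */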
7083 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7084 || code == EQ_EXPR || code == NE_EXPR)
7085 && ((truth_value_p (TREE_CODE (arg0))
7086 && (truth_value_p (TREE_CODE (arg1))
7087 || (TREE_CODE (arg1) == BIT_AND_EXPR
7088 && integer_onep (TREE_OPERAND (arg1, 1)))))
7089 || (truth_value_p (TREE_CODE (arg1))
7090 && (truth_value_p (TREE_CODE (arg0))
7091 || (TREE_CODE (arg0) == BIT_AND_EXPR
7092 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7094 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7095 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7096 : TRUTH_XOR_EXPR,
7097 boolean_type_node,
7098 fold_convert (boolean_type_node, arg0),
7099 fold_convert (boolean_type_node, arg1));
7101 if (code == EQ_EXPR)
7102 tem = invert_truthvalue (tem);
7104 return fold_convert (type, tem);
7107 if (TREE_CODE_CLASS (code) == tcc_comparison
7108 && TREE_CODE (arg0) == COMPOUND_EXPR)
7109 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7110 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7111 else if (TREE_CODE_CLASS (code) == tcc_comparison
7112 && TREE_CODE (arg1) == COMPOUND_EXPR)
7113 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7114 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7115 else if (TREE_CODE_CLASS (code) == tcc_binary
7116 || TREE_CODE_CLASS (code) == tcc_comparison)
7118 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7119 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7120 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7121 arg1));
7122 if (TREE_CODE (arg1) == COMPOUND_EXPR
7123 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7124 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7125 fold_build2 (code, type,
7126 arg0, TREE_OPERAND (arg1, 1)));
7128 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7130 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7131 arg0, arg1,
7132 /*cond_first_p=*/1);
7133 if (tem != NULL_TREE)
7134 return tem;
7137 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7139 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7140 arg1, arg0,
7141 /*cond_first_p=*/0);
7142 if (tem != NULL_TREE)
7143 return tem;
7147 switch (code)
7149 case PLUS_EXPR:
7150 /* A + (-B) -> A - B */
7151 if (TREE_CODE (arg1) == NEGATE_EXPR)
7152 return fold_build2 (MINUS_EXPR, type,
7153 fold_convert (type, arg0),
7154 fold_convert (type, TREE_OPERAND (arg1, 0)));
7155 /* (-A) + B -> B - A */
7156 if (TREE_CODE (arg0) == NEGATE_EXPR
7157 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7158 return fold_build2 (MINUS_EXPR, type,
7159 fold_convert (type, arg1),
7160 fold_convert (type, TREE_OPERAND (arg0, 0)));
7161 /* Convert ~A + 1 to -A. */
7162 if (INTEGRAL_TYPE_P (type)
7163 && TREE_CODE (arg0) == BIT_NOT_EXPR
7164 && integer_onep (arg1))
7165 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7167 if (! FLOAT_TYPE_P (type))
7169 if (integer_zerop (arg1))
7170 return non_lvalue (fold_convert (type, arg0));
7172 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7173 with a constant, and the two constants have no bits in common,
7174 we should treat this as a BIT_IOR_EXPR since this may produce more
7175 simplifications. */
7176 if (TREE_CODE (arg0) == BIT_AND_EXPR
7177 && TREE_CODE (arg1) == BIT_AND_EXPR
7178 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7179 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7180 && integer_zerop (const_binop (BIT_AND_EXPR,
7181 TREE_OPERAND (arg0, 1),
7182 TREE_OPERAND (arg1, 1), 0)))
7184 code = BIT_IOR_EXPR;
7185 goto bit_ior;
7188 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7189 (plus (plus (mult) (mult)) (foo)) so that we can
7190 take advantage of the factoring cases below. */
7191 if (((TREE_CODE (arg0) == PLUS_EXPR
7192 || TREE_CODE (arg0) == MINUS_EXPR)
7193 && TREE_CODE (arg1) == MULT_EXPR)
7194 || ((TREE_CODE (arg1) == PLUS_EXPR
7195 || TREE_CODE (arg1) == MINUS_EXPR)
7196 && TREE_CODE (arg0) == MULT_EXPR))
7198 tree parg0, parg1, parg, marg;
7199 enum tree_code pcode;
7201 if (TREE_CODE (arg1) == MULT_EXPR)
7202 parg = arg0, marg = arg1;
7203 else
7204 parg = arg1, marg = arg0;
7205 pcode = TREE_CODE (parg);
7206 parg0 = TREE_OPERAND (parg, 0);
7207 parg1 = TREE_OPERAND (parg, 1);
7208 STRIP_NOPS (parg0);
7209 STRIP_NOPS (parg1);
7211 if (TREE_CODE (parg0) == MULT_EXPR
7212 && TREE_CODE (parg1) != MULT_EXPR)
7213 return fold_build2 (pcode, type,
7214 fold_build2 (PLUS_EXPR, type,
7215 fold_convert (type, parg0),
7216 fold_convert (type, marg)),
7217 fold_convert (type, parg1));
7218 if (TREE_CODE (parg0) != MULT_EXPR
7219 && TREE_CODE (parg1) == MULT_EXPR)
7220 return fold_build2 (PLUS_EXPR, type,
7221 fold_convert (type, parg0),
7222 fold_build2 (pcode, type,
7223 fold_convert (type, marg),
7224 fold_convert (type,
7225 parg1)));
7228 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7230 tree arg00, arg01, arg10, arg11;
7231 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7233 /* (A * C) + (B * C) -> (A+B) * C.
7234 We are most concerned about the case where C is a constant,
7235 but other combinations show up during loop reduction. Since
7236 it is not difficult, try all four possibilities. */
7238 arg00 = TREE_OPERAND (arg0, 0);
7239 arg01 = TREE_OPERAND (arg0, 1);
7240 arg10 = TREE_OPERAND (arg1, 0);
7241 arg11 = TREE_OPERAND (arg1, 1);
7242 same = NULL_TREE;
7244 if (operand_equal_p (arg01, arg11, 0))
7245 same = arg01, alt0 = arg00, alt1 = arg10;
7246 else if (operand_equal_p (arg00, arg10, 0))
7247 same = arg00, alt0 = arg01, alt1 = arg11;
7248 else if (operand_equal_p (arg00, arg11, 0))
7249 same = arg00, alt0 = arg01, alt1 = arg10;
7250 else if (operand_equal_p (arg01, arg10, 0))
7251 same = arg01, alt0 = arg00, alt1 = arg11;
7253 /* No identical multiplicands; see if we can find a common
7254 power-of-two factor in non-power-of-two multiplies. This
7255 can help in multi-dimensional array access. */
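/* For example, i*12 + j*4 shares the power-of-two factor 4, so the
   sum can be rewritten as (i*3 + j) * 4. */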
7256 else if (TREE_CODE (arg01) == INTEGER_CST
7257 && TREE_CODE (arg11) == INTEGER_CST
7258 && TREE_INT_CST_HIGH (arg01) == 0
7259 && TREE_INT_CST_HIGH (arg11) == 0)
7261 HOST_WIDE_INT int01, int11, tmp;
7262 int01 = TREE_INT_CST_LOW (arg01);
7263 int11 = TREE_INT_CST_LOW (arg11);
7265 /* Move min of absolute values to int11. */
7266 if ((int01 >= 0 ? int01 : -int01)
7267 < (int11 >= 0 ? int11 : -int11))
7269 tmp = int01, int01 = int11, int11 = tmp;
7270 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7271 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7274 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7276 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7277 build_int_cst (NULL_TREE,
7278 int01 / int11));
7279 alt1 = arg10;
7280 same = arg11;
7284 if (same)
7285 return fold_build2 (MULT_EXPR, type,
7286 fold_build2 (PLUS_EXPR, type,
7287 fold_convert (type, alt0),
7288 fold_convert (type, alt1)),
7289 fold_convert (type, same));
7292 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7293 of the array. The loop optimizer sometimes produces this type of
7294 expression. */
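/* For example (assuming, for illustration, an array of 4-byte
   elements), &a[i] + j*4 can fold to &a[i + j]. */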
7295 if (TREE_CODE (arg0) == ADDR_EXPR)
7297 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7298 if (tem)
7299 return fold_convert (type, fold (tem));
7301 else if (TREE_CODE (arg1) == ADDR_EXPR)
7303 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7304 if (tem)
7305 return fold_convert (type, fold (tem));
7308 else
7310 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7311 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7312 return non_lvalue (fold_convert (type, arg0));
7314 /* Likewise if the operands are reversed. */
7315 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7316 return non_lvalue (fold_convert (type, arg1));
7318 /* Convert X + -C into X - C. */
7319 if (TREE_CODE (arg1) == REAL_CST
7320 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7322 tem = fold_negate_const (arg1, type);
7323 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7324 return fold_build2 (MINUS_EXPR, type,
7325 fold_convert (type, arg0),
7326 fold_convert (type, tem));
7329 if (flag_unsafe_math_optimizations
7330 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7331 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7332 && (tem = distribute_real_division (code, type, arg0, arg1)))
7333 return tem;
7335 /* Convert x+x into x*2.0. */
7336 if (operand_equal_p (arg0, arg1, 0)
7337 && SCALAR_FLOAT_TYPE_P (type))
7338 return fold_build2 (MULT_EXPR, type, arg0,
7339 build_real (type, dconst2));
7341 /* Convert x*c+x into x*(c+1). */
7342 if (flag_unsafe_math_optimizations
7343 && TREE_CODE (arg0) == MULT_EXPR
7344 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7345 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7346 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7348 REAL_VALUE_TYPE c;
7350 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7351 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7352 return fold_build2 (MULT_EXPR, type, arg1,
7353 build_real (type, c));
7356 /* Convert x+x*c into x*(c+1). */
7357 if (flag_unsafe_math_optimizations
7358 && TREE_CODE (arg1) == MULT_EXPR
7359 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7360 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7361 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7363 REAL_VALUE_TYPE c;
7365 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7366 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7367 return fold_build2 (MULT_EXPR, type, arg0,
7368 build_real (type, c));
7371 /* Convert x*c1+x*c2 into x*(c1+c2). */
7372 if (flag_unsafe_math_optimizations
7373 && TREE_CODE (arg0) == MULT_EXPR
7374 && TREE_CODE (arg1) == MULT_EXPR
7375 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7376 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7377 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7378 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7379 && operand_equal_p (TREE_OPERAND (arg0, 0),
7380 TREE_OPERAND (arg1, 0), 0))
7382 REAL_VALUE_TYPE c1, c2;
7384 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7385 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7386 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7387 return fold_build2 (MULT_EXPR, type,
7388 TREE_OPERAND (arg0, 0),
7389 build_real (type, c1));
7391 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7392 if (flag_unsafe_math_optimizations
7393 && TREE_CODE (arg1) == PLUS_EXPR
7394 && TREE_CODE (arg0) != MULT_EXPR)
7396 tree tree10 = TREE_OPERAND (arg1, 0);
7397 tree tree11 = TREE_OPERAND (arg1, 1);
7398 if (TREE_CODE (tree11) == MULT_EXPR
7399 && TREE_CODE (tree10) == MULT_EXPR)
7401 tree tree0;
7402 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7403 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7406 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7407 if (flag_unsafe_math_optimizations
7408 && TREE_CODE (arg0) == PLUS_EXPR
7409 && TREE_CODE (arg1) != MULT_EXPR)
7411 tree tree00 = TREE_OPERAND (arg0, 0);
7412 tree tree01 = TREE_OPERAND (arg0, 1);
7413 if (TREE_CODE (tree01) == MULT_EXPR
7414 && TREE_CODE (tree00) == MULT_EXPR)
7416 tree tree0;
7417 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7418 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7423 bit_rotate:
7424 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
7425 is a rotate of A by C1 bits. */
7426 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
7427 is a rotate of A by B bits. */
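/* For example, with a 32-bit unsigned x, both (x << 3) + (x >> 29)
   and (x << b) + (x >> (32 - b)) fold to left rotates of x. */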
7429 enum tree_code code0, code1;
7430 code0 = TREE_CODE (arg0);
7431 code1 = TREE_CODE (arg1);
7432 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7433 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7434 && operand_equal_p (TREE_OPERAND (arg0, 0),
7435 TREE_OPERAND (arg1, 0), 0)
7436 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7438 tree tree01, tree11;
7439 enum tree_code code01, code11;
7441 tree01 = TREE_OPERAND (arg0, 1);
7442 tree11 = TREE_OPERAND (arg1, 1);
7443 STRIP_NOPS (tree01);
7444 STRIP_NOPS (tree11);
7445 code01 = TREE_CODE (tree01);
7446 code11 = TREE_CODE (tree11);
7447 if (code01 == INTEGER_CST
7448 && code11 == INTEGER_CST
7449 && TREE_INT_CST_HIGH (tree01) == 0
7450 && TREE_INT_CST_HIGH (tree11) == 0
7451 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7452 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7453 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7454 code0 == LSHIFT_EXPR ? tree01 : tree11);
7455 else if (code11 == MINUS_EXPR)
7457 tree tree110, tree111;
7458 tree110 = TREE_OPERAND (tree11, 0);
7459 tree111 = TREE_OPERAND (tree11, 1);
7460 STRIP_NOPS (tree110);
7461 STRIP_NOPS (tree111);
7462 if (TREE_CODE (tree110) == INTEGER_CST
7463 && 0 == compare_tree_int (tree110,
7464 TYPE_PRECISION
7465 (TREE_TYPE (TREE_OPERAND
7466 (arg0, 0))))
7467 && operand_equal_p (tree01, tree111, 0))
7468 return build2 ((code0 == LSHIFT_EXPR
7469 ? LROTATE_EXPR
7470 : RROTATE_EXPR),
7471 type, TREE_OPERAND (arg0, 0), tree01);
7473 else if (code01 == MINUS_EXPR)
7475 tree tree010, tree011;
7476 tree010 = TREE_OPERAND (tree01, 0);
7477 tree011 = TREE_OPERAND (tree01, 1);
7478 STRIP_NOPS (tree010);
7479 STRIP_NOPS (tree011);
7480 if (TREE_CODE (tree010) == INTEGER_CST
7481 && 0 == compare_tree_int (tree010,
7482 TYPE_PRECISION
7483 (TREE_TYPE (TREE_OPERAND
7484 (arg0, 0))))
7485 && operand_equal_p (tree11, tree011, 0))
7486 return build2 ((code0 != LSHIFT_EXPR
7487 ? LROTATE_EXPR
7488 : RROTATE_EXPR),
7489 type, TREE_OPERAND (arg0, 0), tree11);
7494 associate:
7495 /* In most languages, we can't associate operations on floats through
7496 parentheses. Rather than remember where the parentheses were, we
7497 don't associate floats at all, unless the user has specified
7498 -funsafe-math-optimizations. */
7500 if (! wins
7501 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7503 tree var0, con0, lit0, minus_lit0;
7504 tree var1, con1, lit1, minus_lit1;
7506 /* Split both trees into variables, constants, and literals. Then
7507 associate each group together, the constants with literals,
7508 then the result with variables. This increases the chances of
7509 literals being recombined later and of generating relocatable
7510 expressions for the sum of a constant and literal. */
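/* For example, (x + 5) + (y + 7) splits into the variables x and y
   and the literals 5 and 7, which recombine as (x + y) + 12. */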
7511 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7512 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7513 code == MINUS_EXPR);
7515 /* Only do something if we found more than two objects. Otherwise,
7516 nothing has changed and we risk infinite recursion. */
7517 if (2 < ((var0 != 0) + (var1 != 0)
7518 + (con0 != 0) + (con1 != 0)
7519 + (lit0 != 0) + (lit1 != 0)
7520 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7522 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7523 if (code == MINUS_EXPR)
7524 code = PLUS_EXPR;
7526 var0 = associate_trees (var0, var1, code, type);
7527 con0 = associate_trees (con0, con1, code, type);
7528 lit0 = associate_trees (lit0, lit1, code, type);
7529 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7531 /* Preserve the MINUS_EXPR if the negative part of the literal is
7532 greater than the positive part. Otherwise, the multiplicative
7533 folding code (i.e. extract_muldiv) may be fooled in case
7534 unsigned constants are subtracted, like in the following
7535 example: ((X*2 + 4) - 8U)/2. */
7536 if (minus_lit0 && lit0)
7538 if (TREE_CODE (lit0) == INTEGER_CST
7539 && TREE_CODE (minus_lit0) == INTEGER_CST
7540 && tree_int_cst_lt (lit0, minus_lit0))
7542 minus_lit0 = associate_trees (minus_lit0, lit0,
7543 MINUS_EXPR, type);
7544 lit0 = 0;
7546 else
7548 lit0 = associate_trees (lit0, minus_lit0,
7549 MINUS_EXPR, type);
7550 minus_lit0 = 0;
7553 if (minus_lit0)
7555 if (con0 == 0)
7556 return fold_convert (type,
7557 associate_trees (var0, minus_lit0,
7558 MINUS_EXPR, type));
7559 else
7561 con0 = associate_trees (con0, minus_lit0,
7562 MINUS_EXPR, type);
7563 return fold_convert (type,
7564 associate_trees (var0, con0,
7565 PLUS_EXPR, type));
7569 con0 = associate_trees (con0, lit0, code, type);
7570 return fold_convert (type, associate_trees (var0, con0,
7571 code, type));
7575 binary:
7576 if (wins)
7577 t1 = const_binop (code, arg0, arg1, 0);
7578 if (t1 != NULL_TREE)
7580 /* The return value should always have
7581 the same type as the original expression. */
7582 if (TREE_TYPE (t1) != type)
7583 t1 = fold_convert (type, t1);
7585 return t1;
7587 return NULL_TREE;
7589 case MINUS_EXPR:
7590 /* A - (-B) -> A + B */
7591 if (TREE_CODE (arg1) == NEGATE_EXPR)
7592 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7593 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7594 if (TREE_CODE (arg0) == NEGATE_EXPR
7595 && (FLOAT_TYPE_P (type)
7596 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7597 && negate_expr_p (arg1)
7598 && reorder_operands_p (arg0, arg1))
7599 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7600 TREE_OPERAND (arg0, 0));
7601 /* Convert -A - 1 to ~A. */
7602 if (INTEGRAL_TYPE_P (type)
7603 && TREE_CODE (arg0) == NEGATE_EXPR
7604 && integer_onep (arg1))
7605 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7607 /* Convert -1 - A to ~A. */
7608 if (INTEGRAL_TYPE_P (type)
7609 && integer_all_onesp (arg0))
7610 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7612 if (! FLOAT_TYPE_P (type))
7614 if (! wins && integer_zerop (arg0))
7615 return negate_expr (fold_convert (type, arg1));
7616 if (integer_zerop (arg1))
7617 return non_lvalue (fold_convert (type, arg0));
7619 /* Fold A - (A & B) into ~B & A. */
7620 if (!TREE_SIDE_EFFECTS (arg0)
7621 && TREE_CODE (arg1) == BIT_AND_EXPR)
7623 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7624 return fold_build2 (BIT_AND_EXPR, type,
7625 fold_build1 (BIT_NOT_EXPR, type,
7626 TREE_OPERAND (arg1, 0)),
7627 arg0);
7628 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7629 return fold_build2 (BIT_AND_EXPR, type,
7630 fold_build1 (BIT_NOT_EXPR, type,
7631 TREE_OPERAND (arg1, 1)),
7632 arg0);
7635 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7636 any power of 2 minus 1. */
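/* For example, with B == 7, (a & ~7) - (a & 7) becomes (a ^ 7) - 7. */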
7637 if (TREE_CODE (arg0) == BIT_AND_EXPR
7638 && TREE_CODE (arg1) == BIT_AND_EXPR
7639 && operand_equal_p (TREE_OPERAND (arg0, 0),
7640 TREE_OPERAND (arg1, 0), 0))
7642 tree mask0 = TREE_OPERAND (arg0, 1);
7643 tree mask1 = TREE_OPERAND (arg1, 1);
7644 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7646 if (operand_equal_p (tem, mask1, 0))
7648 tem = fold_build2 (BIT_XOR_EXPR, type,
7649 TREE_OPERAND (arg0, 0), mask1);
7650 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7655 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7656 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7657 return non_lvalue (fold_convert (type, arg0));
7659 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7660 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7661 (-ARG1 + ARG0) reduces to -ARG1. */
7662 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7663 return negate_expr (fold_convert (type, arg1));
7665 /* Fold &x - &x. This can happen from &x.foo - &x.
7666 This is unsafe for certain floats even in non-IEEE formats.
7667 In IEEE, it is unsafe because it does the wrong thing for NaNs.
7668 Also note that operand_equal_p is always false if an operand
7669 is volatile. */
7671 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7672 && operand_equal_p (arg0, arg1, 0))
7673 return fold_convert (type, integer_zero_node);
7675 /* A - B -> A + (-B) if B is easily negatable. */
7676 if (!wins && negate_expr_p (arg1)
7677 && ((FLOAT_TYPE_P (type)
7678 /* Avoid this transformation if B is a positive REAL_CST. */
7679 && (TREE_CODE (arg1) != REAL_CST
7680 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7681 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7682 return fold_build2 (PLUS_EXPR, type,
7683 fold_convert (type, arg0),
7684 fold_convert (type, negate_expr (arg1)));
7686 /* Try folding difference of addresses. */
7688 HOST_WIDE_INT diff;
7690 if ((TREE_CODE (arg0) == ADDR_EXPR
7691 || TREE_CODE (arg1) == ADDR_EXPR)
7692 && ptr_difference_const (arg0, arg1, &diff))
7693 return build_int_cst_type (type, diff);
7696 /* Fold &a[i] - &a[j] to i-j. */
7697 if (TREE_CODE (arg0) == ADDR_EXPR
7698 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7699 && TREE_CODE (arg1) == ADDR_EXPR
7700 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7702 tree aref0 = TREE_OPERAND (arg0, 0);
7703 tree aref1 = TREE_OPERAND (arg1, 0);
7704 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7705 TREE_OPERAND (aref1, 0), 0))
7707 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7708 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7709 tree esz = array_ref_element_size (aref0);
7710 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7711 return fold_build2 (MULT_EXPR, type, diff,
7712 fold_convert (type, esz));
7717 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7718 of the array. The loop optimizer sometimes produces this type of
7719 expression. */
7720 if (TREE_CODE (arg0) == ADDR_EXPR)
7722 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7723 if (tem)
7724 return fold_convert (type, fold (tem));
7727 if (flag_unsafe_math_optimizations
7728 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7729 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7730 && (tem = distribute_real_division (code, type, arg0, arg1)))
7731 return tem;
7733 if (TREE_CODE (arg0) == MULT_EXPR
7734 && TREE_CODE (arg1) == MULT_EXPR
7735 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7737 /* (A * C) - (B * C) -> (A-B) * C. */
7738 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7739 TREE_OPERAND (arg1, 1), 0))
7740 return fold_build2 (MULT_EXPR, type,
7741 fold_build2 (MINUS_EXPR, type,
7742 TREE_OPERAND (arg0, 0),
7743 TREE_OPERAND (arg1, 0)),
7744 TREE_OPERAND (arg0, 1));
7745 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7746 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7747 TREE_OPERAND (arg1, 0), 0))
7748 return fold_build2 (MULT_EXPR, type,
7749 TREE_OPERAND (arg0, 0),
7750 fold_build2 (MINUS_EXPR, type,
7751 TREE_OPERAND (arg0, 1),
7752 TREE_OPERAND (arg1, 1)));
7755 goto associate;
7757 case MULT_EXPR:
7758 /* (-A) * (-B) -> A * B */
7759 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7760 return fold_build2 (MULT_EXPR, type,
7761 TREE_OPERAND (arg0, 0),
7762 negate_expr (arg1));
7763 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7764 return fold_build2 (MULT_EXPR, type,
7765 negate_expr (arg0),
7766 TREE_OPERAND (arg1, 0));
7768 if (! FLOAT_TYPE_P (type))
7770 if (integer_zerop (arg1))
7771 return omit_one_operand (type, arg1, arg0);
7772 if (integer_onep (arg1))
7773 return non_lvalue (fold_convert (type, arg0));
7774 /* Transform x * -1 into -x. */
7775 if (integer_all_onesp (arg1))
7776 return fold_convert (type, negate_expr (arg0));
7778 /* (a * (1 << b)) is (a << b) */
7779 if (TREE_CODE (arg1) == LSHIFT_EXPR
7780 && integer_onep (TREE_OPERAND (arg1, 0)))
7781 return fold_build2 (LSHIFT_EXPR, type, arg0,
7782 TREE_OPERAND (arg1, 1));
7783 if (TREE_CODE (arg0) == LSHIFT_EXPR
7784 && integer_onep (TREE_OPERAND (arg0, 0)))
7785 return fold_build2 (LSHIFT_EXPR, type, arg1,
7786 TREE_OPERAND (arg0, 1));
7788 if (TREE_CODE (arg1) == INTEGER_CST
7789 && 0 != (tem = extract_muldiv (op0,
7790 fold_convert (type, arg1),
7791 code, NULL_TREE)))
7792 return fold_convert (type, tem);
7795 else
7797 /* Maybe fold x * 0 to 0. The expressions aren't the same
7798 when x is NaN, since x * 0 is also NaN. Nor are they the
7799 same in modes with signed zeros, since multiplying a
7800 negative value by 0 gives -0, not +0. */
7801 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7802 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7803 && real_zerop (arg1))
7804 return omit_one_operand (type, arg1, arg0);
7805 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7806 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7807 && real_onep (arg1))
7808 return non_lvalue (fold_convert (type, arg0));
7810 /* Transform x * -1.0 into -x. */
7811 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7812 && real_minus_onep (arg1))
7813 return fold_convert (type, negate_expr (arg0));
7815 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7816 if (flag_unsafe_math_optimizations
7817 && TREE_CODE (arg0) == RDIV_EXPR
7818 && TREE_CODE (arg1) == REAL_CST
7819 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7821 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7822 arg1, 0);
7823 if (tem)
7824 return fold_build2 (RDIV_EXPR, type, tem,
7825 TREE_OPERAND (arg0, 1));
7828 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7829 if (operand_equal_p (arg0, arg1, 0))
7831 tree tem = fold_strip_sign_ops (arg0);
7832 if (tem != NULL_TREE)
7834 tem = fold_convert (type, tem);
7835 return fold_build2 (MULT_EXPR, type, tem, tem);
7839 if (flag_unsafe_math_optimizations)
7841 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7842 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7844 /* Optimizations of root(...)*root(...). */
7845 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7847 tree rootfn, arg, arglist;
7848 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7849 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7851 /* Optimize sqrt(x)*sqrt(x) as x. */
7852 if (BUILTIN_SQRT_P (fcode0)
7853 && operand_equal_p (arg00, arg10, 0)
7854 && ! HONOR_SNANS (TYPE_MODE (type)))
7855 return arg00;
7857 /* Optimize root(x)*root(y) as root(x*y). */
7858 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7859 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7860 arglist = build_tree_list (NULL_TREE, arg);
7861 return build_function_call_expr (rootfn, arglist);
7864 /* Optimize expN(x)*expN(y) as expN(x+y). */
7865 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7867 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7868 tree arg = fold_build2 (PLUS_EXPR, type,
7869 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7870 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7871 tree arglist = build_tree_list (NULL_TREE, arg);
7872 return build_function_call_expr (expfn, arglist);
7875 /* Optimizations of pow(...)*pow(...). */
7876 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7877 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7878 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7880 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7881 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7882 1)));
7883 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7884 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7885 1)));
7887 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7888 if (operand_equal_p (arg01, arg11, 0))
7890 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7891 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7892 tree arglist = tree_cons (NULL_TREE, arg,
7893 build_tree_list (NULL_TREE,
7894 arg01));
7895 return build_function_call_expr (powfn, arglist);
7898 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7899 if (operand_equal_p (arg00, arg10, 0))
7901 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7902 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7903 tree arglist = tree_cons (NULL_TREE, arg00,
7904 build_tree_list (NULL_TREE,
7905 arg));
7906 return build_function_call_expr (powfn, arglist);
7910 /* Optimize tan(x)*cos(x) as sin(x). */
7911 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7912 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7913 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7914 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7915 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7916 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7917 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7918 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7920 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7922 if (sinfn != NULL_TREE)
7923 return build_function_call_expr (sinfn,
7924 TREE_OPERAND (arg0, 1));
7927 /* Optimize x*pow(x,c) as pow(x,c+1). */
7928 if (fcode1 == BUILT_IN_POW
7929 || fcode1 == BUILT_IN_POWF
7930 || fcode1 == BUILT_IN_POWL)
7932 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7933 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7934 1)));
7935 if (TREE_CODE (arg11) == REAL_CST
7936 && ! TREE_CONSTANT_OVERFLOW (arg11)
7937 && operand_equal_p (arg0, arg10, 0))
7939 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7940 REAL_VALUE_TYPE c;
7941 tree arg, arglist;
7943 c = TREE_REAL_CST (arg11);
7944 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7945 arg = build_real (type, c);
7946 arglist = build_tree_list (NULL_TREE, arg);
7947 arglist = tree_cons (NULL_TREE, arg0, arglist);
7948 return build_function_call_expr (powfn, arglist);
7952 /* Optimize pow(x,c)*x as pow(x,c+1). */
7953 if (fcode0 == BUILT_IN_POW
7954 || fcode0 == BUILT_IN_POWF
7955 || fcode0 == BUILT_IN_POWL)
7957 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7958 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7959 1)));
7960 if (TREE_CODE (arg01) == REAL_CST
7961 && ! TREE_CONSTANT_OVERFLOW (arg01)
7962 && operand_equal_p (arg1, arg00, 0))
7964 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7965 REAL_VALUE_TYPE c;
7966 tree arg, arglist;
7968 c = TREE_REAL_CST (arg01);
7969 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7970 arg = build_real (type, c);
7971 arglist = build_tree_list (NULL_TREE, arg);
7972 arglist = tree_cons (NULL_TREE, arg1, arglist);
7973 return build_function_call_expr (powfn, arglist);
7977 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7978 if (! optimize_size
7979 && operand_equal_p (arg0, arg1, 0))
7981 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7983 if (powfn)
7985 tree arg = build_real (type, dconst2);
7986 tree arglist = build_tree_list (NULL_TREE, arg);
7987 arglist = tree_cons (NULL_TREE, arg0, arglist);
7988 return build_function_call_expr (powfn, arglist);
7993 goto associate;
7995 case BIT_IOR_EXPR:
7996 bit_ior:
7997 if (integer_all_onesp (arg1))
7998 return omit_one_operand (type, arg1, arg0);
7999 if (integer_zerop (arg1))
8000 return non_lvalue (fold_convert (type, arg0));
8001 if (operand_equal_p (arg0, arg1, 0))
8002 return non_lvalue (fold_convert (type, arg0));
8004 /* ~X | X is -1. */
8005 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8006 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8008 t1 = build_int_cst (type, -1);
8009 t1 = force_fit_type (t1, 0, false, false);
8010 return omit_one_operand (type, t1, arg1);
8013 /* X | ~X is -1. */
8014 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8015 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8017 t1 = build_int_cst (type, -1);
8018 t1 = force_fit_type (t1, 0, false, false);
8019 return omit_one_operand (type, t1, arg0);
8022 t1 = distribute_bit_expr (code, type, arg0, arg1);
8023 if (t1 != NULL_TREE)
8024 return t1;
8026 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8028 This results in more efficient code for machines without a NAND
8029 instruction. Combine will canonicalize to the first form
8030 which will allow use of NAND instructions provided by the
8031 backend if they exist. */
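/* This is De Morgan's law: for example, ~a | ~b folds to ~(a & b). */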
8032 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8033 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8035 return fold_build1 (BIT_NOT_EXPR, type,
8036 build2 (BIT_AND_EXPR, type,
8037 TREE_OPERAND (arg0, 0),
8038 TREE_OPERAND (arg1, 0)));
8041 /* See if this can be simplified into a rotate first. If that
8042 is unsuccessful continue in the association code. */
8043 goto bit_rotate;
8045 case BIT_XOR_EXPR:
8046 if (integer_zerop (arg1))
8047 return non_lvalue (fold_convert (type, arg0));
8048 if (integer_all_onesp (arg1))
8049 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8050 if (operand_equal_p (arg0, arg1, 0))
8051 return omit_one_operand (type, integer_zero_node, arg0);
8053 /* ~X ^ X is -1. */
8054 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8055 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8057 t1 = build_int_cst (type, -1);
8058 t1 = force_fit_type (t1, 0, false, false);
8059 return omit_one_operand (type, t1, arg1);
8062 /* X ^ ~X is -1. */
8063 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8064 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8066 t1 = build_int_cst (type, -1);
8067 t1 = force_fit_type (t1, 0, false, false);
8068 return omit_one_operand (type, t1, arg0);
8071 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8072 with a constant, and the two constants have no bits in common,
8073 we should treat this as a BIT_IOR_EXPR since this may produce more
8074 simplifications. */
8075 if (TREE_CODE (arg0) == BIT_AND_EXPR
8076 && TREE_CODE (arg1) == BIT_AND_EXPR
8077 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8078 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8079 && integer_zerop (const_binop (BIT_AND_EXPR,
8080 TREE_OPERAND (arg0, 1),
8081 TREE_OPERAND (arg1, 1), 0)))
8083 code = BIT_IOR_EXPR;
8084 goto bit_ior;
8087 /* (X | Y) ^ X -> Y & ~X. */
8088 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8089 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8091 tree t2 = TREE_OPERAND (arg0, 1);
8092 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8093 arg1);
8094 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8095 fold_convert (type, t1));
8096 return t1;
8099 /* (Y | X) ^ X -> Y & ~X. */
8100 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8101 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8103 tree t2 = TREE_OPERAND (arg0, 0);
8104 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8105 arg1);
8106 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8107 fold_convert (type, t1));
8108 return t1;
8111 /* X ^ (X | Y) -> Y & ~X. */
8112 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8113 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8115 tree t2 = TREE_OPERAND (arg1, 1);
8116 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8117 arg0);
8118 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8119 fold_convert (type, t1));
8120 return t1;
8123 /* X ^ (Y | X) -> Y & ~X. */
8124 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8125 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8127 tree t2 = TREE_OPERAND (arg1, 0);
8128 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8129 arg0);
8130 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8131 fold_convert (type, t1));
8132 return t1;
8135 /* Convert ~X ^ ~Y to X ^ Y. */
8136 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8137 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8138 return fold_build2 (code, type,
8139 fold_convert (type, TREE_OPERAND (arg0, 0)),
8140 fold_convert (type, TREE_OPERAND (arg1, 0)));
8142 /* See if this can be simplified into a rotate first. If that
8143 is unsuccessful continue in the association code. */
8144 goto bit_rotate;
8146 case BIT_AND_EXPR:
8147 if (integer_all_onesp (arg1))
8148 return non_lvalue (fold_convert (type, arg0));
8149 if (integer_zerop (arg1))
8150 return omit_one_operand (type, arg1, arg0);
8151 if (operand_equal_p (arg0, arg1, 0))
8152 return non_lvalue (fold_convert (type, arg0));
8154 /* ~X & X is always zero. */
8155 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8156 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8157 return omit_one_operand (type, integer_zero_node, arg1);
8159 /* X & ~X is always zero. */
8160 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8161 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8162 return omit_one_operand (type, integer_zero_node, arg0);
8164 t1 = distribute_bit_expr (code, type, arg0, arg1);
8165 if (t1 != NULL_TREE)
8166 return t1;
8167 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8168 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8169 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8171 unsigned int prec
8172 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8174 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8175 && (~TREE_INT_CST_LOW (arg1)
8176 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8177 return fold_convert (type, TREE_OPERAND (arg0, 0));
8180 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8182 This results in more efficient code for machines without a NOR
8183 instruction. Combine will canonicalize to the first form
8184 which will allow use of NOR instructions provided by the
8185 backend if they exist. */
8186 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8187 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8189 return fold_build1 (BIT_NOT_EXPR, type,
8190 build2 (BIT_IOR_EXPR, type,
8191 TREE_OPERAND (arg0, 0),
8192 TREE_OPERAND (arg1, 0)));
8195 goto associate;
8197 case RDIV_EXPR:
8198 /* Don't touch a floating-point divide by zero unless the mode
8199 of the constant can represent infinity. */
8200 if (TREE_CODE (arg1) == REAL_CST
8201 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8202 && real_zerop (arg1))
8203 return NULL_TREE;
8205 /* (-A) / (-B) -> A / B */
8206 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8207 return fold_build2 (RDIV_EXPR, type,
8208 TREE_OPERAND (arg0, 0),
8209 negate_expr (arg1));
8210 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8211 return fold_build2 (RDIV_EXPR, type,
8212 negate_expr (arg0),
8213 TREE_OPERAND (arg1, 0));
8215 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8216 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8217 && real_onep (arg1))
8218 return non_lvalue (fold_convert (type, arg0));
8220 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8221 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8222 && real_minus_onep (arg1))
8223 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8225 /* If ARG1 is a constant, we can convert this to a multiply by the
8226 reciprocal. This does not have the same rounding properties,
8227 so only do this if -funsafe-math-optimizations. We can actually
8228 always safely do it if ARG1 is a power of two, but it's hard to
8229 tell if it is or not in a portable manner. */
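/* For example, x / 10.0 becomes x * 0.1 only under
   -funsafe-math-optimizations, whereas x / 2.0 can become x * 0.5
   whenever we optimize, because the reciprocal 0.5 is exact. */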
8230 if (TREE_CODE (arg1) == REAL_CST)
8232 if (flag_unsafe_math_optimizations
8233 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8234 arg1, 0)))
8235 return fold_build2 (MULT_EXPR, type, arg0, tem);
8236 /* Find the reciprocal if optimizing and the result is exact. */
8237 if (optimize)
8239 REAL_VALUE_TYPE r;
8240 r = TREE_REAL_CST (arg1);
8241 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8243 tem = build_real (type, r);
8244 return fold_build2 (MULT_EXPR, type,
8245 fold_convert (type, arg0), tem);
8249 /* Convert A/B/C to A/(B*C). */
8250 if (flag_unsafe_math_optimizations
8251 && TREE_CODE (arg0) == RDIV_EXPR)
8252 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8253 fold_build2 (MULT_EXPR, type,
8254 TREE_OPERAND (arg0, 1), arg1));
8256 /* Convert A/(B/C) to (A/B)*C. */
8257 if (flag_unsafe_math_optimizations
8258 && TREE_CODE (arg1) == RDIV_EXPR)
8259 return fold_build2 (MULT_EXPR, type,
8260 fold_build2 (RDIV_EXPR, type, arg0,
8261 TREE_OPERAND (arg1, 0)),
8262 TREE_OPERAND (arg1, 1));
8264 /* Convert C1/(X*C2) into (C1/C2)/X. */
8265 if (flag_unsafe_math_optimizations
8266 && TREE_CODE (arg1) == MULT_EXPR
8267 && TREE_CODE (arg0) == REAL_CST
8268 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8270 tree tem = const_binop (RDIV_EXPR, arg0,
8271 TREE_OPERAND (arg1, 1), 0);
8272 if (tem)
8273 return fold_build2 (RDIV_EXPR, type, tem,
8274 TREE_OPERAND (arg1, 0));
8277 if (flag_unsafe_math_optimizations)
8279 enum built_in_function fcode = builtin_mathfn_code (arg1);
8280 /* Optimize x/expN(y) into x*expN(-y). */
8281 if (BUILTIN_EXPONENT_P (fcode))
8283 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8284 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8285 tree arglist = build_tree_list (NULL_TREE,
8286 fold_convert (type, arg));
8287 arg1 = build_function_call_expr (expfn, arglist);
8288 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8291 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8292 if (fcode == BUILT_IN_POW
8293 || fcode == BUILT_IN_POWF
8294 || fcode == BUILT_IN_POWL)
8296 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8297 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8298 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8299 tree neg11 = fold_convert (type, negate_expr (arg11));
8300 tree arglist = tree_cons(NULL_TREE, arg10,
8301 build_tree_list (NULL_TREE, neg11));
8302 arg1 = build_function_call_expr (powfn, arglist);
8303 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8307 if (flag_unsafe_math_optimizations)
8309 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8310 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8312 /* Optimize sin(x)/cos(x) as tan(x). */
8313 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8314 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8315 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8316 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8317 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8319 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8321 if (tanfn != NULL_TREE)
8322 return build_function_call_expr (tanfn,
8323 TREE_OPERAND (arg0, 1));
8326 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8327 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8328 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8329 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8330 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8331 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8333 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8335 if (tanfn != NULL_TREE)
8337 tree tmp = TREE_OPERAND (arg0, 1);
8338 tmp = build_function_call_expr (tanfn, tmp);
8339 return fold_build2 (RDIV_EXPR, type,
8340 build_real (type, dconst1), tmp);
8344 /* Optimize pow(x,c)/x as pow(x,c-1). */
8345 if (fcode0 == BUILT_IN_POW
8346 || fcode0 == BUILT_IN_POWF
8347 || fcode0 == BUILT_IN_POWL)
8349 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8350 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8351 if (TREE_CODE (arg01) == REAL_CST
8352 && ! TREE_CONSTANT_OVERFLOW (arg01)
8353 && operand_equal_p (arg1, arg00, 0))
8355 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8356 REAL_VALUE_TYPE c;
8357 tree arg, arglist;
8359 c = TREE_REAL_CST (arg01);
8360 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8361 arg = build_real (type, c);
8362 arglist = build_tree_list (NULL_TREE, arg);
8363 arglist = tree_cons (NULL_TREE, arg1, arglist);
8364 return build_function_call_expr (powfn, arglist);
8368 goto binary;
8370 case TRUNC_DIV_EXPR:
8371 case ROUND_DIV_EXPR:
8372 case FLOOR_DIV_EXPR:
8373 case CEIL_DIV_EXPR:
8374 case EXACT_DIV_EXPR:
8375 if (integer_onep (arg1))
8376 return non_lvalue (fold_convert (type, arg0));
8377 if (integer_zerop (arg1))
8378 return NULL_TREE;
8379 /* X / -1 is -X. */
8380 if (!TYPE_UNSIGNED (type)
8381 && TREE_CODE (arg1) == INTEGER_CST
8382 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8383 && TREE_INT_CST_HIGH (arg1) == -1)
8384 return fold_convert (type, negate_expr (arg0));
8386 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8387 operation, EXACT_DIV_EXPR.
8389 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8390 At one time others generated faster code, but it's not clear if they do
8391 after the last round of changes to the DIV code in expmed.c. */
8392 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8393 && multiple_of_p (type, arg0, arg1))
8394 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8396 if (TREE_CODE (arg1) == INTEGER_CST
8397 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8398 return fold_convert (type, tem);
8400 goto binary;
8402 case CEIL_MOD_EXPR:
8403 case FLOOR_MOD_EXPR:
8404 case ROUND_MOD_EXPR:
8405 case TRUNC_MOD_EXPR:
8406 /* X % 1 is always zero, but be sure to preserve any side
8407 effects in X. */
8408 if (integer_onep (arg1))
8409 return omit_one_operand (type, integer_zero_node, arg0);
8411 /* X % 0: return X % 0 unchanged so that we can get the
8412 proper warnings and errors. */
8413 if (integer_zerop (arg1))
8414 return NULL_TREE;
8416 /* 0 % X is always zero, but be sure to preserve any side
8417 effects in X. Place this after checking for X == 0. */
8418 if (integer_zerop (arg0))
8419 return omit_one_operand (type, integer_zero_node, arg1);
8421 /* X % -1 is zero. */
8422 if (!TYPE_UNSIGNED (type)
8423 && TREE_CODE (arg1) == INTEGER_CST
8424 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8425 && TREE_INT_CST_HIGH (arg1) == -1)
8426 return omit_one_operand (type, integer_zero_node, arg0);
8428 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8429 i.e. "X % C" into "X & C2" with C2 = C - 1, if X and C are positive. */
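/* For example, x % 16 becomes x & 15 when x is known nonnegative. */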
8430 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8431 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8432 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8434 unsigned HOST_WIDE_INT high, low;
8435 tree mask;
8436 int l;
8438 l = tree_log2 (arg1);
8439 if (l >= HOST_BITS_PER_WIDE_INT)
8441 high = ((unsigned HOST_WIDE_INT) 1
8442 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8443 low = -1;
8445 else
8447 high = 0;
8448 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8451 mask = build_int_cst_wide (type, low, high);
8452 return fold_build2 (BIT_AND_EXPR, type,
8453 fold_convert (type, arg0), mask);
8456 /* X % -C is the same as X % C. */
8457 if (code == TRUNC_MOD_EXPR
8458 && !TYPE_UNSIGNED (type)
8459 && TREE_CODE (arg1) == INTEGER_CST
8460 && !TREE_CONSTANT_OVERFLOW (arg1)
8461 && TREE_INT_CST_HIGH (arg1) < 0
8462 && !flag_trapv
8463 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8464 && !sign_bit_p (arg1, arg1))
8465 return fold_build2 (code, type, fold_convert (type, arg0),
8466 fold_convert (type, negate_expr (arg1)));
8468 /* X % -Y is the same as X % Y. */
8469 if (code == TRUNC_MOD_EXPR
8470 && !TYPE_UNSIGNED (type)
8471 && TREE_CODE (arg1) == NEGATE_EXPR
8472 && !flag_trapv)
8473 return fold_build2 (code, type, fold_convert (type, arg0),
8474 fold_convert (type, TREE_OPERAND (arg1, 0)));
8476 if (TREE_CODE (arg1) == INTEGER_CST
8477 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8478 return fold_convert (type, tem);
8480 goto binary;
8482 case LROTATE_EXPR:
8483 case RROTATE_EXPR:
8484 if (integer_all_onesp (arg0))
8485 return omit_one_operand (type, arg0, arg1);
8486 goto shift;
8488 case RSHIFT_EXPR:
8489 /* Optimize -1 >> x for arithmetic right shifts. */
8490 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8491 return omit_one_operand (type, arg0, arg1);
8492 /* ... fall through ... */
8494 case LSHIFT_EXPR:
8495 shift:
8496 if (integer_zerop (arg1))
8497 return non_lvalue (fold_convert (type, arg0));
8498 if (integer_zerop (arg0))
8499 return omit_one_operand (type, arg0, arg1);
8501 /* Since a negative shift count is not well-defined,
8502 don't try to compute it in the compiler. */
8503 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8504 return NULL_TREE;
8506 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8507 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8508 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8509 && host_integerp (TREE_OPERAND (arg0, 1), false)
8510 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8512 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8513 + TREE_INT_CST_LOW (arg1));
8515 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8516 being well defined. */
8517 if (low >= TYPE_PRECISION (type))
8519 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8520 low = low % TYPE_PRECISION (type);
8521 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8522 return build_int_cst (type, 0);
8523 else
8524 low = TYPE_PRECISION (type) - 1;
8527 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8528 build_int_cst (type, low));
8531 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8532 into x & ((unsigned)-1 >> c) for unsigned types. */
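/* For example, (x >> 3) << 3 becomes x & -8, clearing the low three
   bits. */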
8533 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8534 || (TYPE_UNSIGNED (type)
8535 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8536 && host_integerp (arg1, false)
8537 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8538 && host_integerp (TREE_OPERAND (arg0, 1), false)
8539 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8541 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8542 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8543 tree lshift;
8544 tree arg00;
8546 if (low0 == low1)
8548 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8550 lshift = build_int_cst (type, -1);
8551 lshift = int_const_binop (code, lshift, arg1, 0);
8553 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8557 /* Rewrite an LROTATE_EXPR by a constant into an
8558 RROTATE_EXPR by a new constant. */
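/* For example, rotating a 32-bit value left by 8 becomes rotating it
   right by 24. */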
8559 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8561 tree tem = build_int_cst (NULL_TREE,
8562 GET_MODE_BITSIZE (TYPE_MODE (type)));
8563 tem = fold_convert (TREE_TYPE (arg1), tem);
8564 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8565 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8568 /* If we have a rotate of a bit operation with the rotate count and
8569 the second operand of the bit operation both constant,
8570 permute the two operations. */
8571 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8572 && (TREE_CODE (arg0) == BIT_AND_EXPR
8573 || TREE_CODE (arg0) == BIT_IOR_EXPR
8574 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8575 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8576 return fold_build2 (TREE_CODE (arg0), type,
8577 fold_build2 (code, type,
8578 TREE_OPERAND (arg0, 0), arg1),
8579 fold_build2 (code, type,
8580 TREE_OPERAND (arg0, 1), arg1));
8582 /* Two consecutive rotates adding up to the width of the mode can
8583 be ignored. */
8584 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8585 && TREE_CODE (arg0) == RROTATE_EXPR
8586 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8587 && TREE_INT_CST_HIGH (arg1) == 0
8588 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8589 && ((TREE_INT_CST_LOW (arg1)
8590 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8591 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8592 return TREE_OPERAND (arg0, 0);
8594 goto binary;
8596 case MIN_EXPR:
8597 if (operand_equal_p (arg0, arg1, 0))
8598 return omit_one_operand (type, arg0, arg1);
8599 if (INTEGRAL_TYPE_P (type)
8600 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8601 return omit_one_operand (type, arg1, arg0);
8602 goto associate;
8604 case MAX_EXPR:
8605 if (operand_equal_p (arg0, arg1, 0))
8606 return omit_one_operand (type, arg0, arg1);
8607 if (INTEGRAL_TYPE_P (type)
8608 && TYPE_MAX_VALUE (type)
8609 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8610 return omit_one_operand (type, arg1, arg0);
8611 goto associate;
8613 case TRUTH_ANDIF_EXPR:
8614 /* Note that the operands of this must be ints
8615 and their values must be 0 or 1.
8616 ("true" is a fixed value perhaps depending on the language.) */
8617 /* If first arg is constant zero, return it. */
8618 if (integer_zerop (arg0))
8619 return fold_convert (type, arg0);
8620 case TRUTH_AND_EXPR:
8621 /* If either arg is constant true, drop it. */
8622 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8623 return non_lvalue (fold_convert (type, arg1));
8624 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8625 /* Preserve sequence points. */
8626 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8627 return non_lvalue (fold_convert (type, arg0));
8628 /* If second arg is constant zero, result is zero, but first arg
8629 must be evaluated. */
8630 if (integer_zerop (arg1))
8631 return omit_one_operand (type, arg1, arg0);
8632 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8633 case will be handled here. */
8634 if (integer_zerop (arg0))
8635 return omit_one_operand (type, arg0, arg1);
8637 /* !X && X is always false. */
8638 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8639 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8640 return omit_one_operand (type, integer_zero_node, arg1);
8641 /* X && !X is always false. */
8642 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8643 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8644 return omit_one_operand (type, integer_zero_node, arg0);
8646 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8647 means A >= Y && A != MAX, but in this case we know that
8648 A < X <= MAX. */
8650 if (!TREE_SIDE_EFFECTS (arg0)
8651 && !TREE_SIDE_EFFECTS (arg1))
8653 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8654 if (tem && !operand_equal_p (tem, arg0, 0))
8655 return fold_build2 (code, type, tem, arg1);
8657 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8658 if (tem && !operand_equal_p (tem, arg1, 0))
8659 return fold_build2 (code, type, arg0, tem);
8662 truth_andor:
8663 /* We only do these simplifications if we are optimizing. */
8664 if (!optimize)
8665 return NULL_TREE;
8667 /* Check for things like (A || B) && (A || C). We can convert this
8668 to A || (B && C). Note that either operator can be any of the four
8669 truth and/or operations and the transformation will still be
8670 valid. Also note that we only care about order for the
8671 ANDIF and ORIF operators. If B contains side effects, this
8672 might change the truth-value of A. */
8673 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8674 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8675 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8676 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8677 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8678 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8680 tree a00 = TREE_OPERAND (arg0, 0);
8681 tree a01 = TREE_OPERAND (arg0, 1);
8682 tree a10 = TREE_OPERAND (arg1, 0);
8683 tree a11 = TREE_OPERAND (arg1, 1);
8684 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8685 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8686 && (code == TRUTH_AND_EXPR
8687 || code == TRUTH_OR_EXPR));
8689 if (operand_equal_p (a00, a10, 0))
8690 return fold_build2 (TREE_CODE (arg0), type, a00,
8691 fold_build2 (code, type, a01, a11));
8692 else if (commutative && operand_equal_p (a00, a11, 0))
8693 return fold_build2 (TREE_CODE (arg0), type, a00,
8694 fold_build2 (code, type, a01, a10));
8695 else if (commutative && operand_equal_p (a01, a10, 0))
8696 return fold_build2 (TREE_CODE (arg0), type, a01,
8697 fold_build2 (code, type, a00, a11));
8699 /* This case is tricky because we must either have commutative
8700 operators or else A10 must not have side-effects. */
8702 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8703 && operand_equal_p (a01, a11, 0))
8704 return fold_build2 (TREE_CODE (arg0), type,
8705 fold_build2 (code, type, a00, a10),
8706 a01);
8709 /* See if we can build a range comparison. */
8710 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8711 return tem;
8713 /* Check for the possibility of merging component references. If our
8714 lhs is another similar operation, try to merge its rhs with our
8715 rhs. Then try to merge our lhs and rhs. */
8716 if (TREE_CODE (arg0) == code
8717 && 0 != (tem = fold_truthop (code, type,
8718 TREE_OPERAND (arg0, 1), arg1)))
8719 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8721 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8722 return tem;
8724 return NULL_TREE;
8726 case TRUTH_ORIF_EXPR:
8727 /* Note that the operands of this must be ints
8728 and their values must be 0 or true.
8729 ("true" is a fixed value perhaps depending on the language.) */
8730 /* If first arg is constant true, return it. */
8731 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8732 return fold_convert (type, arg0);
8733 case TRUTH_OR_EXPR:
8734 /* If either arg is constant zero, drop it. */
8735 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8736 return non_lvalue (fold_convert (type, arg1));
8737 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8738 /* Preserve sequence points. */
8739 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8740 return non_lvalue (fold_convert (type, arg0));
8741 /* If second arg is constant true, result is true, but we must
8742 evaluate first arg. */
8743 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8744 return omit_one_operand (type, arg1, arg0);
8745 /* Likewise for first arg, but note this only occurs here for
8746 TRUTH_OR_EXPR. */
8747 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8748 return omit_one_operand (type, arg0, arg1);
8750 /* !X || X is always true. */
8751 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8752 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8753 return omit_one_operand (type, integer_one_node, arg1);
8754 /* X || !X is always true. */
8755 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8756 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8757 return omit_one_operand (type, integer_one_node, arg0);
8759 goto truth_andor;
8761 case TRUTH_XOR_EXPR:
8762 /* If the second arg is constant zero, drop it. */
8763 if (integer_zerop (arg1))
8764 return non_lvalue (fold_convert (type, arg0));
8765 /* If the second arg is constant true, this is a logical inversion. */
8766 if (integer_onep (arg1))
8768 /* Only call invert_truthvalue if operand is a truth value. */
8769 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8770 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8771 else
8772 tem = invert_truthvalue (arg0);
8773 return non_lvalue (fold_convert (type, tem));
8775 /* Identical arguments cancel to zero. */
8776 if (operand_equal_p (arg0, arg1, 0))
8777 return omit_one_operand (type, integer_zero_node, arg0);
8779 /* !X ^ X is always true. */
8780 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8781 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8782 return omit_one_operand (type, integer_one_node, arg1);
8784 /* X ^ !X is always true. */
8785 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8786 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8787 return omit_one_operand (type, integer_one_node, arg0);
8789 return NULL_TREE;
8791 case EQ_EXPR:
8792 case NE_EXPR:
8793 case LT_EXPR:
8794 case GT_EXPR:
8795 case LE_EXPR:
8796 case GE_EXPR:
8797 /* If one arg is a real or integer constant, put it last. */
8798 if (tree_swap_operands_p (arg0, arg1, true))
8799 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8801 /* bool_var != 0 becomes bool_var. */
8802 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8803 && code == NE_EXPR)
8804 return non_lvalue (fold_convert (type, arg0));
8806 /* bool_var == 1 becomes bool_var. */
8807 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8808 && code == EQ_EXPR)
8809 return non_lvalue (fold_convert (type, arg0));
8811 /* If this is an equality comparison of the address of a non-weak
8812 object against zero, then we know the result. */
8813 if ((code == EQ_EXPR || code == NE_EXPR)
8814 && TREE_CODE (arg0) == ADDR_EXPR
8815 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8816 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8817 && integer_zerop (arg1))
8818 return constant_boolean_node (code != EQ_EXPR, type);
8820 /* If this is an equality comparison of the address of two non-weak,
8821 unaliased symbols, neither of which is extern (since we do not
8822 have access to attributes for externs), then we know the result. */
8823 if ((code == EQ_EXPR || code == NE_EXPR)
8824 && TREE_CODE (arg0) == ADDR_EXPR
8825 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8826 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8827 && ! lookup_attribute ("alias",
8828 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8829 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8830 && TREE_CODE (arg1) == ADDR_EXPR
8831 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8832 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8833 && ! lookup_attribute ("alias",
8834 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8835 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8837 /* We know that we're looking at the address of two
8838 non-weak, unaliased, static _DECL nodes.
8840 It is both wasteful and incorrect to call operand_equal_p
8841 to compare the two ADDR_EXPR nodes. It is wasteful in that
8842 all we need to do is test pointer equality for the arguments
8843 to the two ADDR_EXPR nodes. It is incorrect to use
8844 operand_equal_p as that function is NOT equivalent to a
8845 C equality test. It can in fact return false for two
8846 objects which would test as equal using the C equality
8847 operator. */
8848 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8849 return constant_boolean_node (equal
8850 ? code == EQ_EXPR : code != EQ_EXPR,
8851 type);
8854 /* If this is a comparison of two exprs that look like an
8855 ARRAY_REF of the same object, then we can fold this to a
8856 comparison of the two offsets. */
8857 if (TREE_CODE_CLASS (code) == tcc_comparison)
8859 tree base0, offset0, base1, offset1;
8861 if (extract_array_ref (arg0, &base0, &offset0)
8862 && extract_array_ref (arg1, &base1, &offset1)
8863 && operand_equal_p (base0, base1, 0))
8865 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
8866 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
8867 offset0 = NULL_TREE;
8868 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
8869 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
8870 offset1 = NULL_TREE;
8871 if (offset0 == NULL_TREE
8872 && offset1 == NULL_TREE)
8874 offset0 = integer_zero_node;
8875 offset1 = integer_zero_node;
8877 else if (offset0 == NULL_TREE)
8878 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8879 else if (offset1 == NULL_TREE)
8880 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8882 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8883 return fold_build2 (code, type, offset0, offset1);
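/* For illustration: "&a[i] == &a[j]" has the common base "a", so it
   reduces here to a comparison of the two scaled offsets rather than
   of the two addresses. */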
8887 /* Transform comparisons of the form X +- C CMP X. */
8888 if ((code != EQ_EXPR && code != NE_EXPR)
8889 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8890 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8891 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8892 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8893 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8894 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8895 && !(flag_wrapv || flag_trapv))))
8897 tree arg01 = TREE_OPERAND (arg0, 1);
8898 enum tree_code code0 = TREE_CODE (arg0);
8899 int is_positive;
8901 if (TREE_CODE (arg01) == REAL_CST)
8902 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8903 else
8904 is_positive = tree_int_cst_sgn (arg01);
8906 /* (X - c) > X becomes false. */
8907 if (code == GT_EXPR
8908 && ((code0 == MINUS_EXPR && is_positive >= 0)
8909 || (code0 == PLUS_EXPR && is_positive <= 0)))
8910 return constant_boolean_node (0, type);
8912 /* Likewise (X + c) < X becomes false. */
8913 if (code == LT_EXPR
8914 && ((code0 == PLUS_EXPR && is_positive >= 0)
8915 || (code0 == MINUS_EXPR && is_positive <= 0)))
8916 return constant_boolean_node (0, type);
8918 /* Convert (X - c) <= X to true. */
8919 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8920 && code == LE_EXPR
8921 && ((code0 == MINUS_EXPR && is_positive >= 0)
8922 || (code0 == PLUS_EXPR && is_positive <= 0)))
8923 return constant_boolean_node (1, type);
8925 /* Convert (X + c) >= X to true. */
8926 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8927 && code == GE_EXPR
8928 && ((code0 == PLUS_EXPR && is_positive >= 0)
8929 || (code0 == MINUS_EXPR && is_positive <= 0)))
8930 return constant_boolean_node (1, type);
8932 if (TREE_CODE (arg01) == INTEGER_CST)
8934 /* Convert X + c > X and X - c < X to true for integers. */
8935 if (code == GT_EXPR
8936 && ((code0 == PLUS_EXPR && is_positive > 0)
8937 || (code0 == MINUS_EXPR && is_positive < 0)))
8938 return constant_boolean_node (1, type);
8940 if (code == LT_EXPR
8941 && ((code0 == MINUS_EXPR && is_positive > 0)
8942 || (code0 == PLUS_EXPR && is_positive < 0)))
8943 return constant_boolean_node (1, type);
8945 /* Convert X + c <= X and X - c >= X to false for integers. */
8946 if (code == LE_EXPR
8947 && ((code0 == PLUS_EXPR && is_positive > 0)
8948 || (code0 == MINUS_EXPR && is_positive < 0)))
8949 return constant_boolean_node (0, type);
8951 if (code == GE_EXPR
8952 && ((code0 == MINUS_EXPR && is_positive > 0)
8953 || (code0 == PLUS_EXPR && is_positive < 0)))
8954 return constant_boolean_node (0, type);
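/* For illustration: with "int x" and neither -fwrapv nor -ftrapv,
   "x - 1 > x" folds to 0 above, and "x + 1 > x" folds to 1 via the
   INTEGER_CST cases, because signed overflow is assumed undefined. */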
8958 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8959 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8960 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8961 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8962 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8963 && !(flag_wrapv || flag_trapv))
8964 && (TREE_CODE (arg1) == INTEGER_CST
8965 && !TREE_OVERFLOW (arg1)))
8967 tree const1 = TREE_OPERAND (arg0, 1);
8968 tree const2 = arg1;
8969 tree variable = TREE_OPERAND (arg0, 0);
8970 tree lhs;
8971 int lhs_add;
8972 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8974 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8975 TREE_TYPE (arg1), const2, const1);
8976 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8977 && (TREE_CODE (lhs) != INTEGER_CST
8978 || !TREE_OVERFLOW (lhs)))
8979 return fold_build2 (code, type, variable, lhs);
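/* For illustration: under the same non-wrapping assumptions,
   "x + 9 < 20" is rewritten here as "x < 11" by folding the two
   constants into a single right-hand side. */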
8982 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8984 tree targ0 = strip_float_extensions (arg0);
8985 tree targ1 = strip_float_extensions (arg1);
8986 tree newtype = TREE_TYPE (targ0);
8988 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8989 newtype = TREE_TYPE (targ1);
8991 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8992 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8993 return fold_build2 (code, type, fold_convert (newtype, targ0),
8994 fold_convert (newtype, targ1));
8996 /* (-a) CMP (-b) -> b CMP a */
8997 if (TREE_CODE (arg0) == NEGATE_EXPR
8998 && TREE_CODE (arg1) == NEGATE_EXPR)
8999 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9000 TREE_OPERAND (arg0, 0));
9002 if (TREE_CODE (arg1) == REAL_CST)
9004 REAL_VALUE_TYPE cst;
9005 cst = TREE_REAL_CST (arg1);
9007 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9008 if (TREE_CODE (arg0) == NEGATE_EXPR)
9009 return
9010 fold_build2 (swap_tree_comparison (code), type,
9011 TREE_OPERAND (arg0, 0),
9012 build_real (TREE_TYPE (arg1),
9013 REAL_VALUE_NEGATE (cst)));
9015 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9016 /* a CMP (-0) -> a CMP 0 */
9017 if (REAL_VALUE_MINUS_ZERO (cst))
9018 return fold_build2 (code, type, arg0,
9019 build_real (TREE_TYPE (arg1), dconst0));
9021 /* x != NaN is always true, other ops are always false. */
9022 if (REAL_VALUE_ISNAN (cst)
9023 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9025 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9026 return omit_one_operand (type, tem, arg0);
9029 /* Fold comparisons against infinity. */
9030 if (REAL_VALUE_ISINF (cst))
9032 tem = fold_inf_compare (code, type, arg0, arg1);
9033 if (tem != NULL_TREE)
9034 return tem;
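/* For illustration: with "double x" and only quiet NaNs, "x < NaN"
   folds to 0 and "x != NaN" to 1 above, while fold_inf_compare can
   turn e.g. "x > +Inf" into constant 0. */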
9038 /* If this is a comparison of a real constant with a PLUS_EXPR
9039 or a MINUS_EXPR of a real constant, we can convert it into a
9040 comparison with a revised real constant as long as no overflow
9041 occurs when unsafe_math_optimizations are enabled. */
9042 if (flag_unsafe_math_optimizations
9043 && TREE_CODE (arg1) == REAL_CST
9044 && (TREE_CODE (arg0) == PLUS_EXPR
9045 || TREE_CODE (arg0) == MINUS_EXPR)
9046 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9047 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9048 ? MINUS_EXPR : PLUS_EXPR,
9049 arg1, TREE_OPERAND (arg0, 1), 0))
9050 && ! TREE_CONSTANT_OVERFLOW (tem))
9051 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9053 /* Likewise, we can simplify a comparison of a real constant with
9054 a MINUS_EXPR whose first operand is also a real constant, i.e.
9055 (c1 - x) < c2 becomes x > c1-c2. */
9056 if (flag_unsafe_math_optimizations
9057 && TREE_CODE (arg1) == REAL_CST
9058 && TREE_CODE (arg0) == MINUS_EXPR
9059 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9060 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9061 arg1, 0))
9062 && ! TREE_CONSTANT_OVERFLOW (tem))
9063 return fold_build2 (swap_tree_comparison (code), type,
9064 TREE_OPERAND (arg0, 1), tem);
9066 /* Fold comparisons against built-in math functions. */
9067 if (TREE_CODE (arg1) == REAL_CST
9068 && flag_unsafe_math_optimizations
9069 && ! flag_errno_math)
9071 enum built_in_function fcode = builtin_mathfn_code (arg0);
9073 if (fcode != END_BUILTINS)
9075 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9076 if (tem != NULL_TREE)
9077 return tem;
9082 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9083 if (TREE_CONSTANT (arg1)
9084 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9085 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9086 /* This optimization is invalid for ordered comparisons
9087 if CONST+INCR overflows or if foo+incr might overflow.
9088 This optimization is invalid for floating point due to rounding.
9089 For pointer types we assume overflow doesn't happen. */
9090 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9091 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9092 && (code == EQ_EXPR || code == NE_EXPR))))
9094 tree varop, newconst;
9096 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9098 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9099 arg1, TREE_OPERAND (arg0, 1));
9100 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9101 TREE_OPERAND (arg0, 0),
9102 TREE_OPERAND (arg0, 1));
9104 else
9106 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9107 arg1, TREE_OPERAND (arg0, 1));
9108 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9109 TREE_OPERAND (arg0, 0),
9110 TREE_OPERAND (arg0, 1));
9114 /* If VAROP is a reference to a bitfield, we must mask
9115 the constant by the width of the field. */
9116 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9117 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9118 && host_integerp (DECL_SIZE (TREE_OPERAND
9119 (TREE_OPERAND (varop, 0), 1)), 1))
9121 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9122 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9123 tree folded_compare, shift;
9125 /* First check whether the comparison would come out
9126 always the same. If we don't do that we would
9127 change the meaning with the masking. */
9128 folded_compare = fold_build2 (code, type,
9129 TREE_OPERAND (varop, 0), arg1);
9130 if (integer_zerop (folded_compare)
9131 || integer_onep (folded_compare))
9132 return omit_one_operand (type, folded_compare, varop);
9134 shift = build_int_cst (NULL_TREE,
9135 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9136 shift = fold_convert (TREE_TYPE (varop), shift);
9137 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9138 newconst, shift);
9139 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9140 newconst, shift);
9143 return fold_build2 (code, type, varop, newconst);
9146 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9147 This transformation affects the cases which are handled in later
9148 optimizations involving comparisons with non-negative constants. */
9149 if (TREE_CODE (arg1) == INTEGER_CST
9150 && TREE_CODE (arg0) != INTEGER_CST
9151 && tree_int_cst_sgn (arg1) > 0)
9153 switch (code)
9155 case GE_EXPR:
9156 arg1 = const_binop (MINUS_EXPR, arg1,
9157 build_int_cst (TREE_TYPE (arg1), 1), 0);
9158 return fold_build2 (GT_EXPR, type, arg0,
9159 fold_convert (TREE_TYPE (arg0), arg1));
9161 case LT_EXPR:
9162 arg1 = const_binop (MINUS_EXPR, arg1,
9163 build_int_cst (TREE_TYPE (arg1), 1), 0);
9164 return fold_build2 (LE_EXPR, type, arg0,
9165 fold_convert (TREE_TYPE (arg0), arg1));
9167 default:
9168 break;
9172 /* Comparisons with the highest or lowest possible integer of
9173 the specified size will have known values. */
9175 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9177 if (TREE_CODE (arg1) == INTEGER_CST
9178 && ! TREE_CONSTANT_OVERFLOW (arg1)
9179 && width <= 2 * HOST_BITS_PER_WIDE_INT
9180 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9181 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9183 HOST_WIDE_INT signed_max_hi;
9184 unsigned HOST_WIDE_INT signed_max_lo;
9185 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9187 if (width <= HOST_BITS_PER_WIDE_INT)
9189 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9190 - 1;
9191 signed_max_hi = 0;
9192 max_hi = 0;
9194 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9196 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9197 min_lo = 0;
9198 min_hi = 0;
9200 else
9202 max_lo = signed_max_lo;
9203 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9204 min_hi = -1;
9207 else
9209 width -= HOST_BITS_PER_WIDE_INT;
9210 signed_max_lo = -1;
9211 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9212 - 1;
9213 max_lo = -1;
9214 min_lo = 0;
9216 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9218 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9219 min_hi = 0;
9221 else
9223 max_hi = signed_max_hi;
9224 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9228 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9229 && TREE_INT_CST_LOW (arg1) == max_lo)
9230 switch (code)
9232 case GT_EXPR:
9233 return omit_one_operand (type, integer_zero_node, arg0);
9235 case GE_EXPR:
9236 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9238 case LE_EXPR:
9239 return omit_one_operand (type, integer_one_node, arg0);
9241 case LT_EXPR:
9242 return fold_build2 (NE_EXPR, type, arg0, arg1);
9244 /* The GE_EXPR and LT_EXPR cases above are not normally
9245 reached because of previous transformations. */
9247 default:
9248 break;
9250 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9251 == max_hi
9252 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9253 switch (code)
9255 case GT_EXPR:
9256 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9257 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9258 case LE_EXPR:
9259 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9260 return fold_build2 (NE_EXPR, type, arg0, arg1);
9261 default:
9262 break;
9264 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9265 == min_hi
9266 && TREE_INT_CST_LOW (arg1) == min_lo)
9267 switch (code)
9269 case LT_EXPR:
9270 return omit_one_operand (type, integer_zero_node, arg0);
9272 case LE_EXPR:
9273 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9275 case GE_EXPR:
9276 return omit_one_operand (type, integer_one_node, arg0);
9278 case GT_EXPR:
9279 return fold_build2 (NE_EXPR, type, arg0, arg1);
9281 default:
9282 break;
9284 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9285 == min_hi
9286 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9287 switch (code)
9289 case GE_EXPR:
9290 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9291 return fold_build2 (NE_EXPR, type, arg0, arg1);
9292 case LT_EXPR:
9293 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9294 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9295 default:
9296 break;
9299 else if (!in_gimple_form
9300 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9301 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9302 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9303 /* signed_type does not work on pointer types. */
9304 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9306 /* The following case also applies to X < signed_max+1
9307 and X >= signed_max+1 because of previous transformations. */
9308 if (code == LE_EXPR || code == GT_EXPR)
9310 tree st0, st1;
9311 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9312 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9313 return fold
9314 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9315 type, fold_convert (st0, arg0),
9316 fold_convert (st1, integer_zero_node)));
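/* For illustration: with "unsigned int x", "x <= UINT_MAX" folds to 1,
   "x > UINT_MAX - 1" becomes "x == UINT_MAX", and (outside GIMPLE)
   "x > INT_MAX" becomes the sign test "(int) x < 0". */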
9322 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9323 a MINUS_EXPR of a constant, we can convert it into a comparison with
9324 a revised constant as long as no overflow occurs. */
9325 if ((code == EQ_EXPR || code == NE_EXPR)
9326 && TREE_CODE (arg1) == INTEGER_CST
9327 && (TREE_CODE (arg0) == PLUS_EXPR
9328 || TREE_CODE (arg0) == MINUS_EXPR)
9329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9330 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9331 ? MINUS_EXPR : PLUS_EXPR,
9332 arg1, TREE_OPERAND (arg0, 1), 0))
9333 && ! TREE_CONSTANT_OVERFLOW (tem))
9334 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
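/* For illustration: an equality such as "x + 5 == 7" is rewritten here
   as "x == 2", provided computing the revised constant does not
   overflow. */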
9336 /* Similarly for a NEGATE_EXPR. */
9337 else if ((code == EQ_EXPR || code == NE_EXPR)
9338 && TREE_CODE (arg0) == NEGATE_EXPR
9339 && TREE_CODE (arg1) == INTEGER_CST
9340 && 0 != (tem = negate_expr (arg1))
9341 && TREE_CODE (tem) == INTEGER_CST
9342 && ! TREE_CONSTANT_OVERFLOW (tem))
9343 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9345 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9346 for !=. Don't do this for ordered comparisons due to overflow. */
9347 else if ((code == NE_EXPR || code == EQ_EXPR)
9348 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9349 return fold_build2 (code, type,
9350 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9352 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9353 && (TREE_CODE (arg0) == NOP_EXPR
9354 || TREE_CODE (arg0) == CONVERT_EXPR))
9356 /* If we are widening one operand of an integer comparison,
9357 see if the other operand is similarly being widened. Perhaps we
9358 can do the comparison in the narrower type. */
9359 tem = fold_widened_comparison (code, type, arg0, arg1);
9360 if (tem)
9361 return tem;
9363 /* Or if we are changing signedness. */
9364 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9365 if (tem)
9366 return tem;
9369 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9370 constant, we can simplify it. */
9371 else if (TREE_CODE (arg1) == INTEGER_CST
9372 && (TREE_CODE (arg0) == MIN_EXPR
9373 || TREE_CODE (arg0) == MAX_EXPR)
9374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9376 tem = optimize_minmax_comparison (code, type, op0, op1);
9377 if (tem)
9378 return tem;
9380 return NULL_TREE;
9383 /* If we are comparing an ABS_EXPR with a constant, we can
9384 convert all the cases into explicit comparisons, but they may
9385 well not be faster than doing the ABS and one comparison.
9386 But ABS (X) <= C is a range comparison, which becomes a subtraction
9387 and a comparison, and is probably faster. */
9388 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9389 && TREE_CODE (arg0) == ABS_EXPR
9390 && ! TREE_SIDE_EFFECTS (arg0)
9391 && (0 != (tem = negate_expr (arg1)))
9392 && TREE_CODE (tem) == INTEGER_CST
9393 && ! TREE_CONSTANT_OVERFLOW (tem))
9394 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9395 build2 (GE_EXPR, type,
9396 TREE_OPERAND (arg0, 0), tem),
9397 build2 (LE_EXPR, type,
9398 TREE_OPERAND (arg0, 0), arg1));
9400 /* Convert ABS_EXPR<x> >= 0 to true. */
9401 else if (code == GE_EXPR
9402 && tree_expr_nonnegative_p (arg0)
9403 && (integer_zerop (arg1)
9404 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9405 && real_zerop (arg1))))
9406 return omit_one_operand (type, integer_one_node, arg0);
9408 /* Convert ABS_EXPR<x> < 0 to false. */
9409 else if (code == LT_EXPR
9410 && tree_expr_nonnegative_p (arg0)
9411 && (integer_zerop (arg1) || real_zerop (arg1)))
9412 return omit_one_operand (type, integer_zero_node, arg0);
9414 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9415 else if ((code == EQ_EXPR || code == NE_EXPR)
9416 && TREE_CODE (arg0) == ABS_EXPR
9417 && (integer_zerop (arg1) || real_zerop (arg1)))
9418 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
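/* For illustration: "abs (x) >= 0" folds to 1, "abs (x) < 0" folds to
   0, and "abs (x) == 0" becomes "x == 0"; the LE_EXPR rule above turns
   "abs (x) <= 5" into the pair "x >= -5 && x <= 5". */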
9420 /* If this is an EQ or NE comparison with zero and ARG0 is
9421 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9422 two operations, but the latter can be done in one less insn
9423 on machines that have only two-operand insns or on which a
9424 constant cannot be the first operand. */
9425 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9426 && TREE_CODE (arg0) == BIT_AND_EXPR)
9428 tree arg00 = TREE_OPERAND (arg0, 0);
9429 tree arg01 = TREE_OPERAND (arg0, 1);
9430 if (TREE_CODE (arg00) == LSHIFT_EXPR
9431 && integer_onep (TREE_OPERAND (arg00, 0)))
9432 return
9433 fold_build2 (code, type,
9434 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9435 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9436 arg01, TREE_OPERAND (arg00, 1)),
9437 fold_convert (TREE_TYPE (arg0),
9438 integer_one_node)),
9439 arg1);
9440 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9441 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9442 return
9443 fold_build2 (code, type,
9444 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9445 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9446 arg00, TREE_OPERAND (arg01, 1)),
9447 fold_convert (TREE_TYPE (arg0),
9448 integer_one_node)),
9449 arg1);
9452 /* If this is an NE or EQ comparison of zero against the result of a
9453 signed MOD operation whose second operand is a power of 2, make
9454 the MOD operation unsigned since it is simpler and equivalent. */
9455 if ((code == NE_EXPR || code == EQ_EXPR)
9456 && integer_zerop (arg1)
9457 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9458 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9459 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9460 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9461 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9462 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9464 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9465 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9466 fold_convert (newtype,
9467 TREE_OPERAND (arg0, 0)),
9468 fold_convert (newtype,
9469 TREE_OPERAND (arg0, 1)));
9471 return fold_build2 (code, type, newmod,
9472 fold_convert (newtype, arg1));
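/* For illustration: for "int x", "x % 4 == 0" becomes
   "(unsigned int) x % 4U == 0" here; for a power-of-two divisor the
   two tests agree, and the unsigned remainder is simpler to expand. */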
9475 /* If this is an NE comparison of zero with an AND of one, remove the
9476 comparison since the AND will give the correct value. */
9477 if (code == NE_EXPR && integer_zerop (arg1)
9478 && TREE_CODE (arg0) == BIT_AND_EXPR
9479 && integer_onep (TREE_OPERAND (arg0, 1)))
9480 return fold_convert (type, arg0);
9482 /* If we have (A & C) == C where C is a power of 2, convert this into
9483 (A & C) != 0. Similarly for NE_EXPR. */
9484 if ((code == EQ_EXPR || code == NE_EXPR)
9485 && TREE_CODE (arg0) == BIT_AND_EXPR
9486 && integer_pow2p (TREE_OPERAND (arg0, 1))
9487 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9488 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9489 arg0, fold_convert (TREE_TYPE (arg0),
9490 integer_zero_node));
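/* For illustration: "(a & 8) == 8" becomes "(a & 8) != 0" here, since
   the masked value can only be 0 or the power-of-two constant
   itself. */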
9492 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9493 bit, then fold the expression into A < 0 or A >= 0. */
9494 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9495 if (tem)
9496 return tem;
9498 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9499 Similarly for NE_EXPR. */
9500 if ((code == EQ_EXPR || code == NE_EXPR)
9501 && TREE_CODE (arg0) == BIT_AND_EXPR
9502 && TREE_CODE (arg1) == INTEGER_CST
9503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9505 tree notc = fold_build1 (BIT_NOT_EXPR,
9506 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9507 TREE_OPERAND (arg0, 1));
9508 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9509 arg1, notc);
9510 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9511 if (integer_nonzerop (dandnotc))
9512 return omit_one_operand (type, rslt, arg0);
9515 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9516 Similarly for NE_EXPR. */
9517 if ((code == EQ_EXPR || code == NE_EXPR)
9518 && TREE_CODE (arg0) == BIT_IOR_EXPR
9519 && TREE_CODE (arg1) == INTEGER_CST
9520 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9522 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9523 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9524 TREE_OPERAND (arg0, 1), notd);
9525 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9526 if (integer_nonzerop (candnotd))
9527 return omit_one_operand (type, rslt, arg0);
9530 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9531 and similarly for >= into !=. */
9532 if ((code == LT_EXPR || code == GE_EXPR)
9533 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9534 && TREE_CODE (arg1) == LSHIFT_EXPR
9535 && integer_onep (TREE_OPERAND (arg1, 0)))
9536 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9537 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9538 TREE_OPERAND (arg1, 1)),
9539 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9541 else if ((code == LT_EXPR || code == GE_EXPR)
9542 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9543 && (TREE_CODE (arg1) == NOP_EXPR
9544 || TREE_CODE (arg1) == CONVERT_EXPR)
9545 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9546 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9547 return
9548 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9549 fold_convert (TREE_TYPE (arg0),
9550 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9551 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9552 1))),
9553 fold_convert (TREE_TYPE (arg0), integer_zero_node));
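/* For illustration: for "unsigned int x", "x < 1U << y" becomes
   "(x >> y) == 0" and "x >= 1U << y" becomes "(x >> y) != 0", with or
   without an intervening widening conversion around the shift. */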
9555 /* Simplify comparison of something with itself. (For IEEE
9556 floating-point, we can only do some of these simplifications.) */
9557 if (operand_equal_p (arg0, arg1, 0))
9559 switch (code)
9561 case EQ_EXPR:
9562 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9563 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9564 return constant_boolean_node (1, type);
9565 break;
9567 case GE_EXPR:
9568 case LE_EXPR:
9569 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9570 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9571 return constant_boolean_node (1, type);
9572 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9574 case NE_EXPR:
9575 /* For NE, we can only do this simplification if integer
9576 or we don't honor IEEE floating point NaNs. */
9577 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9578 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9579 break;
9580 /* ... fall through ... */
9581 case GT_EXPR:
9582 case LT_EXPR:
9583 return constant_boolean_node (0, type);
9584 default:
9585 gcc_unreachable ();
9589 /* If we are comparing an expression that just has comparisons
9590 of two integer values, arithmetic expressions of those comparisons,
9591 and constants, we can simplify it. There are only three cases
9592 to check: the two values can either be equal, the first can be
9593 greater, or the second can be greater. Fold the expression for
9594 those three values. Since each value must be 0 or 1, we have
9595 eight possibilities, each of which corresponds to the constant 0
9596 or 1 or one of the six possible comparisons.
9598 This handles common cases like (a > b) == 0 but also handles
9599 expressions like ((x > y) - (y > x)) > 0, which supposedly
9600 occur in macroized code. */
9602 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9604 tree cval1 = 0, cval2 = 0;
9605 int save_p = 0;
9607 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9608 /* Don't handle degenerate cases here; they should already
9609 have been handled anyway. */
9610 && cval1 != 0 && cval2 != 0
9611 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9612 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9613 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9614 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9615 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9616 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9617 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9619 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9620 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9622 /* We can't just pass T to eval_subst in case cval1 or cval2
9623 was the same as ARG1. */
9625 tree high_result
9626 = fold_build2 (code, type,
9627 eval_subst (arg0, cval1, maxval,
9628 cval2, minval),
9629 arg1);
9630 tree equal_result
9631 = fold_build2 (code, type,
9632 eval_subst (arg0, cval1, maxval,
9633 cval2, maxval),
9634 arg1);
9635 tree low_result
9636 = fold_build2 (code, type,
9637 eval_subst (arg0, cval1, minval,
9638 cval2, maxval),
9639 arg1);
9641 /* All three of these results should be 0 or 1. Confirm they
9642 are. Then use those values to select the proper code
9643 to use. */
9645 if ((integer_zerop (high_result)
9646 || integer_onep (high_result))
9647 && (integer_zerop (equal_result)
9648 || integer_onep (equal_result))
9649 && (integer_zerop (low_result)
9650 || integer_onep (low_result)))
9652 /* Make a 3-bit mask with the high-order bit being the
9653 value for `>', the next for '=', and the low for '<'. */
9654 switch ((integer_onep (high_result) * 4)
9655 + (integer_onep (equal_result) * 2)
9656 + integer_onep (low_result))
9658 case 0:
9659 /* Always false. */
9660 return omit_one_operand (type, integer_zero_node, arg0);
9661 case 1:
9662 code = LT_EXPR;
9663 break;
9664 case 2:
9665 code = EQ_EXPR;
9666 break;
9667 case 3:
9668 code = LE_EXPR;
9669 break;
9670 case 4:
9671 code = GT_EXPR;
9672 break;
9673 case 5:
9674 code = NE_EXPR;
9675 break;
9676 case 6:
9677 code = GE_EXPR;
9678 break;
9679 case 7:
9680 /* Always true. */
9681 return omit_one_operand (type, integer_one_node, arg0);
9684 if (save_p)
9685 return save_expr (build2 (code, type, cval1, cval2));
9686 else
9687 return fold_build2 (code, type, cval1, cval2);
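/* For illustration: for "((x > y) - (y > x)) > 0" the three
   evaluations above yield 1, 0 and 0, so the mask is 4 and the whole
   expression folds to "x > y". */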
9692 /* If this is a comparison of a field, we may be able to simplify it. */
9693 if (((TREE_CODE (arg0) == COMPONENT_REF
9694 && lang_hooks.can_use_bit_fields_p ())
9695 || TREE_CODE (arg0) == BIT_FIELD_REF)
9696 && (code == EQ_EXPR || code == NE_EXPR)
9697 /* Handle the constant case even without -O
9698 to make sure the warnings are given. */
9699 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9701 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9702 if (t1)
9703 return t1;
9706 /* Fold a comparison of the address of COMPONENT_REFs with the same
9707 type and component to a comparison of the address of the base
9708 object. In short, &x->a OP &y->a to x OP y and
9709 &x->a OP &y.a to x OP &y */
9710 if (TREE_CODE (arg0) == ADDR_EXPR
9711 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9712 && TREE_CODE (arg1) == ADDR_EXPR
9713 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9715 tree cref0 = TREE_OPERAND (arg0, 0);
9716 tree cref1 = TREE_OPERAND (arg1, 0);
9717 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9719 tree op0 = TREE_OPERAND (cref0, 0);
9720 tree op1 = TREE_OPERAND (cref1, 0);
9721 return fold_build2 (code, type,
9722 build_fold_addr_expr (op0),
9723 build_fold_addr_expr (op1));
9727 /* Optimize comparisons of strlen vs zero to a compare of the
9728 first character of the string vs zero. To wit,
9729 strlen(ptr) == 0 => *ptr == 0
9730 strlen(ptr) != 0 => *ptr != 0
9731 Other cases should reduce to one of these two (or a constant)
9732 due to the return value of strlen being unsigned. */
9733 if ((code == EQ_EXPR || code == NE_EXPR)
9734 && integer_zerop (arg1)
9735 && TREE_CODE (arg0) == CALL_EXPR)
9737 tree fndecl = get_callee_fndecl (arg0);
9738 tree arglist;
9740 if (fndecl
9741 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9742 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9743 && (arglist = TREE_OPERAND (arg0, 1))
9744 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9745 && ! TREE_CHAIN (arglist))
9747 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9748 return fold_build2 (code, type, iref,
9749 build_int_cst (TREE_TYPE (iref), 0));
9753 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9754 into a single range test. */
9755 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9756 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9757 && TREE_CODE (arg1) == INTEGER_CST
9758 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9759 && !integer_zerop (TREE_OPERAND (arg0, 1))
9760 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9761 && !TREE_OVERFLOW (arg1))
9763 t1 = fold_div_compare (code, type, arg0, arg1);
9764 if (t1 != NULL_TREE)
9765 return t1;
9768 if ((code == EQ_EXPR || code == NE_EXPR)
9769 && integer_zerop (arg1)
9770 && tree_expr_nonzero_p (arg0))
9772 tree res = constant_boolean_node (code==NE_EXPR, type);
9773 return omit_one_operand (type, res, arg0);
9776 t1 = fold_relational_const (code, type, arg0, arg1);
9777 return t1 == NULL_TREE ? NULL_TREE : t1;
9779 case UNORDERED_EXPR:
9780 case ORDERED_EXPR:
9781 case UNLT_EXPR:
9782 case UNLE_EXPR:
9783 case UNGT_EXPR:
9784 case UNGE_EXPR:
9785 case UNEQ_EXPR:
9786 case LTGT_EXPR:
9787 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9789 t1 = fold_relational_const (code, type, arg0, arg1);
9790 if (t1 != NULL_TREE)
9791 return t1;
9794 /* If the first operand is NaN, the result is constant. */
9795 if (TREE_CODE (arg0) == REAL_CST
9796 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9797 && (code != LTGT_EXPR || ! flag_trapping_math))
9799 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9800 ? integer_zero_node
9801 : integer_one_node;
9802 return omit_one_operand (type, t1, arg1);
9805 /* If the second operand is NaN, the result is constant. */
9806 if (TREE_CODE (arg1) == REAL_CST
9807 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9808 && (code != LTGT_EXPR || ! flag_trapping_math))
9810 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9811 ? integer_zero_node
9812 : integer_one_node;
9813 return omit_one_operand (type, t1, arg0);
9816 /* Simplify unordered comparison of something with itself. */
9817 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9818 && operand_equal_p (arg0, arg1, 0))
9819 return constant_boolean_node (1, type);
9821 if (code == LTGT_EXPR
9822 && !flag_trapping_math
9823 && operand_equal_p (arg0, arg1, 0))
9824 return constant_boolean_node (0, type);
9826 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9828 tree targ0 = strip_float_extensions (arg0);
9829 tree targ1 = strip_float_extensions (arg1);
9830 tree newtype = TREE_TYPE (targ0);
9832 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9833 newtype = TREE_TYPE (targ1);
9835 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9836 return fold_build2 (code, type, fold_convert (newtype, targ0),
9837 fold_convert (newtype, targ1));
9840 return NULL_TREE;
9842 case COMPOUND_EXPR:
9843 /* When pedantic, a compound expression can be neither an lvalue
9844 nor an integer constant expression. */
9845 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9846 return NULL_TREE;
9847 /* Don't let (0, 0) be a null pointer constant. */
9848 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9849 : fold_convert (type, arg1);
9850 return pedantic_non_lvalue (tem);
9852 case COMPLEX_EXPR:
9853 if (wins)
9854 return build_complex (type, arg0, arg1);
9855 return NULL_TREE;
9857 case ASSERT_EXPR:
9858 /* An ASSERT_EXPR should never be passed to fold_binary. */
9859 gcc_unreachable ();
9861 default:
9862 return NULL_TREE;
9863 } /* switch (code) */
9866 /* Callback for walk_tree, looking for LABEL_EXPR.
9867 Returns *TP if it is a LABEL_EXPR. Otherwise it returns NULL_TREE.
9868 Do not check the sub-tree of GOTO_EXPR. */
9870 static tree
9871 contains_label_1 (tree *tp,
9872 int *walk_subtrees,
9873 void *data ATTRIBUTE_UNUSED)
9875 switch (TREE_CODE (*tp))
9877 case LABEL_EXPR:
9878 return *tp;
9879 case GOTO_EXPR:
9880 *walk_subtrees = 0;
9881 /* no break */
9882 default:
9883 return NULL_TREE;
9887 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9888 accessible from outside the sub-tree. Returns true if such a
9889 label is found, false otherwise. */
9891 static bool
9892 contains_label_p (tree st)
9894 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
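/* For illustration: this guard keeps the COND_EXPR folding below from
   discarding an arm, such as a GNU statement expression, that defines
   a label some goto outside the arm may still target. */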
9897 /* Fold a ternary expression of code CODE and type TYPE with operands
9898 OP0, OP1, and OP2. Return the folded expression if folding is
9899 successful. Otherwise, return NULL_TREE. */
9901 tree
9902 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9904 tree tem;
9905 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9906 enum tree_code_class kind = TREE_CODE_CLASS (code);
9908 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9909 && TREE_CODE_LENGTH (code) == 3);
9911 /* Strip any conversions that don't change the mode. This is safe
9912 for every expression, except for a comparison expression because
9913 its signedness is derived from its operands. So, in the latter
9914 case, only strip conversions that don't change the signedness.
9916 Note that this is done as an internal manipulation within the
9917 constant folder, in order to find the simplest representation of
9918 the arguments so that their form can be studied. In any cases,
9919 the appropriate type conversions should be put back in the tree
9920 that will get out of the constant folder. */
9921 if (op0)
9923 arg0 = op0;
9924 STRIP_NOPS (arg0);
9927 if (op1)
9929 arg1 = op1;
9930 STRIP_NOPS (arg1);
9933 switch (code)
9935 case COMPONENT_REF:
9936 if (TREE_CODE (arg0) == CONSTRUCTOR
9937 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9939 unsigned HOST_WIDE_INT idx;
9940 tree field, value;
9941 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9942 if (field == arg1)
9943 return value;
9945 return NULL_TREE;
9947 case COND_EXPR:
9948 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9949 so all simple results must be passed through pedantic_non_lvalue. */
9950 if (TREE_CODE (arg0) == INTEGER_CST)
9952 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9953 tem = integer_zerop (arg0) ? op2 : op1;
9954 /* Only optimize constant conditions when the selected branch
9955 has the same type as the COND_EXPR. This avoids optimizing
9956 away "c ? x : throw", where the throw has a void type.
9957 Avoid throwing away the operand that contains a label. */
9958 if ((!TREE_SIDE_EFFECTS (unused_op)
9959 || !contains_label_p (unused_op))
9960 && (! VOID_TYPE_P (TREE_TYPE (tem))
9961 || VOID_TYPE_P (type)))
9962 return pedantic_non_lvalue (tem);
9963 return NULL_TREE;
9965 if (operand_equal_p (arg1, op2, 0))
9966 return pedantic_omit_one_operand (type, arg1, arg0);
9968 /* If we have A op B ? A : C, we may be able to convert this to a
9969 simpler expression, depending on the operation and the values
9970 of B and C. Signed zeros prevent all of these transformations,
9971 for reasons given above each one.
9973 Also try swapping the arguments and inverting the conditional. */
9974 if (COMPARISON_CLASS_P (arg0)
9975 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9976 arg1, TREE_OPERAND (arg0, 1))
9977 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9979 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9980 if (tem)
9981 return tem;
9984 if (COMPARISON_CLASS_P (arg0)
9985 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9986 op2,
9987 TREE_OPERAND (arg0, 1))
9988 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9990 tem = invert_truthvalue (arg0);
9991 if (COMPARISON_CLASS_P (tem))
9993 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9994 if (tem)
9995 return tem;
9999 /* If the second operand is simpler than the third, swap them
10000 since that produces better jump optimization results. */
10001 if (tree_swap_operands_p (op1, op2, false))
10003 /* See if this can be inverted. If it can't, possibly because
10004 it was a floating-point inequality comparison, don't do
10005 anything. */
10006 tem = invert_truthvalue (arg0);
10008 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10009 return fold_build3 (code, type, tem, op2, op1);
10012 /* Convert A ? 1 : 0 to simply A. */
10013 if (integer_onep (op1)
10014 && integer_zerop (op2)
10015 /* If we try to convert OP0 to our type, the
10016 call to fold will try to move the conversion inside
10017 a COND, which will recurse. In that case, the COND_EXPR
10018 is probably the best choice, so leave it alone. */
10019 && type == TREE_TYPE (arg0))
10020 return pedantic_non_lvalue (arg0);
10022 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10023 over COND_EXPR in cases such as floating point comparisons. */
10024 if (integer_zerop (op1)
10025 && integer_onep (op2)
10026 && truth_value_p (TREE_CODE (arg0)))
10027 return pedantic_non_lvalue (fold_convert (type,
10028 invert_truthvalue (arg0)));
10030 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10031 if (TREE_CODE (arg0) == LT_EXPR
10032 && integer_zerop (TREE_OPERAND (arg0, 1))
10033 && integer_zerop (op2)
10034 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10035 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10036 TREE_TYPE (tem), tem, arg1));
10038 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10039 already handled above. */
10040 if (TREE_CODE (arg0) == BIT_AND_EXPR
10041 && integer_onep (TREE_OPERAND (arg0, 1))
10042 && integer_zerop (op2)
10043 && integer_pow2p (arg1))
10045 tree tem = TREE_OPERAND (arg0, 0);
10046 STRIP_NOPS (tem);
10047 if (TREE_CODE (tem) == RSHIFT_EXPR
10048 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10049 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10050 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10051 return fold_build2 (BIT_AND_EXPR, type,
10052 TREE_OPERAND (tem, 0), arg1);
10055 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10056 is probably obsolete because the first operand should be a
10057 truth value (that's why we have the two cases above), but let's
10058 leave it in until we can confirm this for all front-ends. */
10059 if (integer_zerop (op2)
10060 && TREE_CODE (arg0) == NE_EXPR
10061 && integer_zerop (TREE_OPERAND (arg0, 1))
10062 && integer_pow2p (arg1)
10063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10064 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10065 arg1, OEP_ONLY_CONST))
10066 return pedantic_non_lvalue (fold_convert (type,
10067 TREE_OPERAND (arg0, 0)));
10069 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10070 if (integer_zerop (op2)
10071 && truth_value_p (TREE_CODE (arg0))
10072 && truth_value_p (TREE_CODE (arg1)))
10073 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10075 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10076 if (integer_onep (op2)
10077 && truth_value_p (TREE_CODE (arg0))
10078 && truth_value_p (TREE_CODE (arg1)))
10080 /* Only perform transformation if ARG0 is easily inverted. */
10081 tem = invert_truthvalue (arg0);
10082 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10083 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10086 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10087 if (integer_zerop (arg1)
10088 && truth_value_p (TREE_CODE (arg0))
10089 && truth_value_p (TREE_CODE (op2)))
10091 /* Only perform transformation if ARG0 is easily inverted. */
10092 tem = invert_truthvalue (arg0);
10093 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10094 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10097 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10098 if (integer_onep (arg1)
10099 && truth_value_p (TREE_CODE (arg0))
10100 && truth_value_p (TREE_CODE (op2)))
10101 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
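/* For illustration: with truth-valued a and b, the rules above rewrite
   "a ? 1 : 0" as "a", "a ? 0 : 1" as "!a", "a ? b : 0" as "a && b",
   and "a ? 1 : b" as "a || b". */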
10103 return NULL_TREE;
10105 case CALL_EXPR:
10106 /* Check for a built-in function. */
10107 if (TREE_CODE (op0) == ADDR_EXPR
10108 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10109 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10111 tree fndecl = TREE_OPERAND (op0, 0);
10112 tree arglist = op1;
10113 tree tmp = fold_builtin (fndecl, arglist, false);
10114 if (tmp)
10115 return tmp;
10117 return NULL_TREE;
10119 case BIT_FIELD_REF:
10120 if (TREE_CODE (arg0) == VECTOR_CST
10121 && type == TREE_TYPE (TREE_TYPE (arg0))
10122 && host_integerp (arg1, 1)
10123 && host_integerp (op2, 1))
10125 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10126 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10128 if (width != 0
10129 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10130 && (idx % width) == 0
10131 && (idx = idx / width)
10132 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10134 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10135 while (idx-- > 0 && elements)
10136 elements = TREE_CHAIN (elements);
10137 if (elements)
10138 return TREE_VALUE (elements);
10139 else
10140 return fold_convert (type, integer_zero_node);
10143 return NULL_TREE;
10145 default:
10146 return NULL_TREE;
10147 } /* switch (code) */
10150 /* Perform constant folding and related simplification of EXPR.
10151 The related simplifications include x*1 => x, x*0 => 0, etc.,
10152 and application of the associative law.
10153 NOP_EXPR conversions may be removed freely (as long as we
10154 are careful not to change the type of the overall expression).
10155 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10156 but we can constant-fold them if they have constant operands. */
10158 #ifdef ENABLE_FOLD_CHECKING
10159 # define fold(x) fold_1 (x)
10160 static tree fold_1 (tree);
10161 static
10162 #endif
10163 tree
10164 fold (tree expr)
10166 const tree t = expr;
10167 enum tree_code code = TREE_CODE (t);
10168 enum tree_code_class kind = TREE_CODE_CLASS (code);
10169 tree tem;
10171 /* Return right away if a constant. */
10172 if (kind == tcc_constant)
10173 return t;
10175 if (IS_EXPR_CODE_CLASS (kind))
10177 tree type = TREE_TYPE (t);
10178 tree op0, op1, op2;
10180 switch (TREE_CODE_LENGTH (code))
10182 case 1:
10183 op0 = TREE_OPERAND (t, 0);
10184 tem = fold_unary (code, type, op0);
10185 return tem ? tem : expr;
10186 case 2:
10187 op0 = TREE_OPERAND (t, 0);
10188 op1 = TREE_OPERAND (t, 1);
10189 tem = fold_binary (code, type, op0, op1);
10190 return tem ? tem : expr;
10191 case 3:
10192 op0 = TREE_OPERAND (t, 0);
10193 op1 = TREE_OPERAND (t, 1);
10194 op2 = TREE_OPERAND (t, 2);
10195 tem = fold_ternary (code, type, op0, op1, op2);
10196 return tem ? tem : expr;
10197 default:
10198 break;
10202 switch (code)
10204 case CONST_DECL:
10205 return fold (DECL_INITIAL (t));
10207 default:
10208 return t;
10209 } /* switch (code) */
10212 #ifdef ENABLE_FOLD_CHECKING
10213 #undef fold
10215 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10216 static void fold_check_failed (tree, tree);
10217 void print_fold_checksum (tree);
10219 /* When --enable-checking=fold, compute a digest of expr before
10220 and after the actual fold call, to verify that fold did not
10221 accidentally change the original expr. */
10223 tree
10224 fold (tree expr)
10226 tree ret;
10227 struct md5_ctx ctx;
10228 unsigned char checksum_before[16], checksum_after[16];
10229 htab_t ht;
10231 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10232 md5_init_ctx (&ctx);
10233 fold_checksum_tree (expr, &ctx, ht);
10234 md5_finish_ctx (&ctx, checksum_before);
10235 htab_empty (ht);
10237 ret = fold_1 (expr);
10239 md5_init_ctx (&ctx);
10240 fold_checksum_tree (expr, &ctx, ht);
10241 md5_finish_ctx (&ctx, checksum_after);
10242 htab_delete (ht);
10244 if (memcmp (checksum_before, checksum_after, 16))
10245 fold_check_failed (expr, ret);
10247 return ret;
10250 void
10251 print_fold_checksum (tree expr)
10253 struct md5_ctx ctx;
10254 unsigned char checksum[16], cnt;
10255 htab_t ht;
10257 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10258 md5_init_ctx (&ctx);
10259 fold_checksum_tree (expr, &ctx, ht);
10260 md5_finish_ctx (&ctx, checksum);
10261 htab_delete (ht);
10262 for (cnt = 0; cnt < 16; ++cnt)
10263 fprintf (stderr, "%02x", checksum[cnt]);
10264 putc ('\n', stderr);
10267 static void
10268 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10270 internal_error ("fold check: original tree changed by fold");
10273 static void
10274 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10276 void **slot;
10277 enum tree_code code;
10278 char buf[sizeof (struct tree_decl_non_common)];
10279 int i, len;
10281 recursive_label:
10283 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10284 <= sizeof (struct tree_decl_non_common))
10285 && sizeof (struct tree_type) <= sizeof (struct tree_decl_non_common));
10286 if (expr == NULL)
10287 return;
10288 slot = htab_find_slot (ht, expr, INSERT);
10289 if (*slot != NULL)
10290 return;
10291 *slot = expr;
10292 code = TREE_CODE (expr);
10293 if (TREE_CODE_CLASS (code) == tcc_declaration
10294 && DECL_ASSEMBLER_NAME_SET_P (expr))
10296 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10297 memcpy (buf, expr, tree_size (expr));
10298 expr = (tree) buf;
10299 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10301 else if (TREE_CODE_CLASS (code) == tcc_type
10302 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10303 || TYPE_CACHED_VALUES_P (expr)
10304 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10306 /* Allow these fields to be modified. */
10307 memcpy (buf, expr, tree_size (expr));
10308 expr = (tree) buf;
10309 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10310 TYPE_POINTER_TO (expr) = NULL;
10311 TYPE_REFERENCE_TO (expr) = NULL;
10312 if (TYPE_CACHED_VALUES_P (expr))
10314 TYPE_CACHED_VALUES_P (expr) = 0;
10315 TYPE_CACHED_VALUES (expr) = NULL;
10318 md5_process_bytes (expr, tree_size (expr), ctx);
10319 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10320 if (TREE_CODE_CLASS (code) != tcc_type
10321 && TREE_CODE_CLASS (code) != tcc_declaration
10322 && code != TREE_LIST)
10323 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10324 switch (TREE_CODE_CLASS (code))
10326 case tcc_constant:
10327 switch (code)
10329 case STRING_CST:
10330 md5_process_bytes (TREE_STRING_POINTER (expr),
10331 TREE_STRING_LENGTH (expr), ctx);
10332 break;
10333 case COMPLEX_CST:
10334 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10335 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10336 break;
10337 case VECTOR_CST:
10338 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10339 break;
10340 default:
10341 break;
10343 break;
10344 case tcc_exceptional:
10345 switch (code)
10347 case TREE_LIST:
10348 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10349 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10350 expr = TREE_CHAIN (expr);
10351 goto recursive_label;
10352 break;
10353 case TREE_VEC:
10354 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10355 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10356 break;
10357 default:
10358 break;
10360 break;
10361 case tcc_expression:
10362 case tcc_reference:
10363 case tcc_comparison:
10364 case tcc_unary:
10365 case tcc_binary:
10366 case tcc_statement:
10367 len = TREE_CODE_LENGTH (code);
10368 for (i = 0; i < len; ++i)
10369 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10370 break;
10371 case tcc_declaration:
10372 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10373 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10374 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10375 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10376 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10377 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10378 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10379 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10380 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10381 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10382 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10383 break;
10384 case tcc_type:
10385 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10386 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10387 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10388 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10389 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10390 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10391 if (INTEGRAL_TYPE_P (expr)
10392 || SCALAR_FLOAT_TYPE_P (expr))
10394 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10395 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10397 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10398 if (TREE_CODE (expr) == RECORD_TYPE
10399 || TREE_CODE (expr) == UNION_TYPE
10400 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10401 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10402 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10403 break;
10404 default:
10405 break;
10409 #endif
10411 /* Fold a unary tree expression with code CODE of type TYPE with an
10412 operand OP0. Return a folded expression if successful. Otherwise,
10413 return a tree expression with code CODE of type TYPE with an
10414 operand OP0. */
10416 tree
10417 fold_build1 (enum tree_code code, tree type, tree op0)
10419 tree tem;
10420 #ifdef ENABLE_FOLD_CHECKING
10421 unsigned char checksum_before[16], checksum_after[16];
10422 struct md5_ctx ctx;
10423 htab_t ht;
10425 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10426 md5_init_ctx (&ctx);
10427 fold_checksum_tree (op0, &ctx, ht);
10428 md5_finish_ctx (&ctx, checksum_before);
10429 htab_empty (ht);
10430 #endif
10432 tem = fold_unary (code, type, op0);
10433 if (!tem)
10434 tem = build1 (code, type, op0);
10436 #ifdef ENABLE_FOLD_CHECKING
10437 md5_init_ctx (&ctx);
10438 fold_checksum_tree (op0, &ctx, ht);
10439 md5_finish_ctx (&ctx, checksum_after);
10440 htab_delete (ht);
10442 if (memcmp (checksum_before, checksum_after, 16))
10443 fold_check_failed (op0, tem);
10444 #endif
10445 return tem;
10448 /* Fold a binary tree expression with code CODE of type TYPE with
10449 operands OP0 and OP1. Return a folded expression if successful.
10450 Otherwise, return a tree expression with code CODE of type TYPE
10451 with operands OP0 and OP1. */
10453 tree
10454 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10456 tree tem;
10457 #ifdef ENABLE_FOLD_CHECKING
10458 unsigned char checksum_before_op0[16],
10459 checksum_before_op1[16],
10460 checksum_after_op0[16],
10461 checksum_after_op1[16];
10462 struct md5_ctx ctx;
10463 htab_t ht;
10465 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10466 md5_init_ctx (&ctx);
10467 fold_checksum_tree (op0, &ctx, ht);
10468 md5_finish_ctx (&ctx, checksum_before_op0);
10469 htab_empty (ht);
10471 md5_init_ctx (&ctx);
10472 fold_checksum_tree (op1, &ctx, ht);
10473 md5_finish_ctx (&ctx, checksum_before_op1);
10474 htab_empty (ht);
10475 #endif
10477 tem = fold_binary (code, type, op0, op1);
10478 if (!tem)
10479 tem = build2 (code, type, op0, op1);
10481 #ifdef ENABLE_FOLD_CHECKING
10482 md5_init_ctx (&ctx);
10483 fold_checksum_tree (op0, &ctx, ht);
10484 md5_finish_ctx (&ctx, checksum_after_op0);
10485 htab_empty (ht);
10487 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10488 fold_check_failed (op0, tem);
10490 md5_init_ctx (&ctx);
10491 fold_checksum_tree (op1, &ctx, ht);
10492 md5_finish_ctx (&ctx, checksum_after_op1);
10493 htab_delete (ht);
10495 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10496 fold_check_failed (op1, tem);
10497 #endif
10498 return tem;
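/* For illustration: a typical call is
     tree sum = fold_build2 (PLUS_EXPR, integer_type_node, a, b);
   which yields a simplified tree when fold_binary succeeds and a
   freshly built PLUS_EXPR otherwise; with ENABLE_FOLD_CHECKING the
   digests above verify the operands were left unmodified. */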
10501 /* Fold a ternary tree expression with code CODE of type TYPE with
10502 operands OP0, OP1, and OP2. Return a folded expression if
10503 successful. Otherwise, return a tree expression with code CODE of
10504 type TYPE with operands OP0, OP1, and OP2. */
10506 tree
10507 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10508 tree tem;
10509 #ifdef ENABLE_FOLD_CHECKING
10510 unsigned char checksum_before_op0[16],
10511 checksum_before_op1[16],
10512 checksum_before_op2[16],
10513 checksum_after_op0[16],
10514 checksum_after_op1[16],
10515 checksum_after_op2[16];
10516 struct md5_ctx ctx;
10517 htab_t ht;
10519 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10520 md5_init_ctx (&ctx);
10521 fold_checksum_tree (op0, &ctx, ht);
10522 md5_finish_ctx (&ctx, checksum_before_op0);
10523 htab_empty (ht);
10525 md5_init_ctx (&ctx);
10526 fold_checksum_tree (op1, &ctx, ht);
10527 md5_finish_ctx (&ctx, checksum_before_op1);
10528 htab_empty (ht);
10530 md5_init_ctx (&ctx);
10531 fold_checksum_tree (op2, &ctx, ht);
10532 md5_finish_ctx (&ctx, checksum_before_op2);
10533 htab_empty (ht);
10534 #endif
10536 tem = fold_ternary (code, type, op0, op1, op2);
10537 if (!tem)
10538 tem = build3 (code, type, op0, op1, op2);
10540 #ifdef ENABLE_FOLD_CHECKING
10541 md5_init_ctx (&ctx);
10542 fold_checksum_tree (op0, &ctx, ht);
10543 md5_finish_ctx (&ctx, checksum_after_op0);
10544 htab_empty (ht);
10546 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10547 fold_check_failed (op0, tem);
10549 md5_init_ctx (&ctx);
10550 fold_checksum_tree (op1, &ctx, ht);
10551 md5_finish_ctx (&ctx, checksum_after_op1);
10552 htab_empty (ht);
10554 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10555 fold_check_failed (op1, tem);
10557 md5_init_ctx (&ctx);
10558 fold_checksum_tree (op2, &ctx, ht);
10559 md5_finish_ctx (&ctx, checksum_after_op2);
10560 htab_delete (ht);
10562 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10563 fold_check_failed (op2, tem);
10564 #endif
10565 return tem;
10568 /* Perform constant folding and related simplification of initializer
10569 expression EXPR. This behaves identically to "fold" but ignores
10570 potential run-time traps and exceptions that fold must preserve. */
10572 tree
10573 fold_initializer (tree expr)
10575 int saved_signaling_nans = flag_signaling_nans;
10576 int saved_trapping_math = flag_trapping_math;
10577 int saved_rounding_math = flag_rounding_math;
10578 int saved_trapv = flag_trapv;
10579 tree result;
10581 flag_signaling_nans = 0;
10582 flag_trapping_math = 0;
10583 flag_rounding_math = 0;
10584 flag_trapv = 0;
10586 result = fold (expr);
10588 flag_signaling_nans = saved_signaling_nans;
10589 flag_trapping_math = saved_trapping_math;
10590 flag_rounding_math = saved_rounding_math;
10591 flag_trapv = saved_trapv;
10593 return result;
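/* For illustration: clearing the flags above lets an initializer such
   as "static double d = 1.0 / 3.0;" fold to a constant even when
   -ftrapping-math or -frounding-math would otherwise force fold to
   preserve the run-time operation. */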
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
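
/* Editorial worked example, not part of the original source: for
   TOP = J * 8 + 16 and BOTTOM = 8 in an integer type T, the recursion
   above proceeds

     multiple_of_p (T, J * 8 + 16, 8)
       = multiple_of_p (T, J * 8, 8) && multiple_of_p (T, 16, 8)   [PLUS_EXPR]
       = (multiple_of_p (T, J, 8)
          || multiple_of_p (T, 8, 8)) && (16 % 8 == 0)             [MULT_EXPR]
       = (0 || 1) && 1
       = 1

   using operand_equal_p for the 8-vs-8 test and the INTEGER_CST case for
   the 16 % 8 test; the bare variable J falls through to the default case
   and contributes 0.  */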
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
        return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their precisions is smaller than the
         precision of the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

            CASE_BUILTIN_F (BUILT_IN_ACOS)
            CASE_BUILTIN_F (BUILT_IN_ACOSH)
            CASE_BUILTIN_F (BUILT_IN_CABS)
            CASE_BUILTIN_F (BUILT_IN_COSH)
            CASE_BUILTIN_F (BUILT_IN_ERFC)
            CASE_BUILTIN_F (BUILT_IN_EXP)
            CASE_BUILTIN_F (BUILT_IN_EXP10)
            CASE_BUILTIN_F (BUILT_IN_EXP2)
            CASE_BUILTIN_F (BUILT_IN_FABS)
            CASE_BUILTIN_F (BUILT_IN_FDIM)
            CASE_BUILTIN_F (BUILT_IN_FREXP)
            CASE_BUILTIN_F (BUILT_IN_HYPOT)
            CASE_BUILTIN_F (BUILT_IN_POW10)
            CASE_BUILTIN_I (BUILT_IN_FFS)
            CASE_BUILTIN_I (BUILT_IN_PARITY)
            CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
              /* Always true.  */
              return 1;

            CASE_BUILTIN_F (BUILT_IN_SQRT)
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_ASINH)
            CASE_BUILTIN_F (BUILT_IN_ATAN)
            CASE_BUILTIN_F (BUILT_IN_ATANH)
            CASE_BUILTIN_F (BUILT_IN_CBRT)
            CASE_BUILTIN_F (BUILT_IN_CEIL)
            CASE_BUILTIN_F (BUILT_IN_ERF)
            CASE_BUILTIN_F (BUILT_IN_EXPM1)
            CASE_BUILTIN_F (BUILT_IN_FLOOR)
            CASE_BUILTIN_F (BUILT_IN_FMOD)
            CASE_BUILTIN_F (BUILT_IN_LCEIL)
            CASE_BUILTIN_F (BUILT_IN_LDEXP)
            CASE_BUILTIN_F (BUILT_IN_LFLOOR)
            CASE_BUILTIN_F (BUILT_IN_LLCEIL)
            CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
            CASE_BUILTIN_F (BUILT_IN_LLRINT)
            CASE_BUILTIN_F (BUILT_IN_LLROUND)
            CASE_BUILTIN_F (BUILT_IN_LRINT)
            CASE_BUILTIN_F (BUILT_IN_LROUND)
            CASE_BUILTIN_F (BUILT_IN_MODF)
            CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
            CASE_BUILTIN_F (BUILT_IN_POW)
            CASE_BUILTIN_F (BUILT_IN_RINT)
            CASE_BUILTIN_F (BUILT_IN_ROUND)
            CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
            CASE_BUILTIN_F (BUILT_IN_SINH)
            CASE_BUILTIN_F (BUILT_IN_TANH)
            CASE_BUILTIN_F (BUILT_IN_TRUNC)
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_FMAX)
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_FMIN)
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return 0;
}
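
/* Editorial worked example, not part of the original source: with 32-bit
   int, T = (int) (unsigned char) x * (int) (unsigned char) y hits the
   MULT_EXPR case above -- both operands are NOP_EXPRs from unsigned 8-bit
   types and 8 + 8 < 32, so the product cannot reach the sign bit and the
   function returns 1.  A plain signed x * y returns 0 instead, since the
   product may overflow and wrap negative.  */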
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work
         correctly if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
              || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* In the presence of negative values it is hard to say
             anything.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
             || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
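
/* Editorial example, not part of the original source: for T = &some_var
   the ADDR_EXPR case applies.  A non-weak VAR_DECL must be allocated
   somewhere, so its address compares nonzero, whereas a weak declaration
   may resolve to NULL at link time, and the function conservatively
   returns false for it.  */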
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
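
/* Editorial usage sketch, not part of the original source; the fragment is
   illustrative and the variable names are hypothetical.  These wrappers
   give callers "constant or nothing" semantics:

     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         op0, op1);
     if (sum)
       ... use the resulting constant ...
     else
       ... nothing simplified; OP0 and OP1 were left untouched ...  */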
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion,
             (ARRAY + (INDEX - (unsigned char) 1)) becomes
             ((ARRAY + (- (unsigned char) 1)) + INDEX), which becomes
             (ARRAY + 255 + INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
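
/* Editorial worked example, not part of the original source: for
   EXP = "abc"[1], STRING is the STRING_CST "abc" and INDEX folds to 1.
   All the checks above pass (1 < TREE_STRING_LENGTH == 4, and the element
   mode is a one-byte MODE_INT), so the routine returns the character 'b'
   as an INTEGER_CST converted to EXP's type.  */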
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = build_int_cst_wide (type, low, high);
        t = force_fit_type (t, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type),
                            TREE_CONSTANT_OVERFLOW (arg0));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_cst_wide (type, low, high);
          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
                              TREE_CONSTANT_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
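
/* Editorial note, not part of the original source: the one interesting
   boundary case above is ARG0 = INT_MIN of a signed type, where neg_double
   reports overflow because |INT_MIN| is not representable; force_fit_type
   then marks the wrapped result with TREE_OVERFLOW rather than returning
   it silently.  */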
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
                          ~ TREE_INT_CST_LOW (arg0),
                          ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
                      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
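
/* Editorial worked example, not part of the original source: if C0 is a
   NaN, the switch above folds UNLT_EXPR to 1 and ORDERED_EXPR to 0, while
   a signaling comparison such as LT_EXPR is deliberately left unfolded
   under flag_trapping_math, because evaluating it at run time may raise
   an invalid-operand exception.  */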
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for an EXPR which does not have
   side effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return has no side effects, or whether the right hand side of the modify
     expression inside the return has no side effects.  If either has no side
     effects, we don't need to wrap the expression in a cleanup point
     expression.  Note we don't check the left hand side of the modify
     because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
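
/* Editorial example, not part of the original source: for a VAR_DECL X of
   type int, build_fold_addr_expr (x) returns an ADDR_EXPR of pointer-to-int
   type and, via the DECL_P branch above, sets TREE_ADDRESSABLE (x) as a
   side effect; for T = *p it instead folds straight back to P (possibly
   wrapped in a NOP_EXPR to adjust the pointer type) without building an
   ADDR_EXPR at all.  */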
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (type == optype)
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
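
/* Editorial worked example, not part of the original source: for
   T = x + foo () whose value is ignored, the tcc_binary case drops the
   side-effect-free operand X and loops again with foo (), which is then
   returned as-is from the default case; an expression with no side
   effects at all collapses immediately to integer_zero_node.  */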
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a constant,
     because in that case, this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a constant,
     because in that case, this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
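
/* Editorial worked example, not part of the original source: for
   VALUE = 37, round_up with the power-of-two divisor 8 computes
   (37 + 7) & -8 = 40 and round_down computes 37 & -8 = 32, while the
   non-power-of-two divisor 12 takes the division path:
   ceil (37 / 12) * 12 = 48 via CEIL_DIV_EXPR and
   floor (37 / 12) * 12 = 36 via FLOOR_DIV_EXPR.  */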
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!host_integerp (tdiff, 0))
        return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
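
/* Editorial worked example, not part of the original source: for
   E1 = &a[3] and E2 = &a[1] where A is an array of 4-byte ints, both
   addresses split to the common core &a with constant bit positions 96
   and 32 and no variable offset, so *DIFF becomes (96 - 32) / 8 = 8.
   If one index were a variable, get_inner_reference would produce a
   non-constant offset on that side only, and the function would return
   false.  */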
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    default:
      break;
    }
  return NULL_TREE;
}
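
/* Editorial example, not part of the original source: when only the
   magnitude of the result matters (say the value feeds fabs), this routine
   turns -x * y into x * y and strips ABS_EXPR and NEGATE_EXPR wrappers
   outright, but it refuses to touch MULT_EXPR and RDIV_EXPR under
   sign-dependent rounding (HONOR_SIGN_DEPENDENT_ROUNDING), since the
   rounding of the product then depends on its sign.  */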