/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
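
/* A worked example of the encoding: the low three bits stand for
   "less", "equal" and "greater" (plus an "unordered" bit), so
   combining comparison codes is plain bitwise arithmetic, e.g.

     COMPCODE_LT | COMPCODE_EQ  ==  1 | 2  ==  3  ==  COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE  ==  3 & 6  ==  2  ==  COMPCODE_EQ

   which is exactly how (a < b || a == b) folds to a <= b and
   (a <= b && a >= b) folds to a == b.  */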
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
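
/* A worked example, assuming a 32-bit HOST_WIDE_INT for brevity:
   adding 0x7fffffff and 1 wraps to 0x80000000.  Here a ^ b has the
   sign bit clear (same signs) while a ^ sum has it set (the sign
   changed), so ~(a ^ b) & (a ^ sum) is negative and the macro
   reports overflow.  Adding -1 and 1 gives 0; the operands differ
   in sign, ~(a ^ b) has the sign bit clear, and no overflow is
   reported.  */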
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
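
/* A round-trip sketch, assuming a 64-bit HOST_WIDE_INT (so BASE is
   1 << 32 and each word holds one 32-bit digit):

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, ((unsigned HOST_WIDE_INT) 5 << 32) | 7, 9);
     /+ w[0] == 7, w[1] == 5, w[2] == 9, w[3] == 0 +/
     decode (w, &lo, &hi);
     /+ lo and hi hold the original pair again +/

   (/+ +/ stands for a nested comment.)  These four base-2^32 digits
   are what the long multiplication and division loops below operate
   on.  */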
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if:
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if:
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
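
/* Usage sketch: fitting the value 0x1ff into an 8-bit unsigned type
   masks it down to 0xff (bits beyond the precision are cleared); for
   a signed 8-bit type the same bit pattern is then sign extended to
   -1.  Callers such as int_const_binop below pass their overflow
   flags through so the result carries TREE_OVERFLOW when
   appropriate.  */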
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
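
/* For example, adding {low = all ones, high = 0} (i.e.
   2^HOST_BITS_PER_WIDE_INT - 1) to {low = 1, high = 0}: the low word
   wraps to 0, (l < l1) supplies the carry, and the result is
   {low = 0, high = 1}.  Both high words and the resulting high word
   are nonnegative, so OVERFLOW_SUM_SIGN reports no signed
   overflow.  */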
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);  /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
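
/* A sketch of the overflow test: the loop computes all eight
   half-word digits of the exact product; prod[0..3] become the
   doubleword result and prod[4..7] its discarded top half.  For a
   signed product that fits in a doubleword, that top half (after the
   two neg_double/add_double corrections that compensate for treating
   negative operands as unsigned) must be all zero bits for a
   nonnegative result or all one bits for a negative one, matching
   the result's sign; anything else means the true product needed
   more than two words.  */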
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
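
/* Note the double shift in the mixed case above: writing
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 instead of
   l1 >> (HOST_BITS_PER_WIDE_INT - count) keeps each shift amount
   strictly smaller than the word size even when count is 0, since a
   shift by the full word width is undefined behavior in C.  */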
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
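
/* The rotate-by-two-shifts identity at work, for an 8-bit precision:
   rotating 0b10010110 left by 3 is

     (0b10010110 << 3) | (0b10010110 >> 5)  ==  0b10110100

   masked to 8 bits; the left shift supplies the high part and the
   complementary logical right shift wraps the displaced bits
   around.  */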
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;                /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
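
/* How the rounding modes differ, for -7 / 2 (true ratio -3.5):

     TRUNC_DIV_EXPR  -> quo = -3, rem = -1   (toward zero)
     FLOOR_DIV_EXPR  -> quo = -4, rem =  1   (toward -infinity)
     CEIL_DIV_EXPR   -> quo = -3, rem = -1   (toward +infinity)
     ROUND_DIV_EXPR  -> quo = -4, rem =  1   (nearest; 2*|rem| >= |den|)

   In every case the final fixup above recomputes rem so that
   num == quo * den + rem holds exactly.  */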
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
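
/* The only signed value this rejects is the type's minimum: for a
   32-bit int, negating INT_MIN (-2147483648, bit pattern 1 << 31)
   yields a value outside the type, so the final comparison against
   (unsigned HOST_WIDE_INT) 1 << (prec - 1) returns false exactly for
   that bit pattern.  */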
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
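
/* The RSHIFT_EXPR case relies on a small identity: for 32-bit x,
   (int) x >> 31 is 0 when x >= 0 and -1 when x < 0, so its negation
   is 0 or 1, which is exactly (unsigned) x >> 31.  Negation is
   therefore free once the shift is made logical.  */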
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
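
/* A decomposition sketch: splitting IN = x - 5 with CODE PLUS_EXPR
   leaves *litp null, sets *minus_litp to 5 (the subtracted literal),
   leaves *conp null, and returns x as the variable part.  The caller
   can then reassociate, e.g. fold (x - 5) + 7 by combining the
   literals into 2 and rebuilding x + 2 with associate_trees
   below.  */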
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
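
/* Usage sketch (a hypothetical caller, TYPE being some integer
   type): combining the INTEGER_CSTs for 6 and 7 under MULT_EXPR,

     tree t = int_const_binop (MULT_EXPR, build_int_cst (type, 6),
                               build_int_cst (type, 7), 0);

   yields the constant 42 of TYPE, already passed through
   force_fit_type so that truncation and the overflow flags match the
   type's precision.  */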
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */

      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
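
/* The RDIV_EXPR case above is the textbook quotient formula: for
   (a + bi) / (c + di) it computes

     real = (a*c + b*d) / (c*c + d*d)
     imag = (b*c - a*d) / (c*c + d*d)

   with t1, t2 and magsquared holding the two numerators and the
   common denominator respectively.  */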
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
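
/* For constants the dance above avoids unsigned wrap-around: with
   sizetype operands 3 and 8, computing 3 - 8 directly would wrap, so
   the function computes 8 - 3 = 5 in the unsigned type, converts to
   ssizetype, and subtracts it from zero to give -5.  */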
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
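
/* Worked examples of the saturating behavior, for a 32-bit signed
   target type: converting 1e10 clamps to INT_MAX (2147483647),
   converting -1e10 clamps to INT_MIN (-2147483648), and converting a
   NaN produces 0; in each case the result also carries
   TREE_OVERFLOW.  */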
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1953 /* Convert expression ARG to type TYPE. Used by the middle-end for
1954 simple conversions in preference to calling the front-end's convert. */
1956 tree
1957 fold_convert (tree type, tree arg)
1959 tree orig = TREE_TYPE (arg);
1960 tree tem;
1962 if (type == orig)
1963 return arg;
1965 if (TREE_CODE (arg) == ERROR_MARK
1966 || TREE_CODE (type) == ERROR_MARK
1967 || TREE_CODE (orig) == ERROR_MARK)
1968 return error_mark_node;
1970 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1971 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1972 TYPE_MAIN_VARIANT (orig)))
1973 return fold_build1 (NOP_EXPR, type, arg);
1975 switch (TREE_CODE (type))
1977 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1978 case POINTER_TYPE: case REFERENCE_TYPE:
1979 case OFFSET_TYPE:
1980 if (TREE_CODE (arg) == INTEGER_CST)
1982 tem = fold_convert_const (NOP_EXPR, type, arg);
1983 if (tem != NULL_TREE)
1984 return tem;
1986 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1987 || TREE_CODE (orig) == OFFSET_TYPE)
1988 return fold_build1 (NOP_EXPR, type, arg);
1989 if (TREE_CODE (orig) == COMPLEX_TYPE)
1991 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1992 return fold_convert (type, tem);
1994 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1995 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1996 return fold_build1 (NOP_EXPR, type, arg);
1998 case REAL_TYPE:
1999 if (TREE_CODE (arg) == INTEGER_CST)
2001 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2002 if (tem != NULL_TREE)
2003 return tem;
2005 else if (TREE_CODE (arg) == REAL_CST)
2007 tem = fold_convert_const (NOP_EXPR, type, arg);
2008 if (tem != NULL_TREE)
2009 return tem;
2012 switch (TREE_CODE (orig))
2014 case INTEGER_TYPE: case CHAR_TYPE:
2015 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2016 case POINTER_TYPE: case REFERENCE_TYPE:
2017 return fold_build1 (FLOAT_EXPR, type, arg);
2019 case REAL_TYPE:
2020 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
2021 type, arg);
2023 case COMPLEX_TYPE:
2024 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2025 return fold_convert (type, tem);
2027 default:
2028 gcc_unreachable ();
2031 case COMPLEX_TYPE:
2032 switch (TREE_CODE (orig))
2034 case INTEGER_TYPE: case CHAR_TYPE:
2035 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2036 case POINTER_TYPE: case REFERENCE_TYPE:
2037 case REAL_TYPE:
2038 return build2 (COMPLEX_EXPR, type,
2039 fold_convert (TREE_TYPE (type), arg),
2040 fold_convert (TREE_TYPE (type), integer_zero_node));
2041 case COMPLEX_TYPE:
2043 tree rpart, ipart;
2045 if (TREE_CODE (arg) == COMPLEX_EXPR)
2047 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2048 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2049 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2052 arg = save_expr (arg);
2053 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2054 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2055 rpart = fold_convert (TREE_TYPE (type), rpart);
2056 ipart = fold_convert (TREE_TYPE (type), ipart);
2057 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2060 default:
2061 gcc_unreachable ();
2064 case VECTOR_TYPE:
2065 if (integer_zerop (arg))
2066 return build_zero_vector (type);
2067 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2068 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2069 || TREE_CODE (orig) == VECTOR_TYPE);
2070 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2072 case VOID_TYPE:
2073 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2075 default:
2076 gcc_unreachable ();
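/* Illustrative sketch (standalone, not part of this file): the
   COMPLEX_TYPE arm above converts the real and imaginary parts
   separately, which matches the C99 semantics of converting the whole
   value:  */
#if 0
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  float complex fc = 1.5f + 2.5f * I;
  double complex whole = (double complex) fc;
  double complex parts = (double) crealf (fc)
                         + (double) cimagf (fc) * I;
  printf ("%d\n", whole == parts);   /* prints 1 */
  return 0;
}
#endif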
2080 /* Return false if expr can be assumed not to be an lvalue, true
2081 otherwise. */
2083 static bool
2084 maybe_lvalue_p (tree x)
2086 /* We only need to wrap lvalue tree codes. */
2087 switch (TREE_CODE (x))
2089 case VAR_DECL:
2090 case PARM_DECL:
2091 case RESULT_DECL:
2092 case LABEL_DECL:
2093 case FUNCTION_DECL:
2094 case SSA_NAME:
2096 case COMPONENT_REF:
2097 case INDIRECT_REF:
2098 case ALIGN_INDIRECT_REF:
2099 case MISALIGNED_INDIRECT_REF:
2100 case ARRAY_REF:
2101 case ARRAY_RANGE_REF:
2102 case BIT_FIELD_REF:
2103 case OBJ_TYPE_REF:
2105 case REALPART_EXPR:
2106 case IMAGPART_EXPR:
2107 case PREINCREMENT_EXPR:
2108 case PREDECREMENT_EXPR:
2109 case SAVE_EXPR:
2110 case TRY_CATCH_EXPR:
2111 case WITH_CLEANUP_EXPR:
2112 case COMPOUND_EXPR:
2113 case MODIFY_EXPR:
2114 case TARGET_EXPR:
2115 case COND_EXPR:
2116 case BIND_EXPR:
2117 case MIN_EXPR:
2118 case MAX_EXPR:
2119 break;
2121 default:
2122 /* Assume the worst for front-end tree codes. */
2123 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2124 break;
2125 return false;
2128 return true;
2131 /* Return an expr equal to X but certainly not valid as an lvalue. */
2133 tree
2134 non_lvalue (tree x)
2136 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2137 us. */
2138 if (in_gimple_form)
2139 return x;
2141 if (! maybe_lvalue_p (x))
2142 return x;
2143 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2146 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2147 Zero means allow extended lvalues. */
2149 int pedantic_lvalues;
2151 /* When pedantic, return an expr equal to X but certainly not valid as a
2152 pedantic lvalue. Otherwise, return X. */
2154 static tree
2155 pedantic_non_lvalue (tree x)
2157 if (pedantic_lvalues)
2158 return non_lvalue (x);
2159 else
2160 return x;
2163 /* Given a tree comparison code, return the code that is the logical inverse
2164 of the given code. It is not safe to do this for floating-point
2165 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2166 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2168 enum tree_code
2169 invert_tree_comparison (enum tree_code code, bool honor_nans)
2171 if (honor_nans && flag_trapping_math)
2172 return ERROR_MARK;
2174 switch (code)
2176 case EQ_EXPR:
2177 return NE_EXPR;
2178 case NE_EXPR:
2179 return EQ_EXPR;
2180 case GT_EXPR:
2181 return honor_nans ? UNLE_EXPR : LE_EXPR;
2182 case GE_EXPR:
2183 return honor_nans ? UNLT_EXPR : LT_EXPR;
2184 case LT_EXPR:
2185 return honor_nans ? UNGE_EXPR : GE_EXPR;
2186 case LE_EXPR:
2187 return honor_nans ? UNGT_EXPR : GT_EXPR;
2188 case LTGT_EXPR:
2189 return UNEQ_EXPR;
2190 case UNEQ_EXPR:
2191 return LTGT_EXPR;
2192 case UNGT_EXPR:
2193 return LE_EXPR;
2194 case UNGE_EXPR:
2195 return LT_EXPR;
2196 case UNLT_EXPR:
2197 return GE_EXPR;
2198 case UNLE_EXPR:
2199 return GT_EXPR;
2200 case ORDERED_EXPR:
2201 return UNORDERED_EXPR;
2202 case UNORDERED_EXPR:
2203 return ORDERED_EXPR;
2204 default:
2205 gcc_unreachable ();
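/* Illustrative sketch (standalone, not part of this file): why the
   honor_nans cases above answer UNGE_EXPR and friends -- with a NaN
   operand, !(x < y) is not x >= y, it is "unordered or >=":  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = nan (""), y = 0.0;
  printf ("!(x < y) = %d, x >= y = %d\n", !(x < y), x >= y);   /* 1, 0 */
  return 0;
}
#endif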
2209 /* Similar, but return the comparison that results if the operands are
2210 swapped. This is safe for floating-point. */
2212 enum tree_code
2213 swap_tree_comparison (enum tree_code code)
2215 switch (code)
2217 case EQ_EXPR:
2218 case NE_EXPR:
2219 case ORDERED_EXPR:
2220 case UNORDERED_EXPR:
2221 case LTGT_EXPR:
2222 case UNEQ_EXPR:
2223 return code;
2224 case GT_EXPR:
2225 return LT_EXPR;
2226 case GE_EXPR:
2227 return LE_EXPR;
2228 case LT_EXPR:
2229 return GT_EXPR;
2230 case LE_EXPR:
2231 return GE_EXPR;
2232 case UNGT_EXPR:
2233 return UNLT_EXPR;
2234 case UNGE_EXPR:
2235 return UNLE_EXPR;
2236 case UNLT_EXPR:
2237 return UNGT_EXPR;
2238 case UNLE_EXPR:
2239 return UNGE_EXPR;
2240 default:
2241 gcc_unreachable ();
2246 /* Convert a comparison tree code from an enum tree_code representation
2247 into a compcode bit-based encoding. This function is the inverse of
2248 compcode_to_comparison. */
2250 static enum comparison_code
2251 comparison_to_compcode (enum tree_code code)
2253 switch (code)
2255 case LT_EXPR:
2256 return COMPCODE_LT;
2257 case EQ_EXPR:
2258 return COMPCODE_EQ;
2259 case LE_EXPR:
2260 return COMPCODE_LE;
2261 case GT_EXPR:
2262 return COMPCODE_GT;
2263 case NE_EXPR:
2264 return COMPCODE_NE;
2265 case GE_EXPR:
2266 return COMPCODE_GE;
2267 case ORDERED_EXPR:
2268 return COMPCODE_ORD;
2269 case UNORDERED_EXPR:
2270 return COMPCODE_UNORD;
2271 case UNLT_EXPR:
2272 return COMPCODE_UNLT;
2273 case UNEQ_EXPR:
2274 return COMPCODE_UNEQ;
2275 case UNLE_EXPR:
2276 return COMPCODE_UNLE;
2277 case UNGT_EXPR:
2278 return COMPCODE_UNGT;
2279 case LTGT_EXPR:
2280 return COMPCODE_LTGT;
2281 case UNGE_EXPR:
2282 return COMPCODE_UNGE;
2283 default:
2284 gcc_unreachable ();
2288 /* Convert a compcode bit-based encoding of a comparison operator back
2289 to GCC's enum tree_code representation. This function is the
2290 inverse of comparison_to_compcode. */
2292 static enum tree_code
2293 compcode_to_comparison (enum comparison_code code)
2295 switch (code)
2297 case COMPCODE_LT:
2298 return LT_EXPR;
2299 case COMPCODE_EQ:
2300 return EQ_EXPR;
2301 case COMPCODE_LE:
2302 return LE_EXPR;
2303 case COMPCODE_GT:
2304 return GT_EXPR;
2305 case COMPCODE_NE:
2306 return NE_EXPR;
2307 case COMPCODE_GE:
2308 return GE_EXPR;
2309 case COMPCODE_ORD:
2310 return ORDERED_EXPR;
2311 case COMPCODE_UNORD:
2312 return UNORDERED_EXPR;
2313 case COMPCODE_UNLT:
2314 return UNLT_EXPR;
2315 case COMPCODE_UNEQ:
2316 return UNEQ_EXPR;
2317 case COMPCODE_UNLE:
2318 return UNLE_EXPR;
2319 case COMPCODE_UNGT:
2320 return UNGT_EXPR;
2321 case COMPCODE_LTGT:
2322 return LTGT_EXPR;
2323 case COMPCODE_UNGE:
2324 return UNGE_EXPR;
2325 default:
2326 gcc_unreachable ();
2330 /* Return a tree for the comparison which is the combination of
2331 doing the AND or OR (depending on CODE) of the two operations LCODE
2332 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2333 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2334 if this makes the transformation invalid. */
2336 tree
2337 combine_comparisons (enum tree_code code, enum tree_code lcode,
2338 enum tree_code rcode, tree truth_type,
2339 tree ll_arg, tree lr_arg)
2341 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2342 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2343 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2344 enum comparison_code compcode;
2346 switch (code)
2348 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2349 compcode = lcompcode & rcompcode;
2350 break;
2352 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2353 compcode = lcompcode | rcompcode;
2354 break;
2356 default:
2357 return NULL_TREE;
2360 if (!honor_nans)
2362 /* Eliminate unordered comparisons, as well as LTGT and ORD
2363 which are not used unless the mode has NaNs. */
2364 compcode &= ~COMPCODE_UNORD;
2365 if (compcode == COMPCODE_LTGT)
2366 compcode = COMPCODE_NE;
2367 else if (compcode == COMPCODE_ORD)
2368 compcode = COMPCODE_TRUE;
2370 else if (flag_trapping_math)
2372 /* Check that the original operation and the optimized ones will trap
2373 under the same condition. */
2374 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2375 && (lcompcode != COMPCODE_EQ)
2376 && (lcompcode != COMPCODE_ORD);
2377 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2378 && (rcompcode != COMPCODE_EQ)
2379 && (rcompcode != COMPCODE_ORD);
2380 bool trap = (compcode & COMPCODE_UNORD) == 0
2381 && (compcode != COMPCODE_EQ)
2382 && (compcode != COMPCODE_ORD);
2384 /* In a short-circuited boolean expression the LHS might be
2385 such that the RHS, if evaluated, will never trap. For
2386 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2387 if neither x nor y is NaN. (This is a mixed blessing: for
2388 example, the expression above will never trap, hence
2389 optimizing it to x < y would be invalid). */
2390 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2391 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2392 rtrap = false;
2394 /* If the comparison was short-circuited, and only the RHS
2395 trapped, we may now generate a spurious trap. */
2396 if (rtrap && !ltrap
2397 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2398 return NULL_TREE;
2400 /* If we changed the conditions that cause a trap, we lose. */
2401 if ((ltrap || rtrap) != trap)
2402 return NULL_TREE;
2405 if (compcode == COMPCODE_TRUE)
2406 return constant_boolean_node (true, truth_type);
2407 else if (compcode == COMPCODE_FALSE)
2408 return constant_boolean_node (false, truth_type);
2409 else
2410 return fold_build2 (compcode_to_comparison (compcode),
2411 truth_type, ll_arg, lr_arg);
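/* Illustrative sketch (standalone, not part of this file): the & and |
   above work because the compcode encoding declared at the top of this
   file is bitwise, e.g. COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and
   COMPCODE_GE & COMPCODE_LE == COMPCODE_EQ:  */
#if 0
#include <stdio.h>

int
main (void)
{
  enum { CC_LT = 1, CC_EQ = 2, CC_LE = 3, CC_GE = 6 };  /* mirrors enum comparison_code */
  /* (a < b) || (a == b)  ->  a <= b  */
  printf ("%d\n", (CC_LT | CC_EQ) == CC_LE);   /* 1 */
  /* (a >= b) && (a <= b) ->  a == b  */
  printf ("%d\n", (CC_GE & CC_LE) == CC_EQ);   /* 1 */
  return 0;
}
#endif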
2414 /* Return nonzero if CODE is a tree code that represents a truth value. */
2416 static int
2417 truth_value_p (enum tree_code code)
2419 return (TREE_CODE_CLASS (code) == tcc_comparison
2420 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2421 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2422 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2425 /* Return nonzero if two operands (typically of the same tree node)
2426 are necessarily equal. If either argument has side-effects this
2427 function returns zero. FLAGS modifies behavior as follows:
2429 If OEP_ONLY_CONST is set, only return nonzero for constants.
2430 This function tests whether the operands are indistinguishable;
2431 it does not test whether they are equal using C's == operation.
2432 The distinction is important for IEEE floating point, because
2433 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2434 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2436 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2437 even though it may hold multiple values during a function.
2438 This is because a GCC tree node guarantees that nothing else is
2439 executed between the evaluation of its "operands" (which may often
2440 be evaluated in arbitrary order). Hence if the operands themselves
2441 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2442 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2443 unset means assuming isochronic (or instantaneous) tree equivalence.
2444 Unless comparing arbitrary expression trees, such as from different
2445 statements, this flag can usually be left unset.
2447 If OEP_PURE_SAME is set, then pure functions with identical arguments
2448 are considered the same. It is used when the caller has other ways
2449 to ensure that global memory is unchanged in between. */
2451 int
2452 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2454 /* If either is ERROR_MARK, they aren't equal. */
2455 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2456 return 0;
2458 /* If both types don't have the same signedness, then we can't consider
2459 them equal. We must check this before the STRIP_NOPS calls
2460 because they may change the signedness of the arguments. */
2461 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2462 return 0;
2464 STRIP_NOPS (arg0);
2465 STRIP_NOPS (arg1);
2467 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2468 /* This is needed for conversions and for COMPONENT_REF.
2469 Might as well play it safe and always test this. */
2470 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2471 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2472 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2473 return 0;
2475 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2476 We don't care about side effects in that case because the SAVE_EXPR
2477 takes care of that for us. In all other cases, two expressions are
2478 equal if they have no side effects. If we have two identical
2479 expressions with side effects that should be treated the same due
2480 to the only side effects being identical SAVE_EXPR's, that will
2481 be detected in the recursive calls below. */
2482 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2483 && (TREE_CODE (arg0) == SAVE_EXPR
2484 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2485 return 1;
2487 /* Next handle constant cases, those for which we can return 1 even
2488 if ONLY_CONST is set. */
2489 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2490 switch (TREE_CODE (arg0))
2492 case INTEGER_CST:
2493 return (! TREE_CONSTANT_OVERFLOW (arg0)
2494 && ! TREE_CONSTANT_OVERFLOW (arg1)
2495 && tree_int_cst_equal (arg0, arg1));
2497 case REAL_CST:
2498 return (! TREE_CONSTANT_OVERFLOW (arg0)
2499 && ! TREE_CONSTANT_OVERFLOW (arg1)
2500 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2501 TREE_REAL_CST (arg1)));
2503 case VECTOR_CST:
2505 tree v1, v2;
2507 if (TREE_CONSTANT_OVERFLOW (arg0)
2508 || TREE_CONSTANT_OVERFLOW (arg1))
2509 return 0;
2511 v1 = TREE_VECTOR_CST_ELTS (arg0);
2512 v2 = TREE_VECTOR_CST_ELTS (arg1);
2513 while (v1 && v2)
2515 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2516 flags))
2517 return 0;
2518 v1 = TREE_CHAIN (v1);
2519 v2 = TREE_CHAIN (v2);
2522 return v1 == v2;
2525 case COMPLEX_CST:
2526 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2527 flags)
2528 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2529 flags));
2531 case STRING_CST:
2532 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2533 && ! memcmp (TREE_STRING_POINTER (arg0),
2534 TREE_STRING_POINTER (arg1),
2535 TREE_STRING_LENGTH (arg0)));
2537 case ADDR_EXPR:
2538 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2539 0);
2540 default:
2541 break;
2544 if (flags & OEP_ONLY_CONST)
2545 return 0;
2547 /* Define macros to test an operand from arg0 and arg1 for equality and a
2548 variant that allows null and views null as being different from any
2549 non-null value. In the latter case, if either is null, then both
2550 must be; otherwise, do the normal comparison. */
2551 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2552 TREE_OPERAND (arg1, N), flags)
2554 #define OP_SAME_WITH_NULL(N) \
2555 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2556 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2558 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2560 case tcc_unary:
2561 /* Two conversions are equal only if signedness and modes match. */
2562 switch (TREE_CODE (arg0))
2564 case NOP_EXPR:
2565 case CONVERT_EXPR:
2566 case FIX_CEIL_EXPR:
2567 case FIX_TRUNC_EXPR:
2568 case FIX_FLOOR_EXPR:
2569 case FIX_ROUND_EXPR:
2570 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2571 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2572 return 0;
2573 break;
2574 default:
2575 break;
2578 return OP_SAME (0);
2581 case tcc_comparison:
2582 case tcc_binary:
2583 if (OP_SAME (0) && OP_SAME (1))
2584 return 1;
2586 /* For commutative ops, allow the other order. */
2587 return (commutative_tree_code (TREE_CODE (arg0))
2588 && operand_equal_p (TREE_OPERAND (arg0, 0),
2589 TREE_OPERAND (arg1, 1), flags)
2590 && operand_equal_p (TREE_OPERAND (arg0, 1),
2591 TREE_OPERAND (arg1, 0), flags));
2593 case tcc_reference:
2594 /* If either of the pointer (or reference) expressions we are
2595 dereferencing contain a side effect, these cannot be equal. */
2596 if (TREE_SIDE_EFFECTS (arg0)
2597 || TREE_SIDE_EFFECTS (arg1))
2598 return 0;
2600 switch (TREE_CODE (arg0))
2602 case INDIRECT_REF:
2603 case ALIGN_INDIRECT_REF:
2604 case MISALIGNED_INDIRECT_REF:
2605 case REALPART_EXPR:
2606 case IMAGPART_EXPR:
2607 return OP_SAME (0);
2609 case ARRAY_REF:
2610 case ARRAY_RANGE_REF:
2611 /* Operands 2 and 3 may be null. */
2612 return (OP_SAME (0)
2613 && OP_SAME (1)
2614 && OP_SAME_WITH_NULL (2)
2615 && OP_SAME_WITH_NULL (3));
2617 case COMPONENT_REF:
2618 /* Handle operand 2 the same as for ARRAY_REF. */
2619 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2621 case BIT_FIELD_REF:
2622 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2624 default:
2625 return 0;
2628 case tcc_expression:
2629 switch (TREE_CODE (arg0))
2631 case ADDR_EXPR:
2632 case TRUTH_NOT_EXPR:
2633 return OP_SAME (0);
2635 case TRUTH_ANDIF_EXPR:
2636 case TRUTH_ORIF_EXPR:
2637 return OP_SAME (0) && OP_SAME (1);
2639 case TRUTH_AND_EXPR:
2640 case TRUTH_OR_EXPR:
2641 case TRUTH_XOR_EXPR:
2642 if (OP_SAME (0) && OP_SAME (1))
2643 return 1;
2645 /* Otherwise take into account this is a commutative operation. */
2646 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2647 TREE_OPERAND (arg1, 1), flags)
2648 && operand_equal_p (TREE_OPERAND (arg0, 1),
2649 TREE_OPERAND (arg1, 0), flags));
2651 case CALL_EXPR:
2652 /* If the CALL_EXPRs call different functions, then they
2653 clearly cannot be equal. */
2654 if (!OP_SAME (0))
2655 return 0;
2658 unsigned int cef = call_expr_flags (arg0);
2659 if (flags & OEP_PURE_SAME)
2660 cef &= ECF_CONST | ECF_PURE;
2661 else
2662 cef &= ECF_CONST;
2663 if (!cef)
2664 return 0;
2667 /* Now see if all the arguments are the same. operand_equal_p
2668 does not handle TREE_LIST, so we walk the operands here
2669 feeding them to operand_equal_p. */
2670 arg0 = TREE_OPERAND (arg0, 1);
2671 arg1 = TREE_OPERAND (arg1, 1);
2672 while (arg0 && arg1)
2674 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2675 flags))
2676 return 0;
2678 arg0 = TREE_CHAIN (arg0);
2679 arg1 = TREE_CHAIN (arg1);
2682 /* If we get here and both argument lists are exhausted
2683 then the CALL_EXPRs are equal. */
2684 return ! (arg0 || arg1);
2686 default:
2687 return 0;
2690 case tcc_declaration:
2691 /* Consider __builtin_sqrt equal to sqrt. */
2692 return (TREE_CODE (arg0) == FUNCTION_DECL
2693 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2694 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2695 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2697 default:
2698 return 0;
2701 #undef OP_SAME
2702 #undef OP_SAME_WITH_NULL
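/* Illustrative sketch (standalone, not part of this file): the two IEEE
   caveats (1) and (2) in the comment above operand_equal_p, observable
   on any IEEE host:  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, n = nan ("");
  /* (1) -0.0 == 0.0 compares equal, yet the values are distinguishable.  */
  printf ("%d %d\n", nz == pz, signbit (nz) == signbit (pz));   /* 1 0 */
  /* (2) a NaN compares unequal even to itself.  */
  printf ("%d\n", n == n);                                      /* 0 */
  return 0;
}
#endif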
2705 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2706 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2708 When in doubt, return 0. */
2710 static int
2711 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2713 int unsignedp1, unsignedpo;
2714 tree primarg0, primarg1, primother;
2715 unsigned int correct_width;
2717 if (operand_equal_p (arg0, arg1, 0))
2718 return 1;
2720 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2721 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2722 return 0;
2724 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2725 and see if the inner values are the same. This removes any
2726 signedness comparison, which doesn't matter here. */
2727 primarg0 = arg0, primarg1 = arg1;
2728 STRIP_NOPS (primarg0);
2729 STRIP_NOPS (primarg1);
2730 if (operand_equal_p (primarg0, primarg1, 0))
2731 return 1;
2733 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2734 actual comparison operand, ARG0.
2736 First throw away any conversions to wider types
2737 already present in the operands. */
2739 primarg1 = get_narrower (arg1, &unsignedp1);
2740 primother = get_narrower (other, &unsignedpo);
2742 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2743 if (unsignedp1 == unsignedpo
2744 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2745 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2747 tree type = TREE_TYPE (arg0);
2749 /* Make sure shorter operand is extended the right way
2750 to match the longer operand. */
2751 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2752 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2754 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2755 return 1;
2758 return 0;
2761 /* See if ARG is an expression that is either a comparison or is performing
2762 arithmetic on comparisons. The comparisons must only be comparing
2763 two different values, which will be stored in *CVAL1 and *CVAL2; if
2764 they are nonzero it means that some operands have already been found.
2765 No variables may be used anywhere else in the expression except in the
2766 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2767 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2769 If this is true, return 1. Otherwise, return zero. */
2771 static int
2772 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2774 enum tree_code code = TREE_CODE (arg);
2775 enum tree_code_class class = TREE_CODE_CLASS (code);
2777 /* We can handle some of the tcc_expression cases here. */
2778 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2779 class = tcc_unary;
2780 else if (class == tcc_expression
2781 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2782 || code == COMPOUND_EXPR))
2783 class = tcc_binary;
2785 else if (class == tcc_expression && code == SAVE_EXPR
2786 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2788 /* If we've already found a CVAL1 or CVAL2, this expression is
2789 too complex to handle. */
2790 if (*cval1 || *cval2)
2791 return 0;
2793 class = tcc_unary;
2794 *save_p = 1;
2797 switch (class)
2799 case tcc_unary:
2800 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2802 case tcc_binary:
2803 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2804 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2805 cval1, cval2, save_p));
2807 case tcc_constant:
2808 return 1;
2810 case tcc_expression:
2811 if (code == COND_EXPR)
2812 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2813 cval1, cval2, save_p)
2814 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2815 cval1, cval2, save_p)
2816 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2817 cval1, cval2, save_p));
2818 return 0;
2820 case tcc_comparison:
2821 /* First see if we can handle the first operand, then the second. For
2822 the second operand, we know *CVAL1 can't be zero. It must be that
2823 one side of the comparison is each of the values; test for the
2824 case where this isn't true by failing if the two operands
2825 are the same. */
2827 if (operand_equal_p (TREE_OPERAND (arg, 0),
2828 TREE_OPERAND (arg, 1), 0))
2829 return 0;
2831 if (*cval1 == 0)
2832 *cval1 = TREE_OPERAND (arg, 0);
2833 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2834 ;
2835 else if (*cval2 == 0)
2836 *cval2 = TREE_OPERAND (arg, 0);
2837 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2838 ;
2839 else
2840 return 0;
2842 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2843 ;
2844 else if (*cval2 == 0)
2845 *cval2 = TREE_OPERAND (arg, 1);
2846 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2847 ;
2848 else
2849 return 0;
2851 return 1;
2853 default:
2854 return 0;
2858 /* ARG is a tree that is known to contain just arithmetic operations and
2859 comparisons. Evaluate the operations in the tree substituting NEW0 for
2860 any occurrence of OLD0 as an operand of a comparison and likewise for
2861 NEW1 and OLD1. */
2863 static tree
2864 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2866 tree type = TREE_TYPE (arg);
2867 enum tree_code code = TREE_CODE (arg);
2868 enum tree_code_class class = TREE_CODE_CLASS (code);
2870 /* We can handle some of the tcc_expression cases here. */
2871 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2872 class = tcc_unary;
2873 else if (class == tcc_expression
2874 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2875 class = tcc_binary;
2877 switch (class)
2879 case tcc_unary:
2880 return fold_build1 (code, type,
2881 eval_subst (TREE_OPERAND (arg, 0),
2882 old0, new0, old1, new1));
2884 case tcc_binary:
2885 return fold_build2 (code, type,
2886 eval_subst (TREE_OPERAND (arg, 0),
2887 old0, new0, old1, new1),
2888 eval_subst (TREE_OPERAND (arg, 1),
2889 old0, new0, old1, new1));
2891 case tcc_expression:
2892 switch (code)
2894 case SAVE_EXPR:
2895 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2897 case COMPOUND_EXPR:
2898 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2900 case COND_EXPR:
2901 return fold_build3 (code, type,
2902 eval_subst (TREE_OPERAND (arg, 0),
2903 old0, new0, old1, new1),
2904 eval_subst (TREE_OPERAND (arg, 1),
2905 old0, new0, old1, new1),
2906 eval_subst (TREE_OPERAND (arg, 2),
2907 old0, new0, old1, new1));
2908 default:
2909 break;
2911 /* Fall through - ??? */
2913 case tcc_comparison:
2915 tree arg0 = TREE_OPERAND (arg, 0);
2916 tree arg1 = TREE_OPERAND (arg, 1);
2918 /* We need to check both for exact equality and tree equality. The
2919 former will be true if the operand has a side-effect. In that
2920 case, we know the operand occurred exactly once. */
2922 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2923 arg0 = new0;
2924 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2925 arg0 = new1;
2927 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2928 arg1 = new0;
2929 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2930 arg1 = new1;
2932 return fold_build2 (code, type, arg0, arg1);
2935 default:
2936 return arg;
2940 /* Return a tree for the case when the result of an expression is RESULT
2941 converted to TYPE and OMITTED was previously an operand of the expression
2942 but is now not needed (e.g., we folded OMITTED * 0).
2944 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2945 the conversion of RESULT to TYPE. */
2947 tree
2948 omit_one_operand (tree type, tree result, tree omitted)
2950 tree t = fold_convert (type, result);
2952 if (TREE_SIDE_EFFECTS (omitted))
2953 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2955 return non_lvalue (t);
2958 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2960 static tree
2961 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2963 tree t = fold_convert (type, result);
2965 if (TREE_SIDE_EFFECTS (omitted))
2966 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2968 return pedantic_non_lvalue (t);
2971 /* Return a tree for the case when the result of an expression is RESULT
2972 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2973 of the expression but are now not needed.
2975 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2976 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2977 evaluated before OMITTED2. Otherwise, if neither has side effects,
2978 just do the conversion of RESULT to TYPE. */
2980 tree
2981 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2983 tree t = fold_convert (type, result);
2985 if (TREE_SIDE_EFFECTS (omitted2))
2986 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2987 if (TREE_SIDE_EFFECTS (omitted1))
2988 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2990 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2994 /* Return a simplified tree node for the truth-negation of ARG. This
2995 never alters ARG itself. We assume that ARG is an operation that
2996 returns a truth value (0 or 1).
2998 FIXME: one would think we would fold the result, but it causes
2999 problems with the dominator optimizer. */
3000 tree
3001 invert_truthvalue (tree arg)
3003 tree type = TREE_TYPE (arg);
3004 enum tree_code code = TREE_CODE (arg);
3006 if (code == ERROR_MARK)
3007 return arg;
3009 /* If this is a comparison, we can simply invert it, except for
3010 floating-point non-equality comparisons, in which case we just
3011 enclose a TRUTH_NOT_EXPR around what we have. */
3013 if (TREE_CODE_CLASS (code) == tcc_comparison)
3015 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3016 if (FLOAT_TYPE_P (op_type)
3017 && flag_trapping_math
3018 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3019 && code != NE_EXPR && code != EQ_EXPR)
3020 return build1 (TRUTH_NOT_EXPR, type, arg);
3021 else
3023 code = invert_tree_comparison (code,
3024 HONOR_NANS (TYPE_MODE (op_type)));
3025 if (code == ERROR_MARK)
3026 return build1 (TRUTH_NOT_EXPR, type, arg);
3027 else
3028 return build2 (code, type,
3029 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3033 switch (code)
3035 case INTEGER_CST:
3036 return constant_boolean_node (integer_zerop (arg), type);
3038 case TRUTH_AND_EXPR:
3039 return build2 (TRUTH_OR_EXPR, type,
3040 invert_truthvalue (TREE_OPERAND (arg, 0)),
3041 invert_truthvalue (TREE_OPERAND (arg, 1)));
3043 case TRUTH_OR_EXPR:
3044 return build2 (TRUTH_AND_EXPR, type,
3045 invert_truthvalue (TREE_OPERAND (arg, 0)),
3046 invert_truthvalue (TREE_OPERAND (arg, 1)));
3048 case TRUTH_XOR_EXPR:
3049 /* Here we can invert either operand. We invert the first operand
3050 unless the second operand is a TRUTH_NOT_EXPR in which case our
3051 result is the XOR of the first operand with the inside of the
3052 negation of the second operand. */
3054 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3055 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3056 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3057 else
3058 return build2 (TRUTH_XOR_EXPR, type,
3059 invert_truthvalue (TREE_OPERAND (arg, 0)),
3060 TREE_OPERAND (arg, 1));
3062 case TRUTH_ANDIF_EXPR:
3063 return build2 (TRUTH_ORIF_EXPR, type,
3064 invert_truthvalue (TREE_OPERAND (arg, 0)),
3065 invert_truthvalue (TREE_OPERAND (arg, 1)));
3067 case TRUTH_ORIF_EXPR:
3068 return build2 (TRUTH_ANDIF_EXPR, type,
3069 invert_truthvalue (TREE_OPERAND (arg, 0)),
3070 invert_truthvalue (TREE_OPERAND (arg, 1)));
3072 case TRUTH_NOT_EXPR:
3073 return TREE_OPERAND (arg, 0);
3075 case COND_EXPR:
3077 tree arg1 = TREE_OPERAND (arg, 1);
3078 tree arg2 = TREE_OPERAND (arg, 2);
3079 /* A COND_EXPR may have a throw as one operand, which
3080 then has void type. Just leave void operands
3081 as they are. */
3082 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3083 VOID_TYPE_P (TREE_TYPE (arg1))
3084 ? arg1 : invert_truthvalue (arg1),
3085 VOID_TYPE_P (TREE_TYPE (arg2))
3086 ? arg2 : invert_truthvalue (arg2));
3089 case COMPOUND_EXPR:
3090 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3091 invert_truthvalue (TREE_OPERAND (arg, 1)));
3093 case NON_LVALUE_EXPR:
3094 return invert_truthvalue (TREE_OPERAND (arg, 0));
3096 case NOP_EXPR:
3097 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3098 break;
3100 case CONVERT_EXPR:
3101 case FLOAT_EXPR:
3102 return build1 (TREE_CODE (arg), type,
3103 invert_truthvalue (TREE_OPERAND (arg, 0)));
3105 case BIT_AND_EXPR:
3106 if (!integer_onep (TREE_OPERAND (arg, 1)))
3107 break;
3108 return build2 (EQ_EXPR, type, arg,
3109 fold_convert (type, integer_zero_node));
3111 case SAVE_EXPR:
3112 return build1 (TRUTH_NOT_EXPR, type, arg);
3114 case CLEANUP_POINT_EXPR:
3115 return build1 (CLEANUP_POINT_EXPR, type,
3116 invert_truthvalue (TREE_OPERAND (arg, 0)));
3118 default:
3119 break;
3121 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3122 return build1 (TRUTH_NOT_EXPR, type, arg);
3125 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3126 operands are another bit-wise operation with a common input. If so,
3127 distribute the bit operations to save an operation and possibly two if
3128 constants are involved. For example, convert
3129 (A | B) & (A | C) into A | (B & C)
3130 Further simplification will occur if B and C are constants.
3132 If this optimization cannot be done, 0 will be returned. */
3134 static tree
3135 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3137 tree common;
3138 tree left, right;
3140 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3141 || TREE_CODE (arg0) == code
3142 || (TREE_CODE (arg0) != BIT_AND_EXPR
3143 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3144 return 0;
3146 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3148 common = TREE_OPERAND (arg0, 0);
3149 left = TREE_OPERAND (arg0, 1);
3150 right = TREE_OPERAND (arg1, 1);
3152 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3154 common = TREE_OPERAND (arg0, 0);
3155 left = TREE_OPERAND (arg0, 1);
3156 right = TREE_OPERAND (arg1, 0);
3158 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3160 common = TREE_OPERAND (arg0, 1);
3161 left = TREE_OPERAND (arg0, 0);
3162 right = TREE_OPERAND (arg1, 1);
3164 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3166 common = TREE_OPERAND (arg0, 1);
3167 left = TREE_OPERAND (arg0, 0);
3168 right = TREE_OPERAND (arg1, 0);
3170 else
3171 return 0;
3173 return fold_build2 (TREE_CODE (arg0), type, common,
3174 fold_build2 (code, type, left, right));
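/* Illustrative sketch (standalone, not part of this file): the identity
   being exploited above, checked on concrete bit patterns:  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned a = 0x5a, b = 0x33, c = 0x0f;
  /* (A | B) & (A | C) == A | (B & C): one AND and one OR instead of
     two ORs and one AND.  */
  printf ("%d\n", ((a | b) & (a | c)) == (a | (b & c)));   /* 1 */
  return 0;
}
#endif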
3177 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3178 with code CODE. This optimization is unsafe. */
3179 static tree
3180 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3182 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3183 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3185 /* (A / C) +- (B / C) -> (A +- B) / C. */
3186 if (mul0 == mul1
3187 && operand_equal_p (TREE_OPERAND (arg0, 1),
3188 TREE_OPERAND (arg1, 1), 0))
3189 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3190 fold_build2 (code, type,
3191 TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 0)),
3193 TREE_OPERAND (arg0, 1));
3195 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3196 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3197 TREE_OPERAND (arg1, 0), 0)
3198 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3199 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3201 REAL_VALUE_TYPE r0, r1;
3202 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3203 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3204 if (!mul0)
3205 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3206 if (!mul1)
3207 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3208 real_arithmetic (&r0, code, &r0, &r1);
3209 return fold_build2 (MULT_EXPR, type,
3210 TREE_OPERAND (arg0, 0),
3211 build_real (type, r0));
3214 return NULL_TREE;
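/* Illustrative sketch (standalone, not part of this file): why the
   comment above calls this unsafe -- (A + B)/C can overflow where
   A/C + B/C does not, so the two forms are not equivalent in FP:  */
#if 0
#include <stdio.h>

int
main (void)
{
  double a = 1e308, b = 1e308, c = 2.0;
  printf ("%g vs %g\n", a / c + b / c, (a + b) / c);   /* 1e308 vs inf */
  return 0;
}
#endif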
3217 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3218 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3220 static tree
3221 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3222 int unsignedp)
3224 tree result;
3226 if (bitpos == 0)
3228 tree size = TYPE_SIZE (TREE_TYPE (inner));
3229 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3230 || POINTER_TYPE_P (TREE_TYPE (inner)))
3231 && host_integerp (size, 0)
3232 && tree_low_cst (size, 0) == bitsize)
3233 return fold_convert (type, inner);
3236 result = build3 (BIT_FIELD_REF, type, inner,
3237 size_int (bitsize), bitsize_int (bitpos));
3239 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3241 return result;
3244 /* Optimize a bit-field compare.
3246 There are two cases: First is a compare against a constant and the
3247 second is a comparison of two items where the fields are at the same
3248 bit position relative to the start of a chunk (byte, halfword, word)
3249 large enough to contain it. In these cases we can avoid the shift
3250 implicit in bitfield extractions.
3252 For constants, we emit a compare of the shifted constant with the
3253 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3254 compared. For two fields at the same position, we do the ANDs with the
3255 similar mask and compare the result of the ANDs.
3257 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3258 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3259 are the left and right operands of the comparison, respectively.
3261 If the optimization described above can be done, we return the resulting
3262 tree. Otherwise we return zero. */
3264 static tree
3265 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3266 tree lhs, tree rhs)
3268 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3269 tree type = TREE_TYPE (lhs);
3270 tree signed_type, unsigned_type;
3271 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3272 enum machine_mode lmode, rmode, nmode;
3273 int lunsignedp, runsignedp;
3274 int lvolatilep = 0, rvolatilep = 0;
3275 tree linner, rinner = NULL_TREE;
3276 tree mask;
3277 tree offset;
3279 /* Get all the information about the extractions being done. If the bit size
3280 is the same as the size of the underlying object, we aren't doing an
3281 extraction at all and so can do nothing. We also don't want to
3282 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3283 then will no longer be able to replace it. */
3284 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3285 &lunsignedp, &lvolatilep, false);
3286 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3287 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3288 return 0;
3290 if (!const_p)
3292 /* If this is not a constant, we can only do something if bit positions,
3293 sizes, and signedness are the same. */
3294 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3295 &runsignedp, &rvolatilep, false);
3297 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3298 || lunsignedp != runsignedp || offset != 0
3299 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3300 return 0;
3303 /* See if we can find a mode to refer to this field. We should be able to,
3304 but fail if we can't. */
3305 nmode = get_best_mode (lbitsize, lbitpos,
3306 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3307 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3308 TYPE_ALIGN (TREE_TYPE (rinner))),
3309 word_mode, lvolatilep || rvolatilep);
3310 if (nmode == VOIDmode)
3311 return 0;
3313 /* Set signed and unsigned types of the precision of this mode for the
3314 shifts below. */
3315 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3316 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3318 /* Compute the bit position and size for the new reference and our offset
3319 within it. If the new reference is the same size as the original, we
3320 won't optimize anything, so return zero. */
3321 nbitsize = GET_MODE_BITSIZE (nmode);
3322 nbitpos = lbitpos & ~ (nbitsize - 1);
3323 lbitpos -= nbitpos;
3324 if (nbitsize == lbitsize)
3325 return 0;
3327 if (BYTES_BIG_ENDIAN)
3328 lbitpos = nbitsize - lbitsize - lbitpos;
3330 /* Make the mask to be used against the extracted field. */
3331 mask = build_int_cst (unsigned_type, -1);
3332 mask = force_fit_type (mask, 0, false, false);
3333 mask = fold_convert (unsigned_type, mask);
3334 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3335 mask = const_binop (RSHIFT_EXPR, mask,
3336 size_int (nbitsize - lbitsize - lbitpos), 0);
3338 if (! const_p)
3339 /* If not comparing with constant, just rework the comparison
3340 and return. */
3341 return build2 (code, compare_type,
3342 build2 (BIT_AND_EXPR, unsigned_type,
3343 make_bit_field_ref (linner, unsigned_type,
3344 nbitsize, nbitpos, 1),
3345 mask),
3346 build2 (BIT_AND_EXPR, unsigned_type,
3347 make_bit_field_ref (rinner, unsigned_type,
3348 nbitsize, nbitpos, 1),
3349 mask));
3351 /* Otherwise, we are handling the constant case. See if the constant is too
3352 big for the field. Warn and return a tree for 0 (false) if so. We do
3353 this not only for its own sake, but to avoid having to test for this
3354 error case below. If we didn't, we might generate wrong code.
3356 For unsigned fields, the constant shifted right by the field length should
3357 be all zero. For signed fields, the high-order bits should agree with
3358 the sign bit. */
3360 if (lunsignedp)
3362 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3363 fold_convert (unsigned_type, rhs),
3364 size_int (lbitsize), 0)))
3366 warning (0, "comparison is always %d due to width of bit-field",
3367 code == NE_EXPR);
3368 return constant_boolean_node (code == NE_EXPR, compare_type);
3371 else
3373 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3374 size_int (lbitsize - 1), 0);
3375 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3377 warning (0, "comparison is always %d due to width of bit-field",
3378 code == NE_EXPR);
3379 return constant_boolean_node (code == NE_EXPR, compare_type);
3383 /* Single-bit compares should always be against zero. */
3384 if (lbitsize == 1 && ! integer_zerop (rhs))
3386 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3387 rhs = fold_convert (type, integer_zero_node);
3390 /* Make a new bitfield reference, shift the constant over the
3391 appropriate number of bits and mask it with the computed mask
3392 (in case this was a signed field). If we changed it, make a new one. */
3393 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3394 if (lvolatilep)
3396 TREE_SIDE_EFFECTS (lhs) = 1;
3397 TREE_THIS_VOLATILE (lhs) = 1;
3400 rhs = const_binop (BIT_AND_EXPR,
3401 const_binop (LSHIFT_EXPR,
3402 fold_convert (unsigned_type, rhs),
3403 size_int (lbitpos), 0),
3404 mask, 0);
3406 return build2 (code, compare_type,
3407 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3408 rhs);
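/* Illustrative sketch (standalone, not part of this file; field
   placement hand-picked for a little-endian layout): the shift-free
   compare this function builds -- mask the containing word and compare
   against the shifted constant:  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned w = 0x0050;                /* a 3-bit field at bit 4 holding 5 */
  unsigned mask = 0x7u << 4;
  unsigned field = (w >> 4) & 0x7u;   /* the extraction being avoided */
  printf ("%d %d\n", field == 5, (w & mask) == (5u << 4));   /* 1 1 */
  return 0;
}
#endif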
3411 /* Subroutine for fold_truthop: decode a field reference.
3413 If EXP is a comparison reference, we return the innermost reference.
3415 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3416 set to the starting bit number.
3418 If the innermost field can be completely contained in a mode-sized
3419 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3421 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3422 otherwise it is not changed.
3424 *PUNSIGNEDP is set to the signedness of the field.
3426 *PMASK is set to the mask used. This is either contained in a
3427 BIT_AND_EXPR or derived from the width of the field.
3429 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3431 Return 0 if this is not a component reference or is one that we can't
3432 do anything with. */
3434 static tree
3435 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3436 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3437 int *punsignedp, int *pvolatilep,
3438 tree *pmask, tree *pand_mask)
3440 tree outer_type = 0;
3441 tree and_mask = 0;
3442 tree mask, inner, offset;
3443 tree unsigned_type;
3444 unsigned int precision;
3446 /* All the optimizations using this function assume integer fields.
3447 There are problems with FP fields since the type_for_size call
3448 below can fail for, e.g., XFmode. */
3449 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3450 return 0;
3452 /* We are interested in the bare arrangement of bits, so strip everything
3453 that doesn't affect the machine mode. However, record the type of the
3454 outermost expression if it may matter below. */
3455 if (TREE_CODE (exp) == NOP_EXPR
3456 || TREE_CODE (exp) == CONVERT_EXPR
3457 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3458 outer_type = TREE_TYPE (exp);
3459 STRIP_NOPS (exp);
3461 if (TREE_CODE (exp) == BIT_AND_EXPR)
3463 and_mask = TREE_OPERAND (exp, 1);
3464 exp = TREE_OPERAND (exp, 0);
3465 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3466 if (TREE_CODE (and_mask) != INTEGER_CST)
3467 return 0;
3470 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3471 punsignedp, pvolatilep, false);
3472 if ((inner == exp && and_mask == 0)
3473 || *pbitsize < 0 || offset != 0
3474 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3475 return 0;
3477 /* If the number of bits in the reference is the same as the bitsize of
3478 the outer type, then the outer type gives the signedness. Otherwise
3479 (in case of a small bitfield) the signedness is unchanged. */
3480 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3481 *punsignedp = TYPE_UNSIGNED (outer_type);
3483 /* Compute the mask to access the bitfield. */
3484 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3485 precision = TYPE_PRECISION (unsigned_type);
3487 mask = build_int_cst (unsigned_type, -1);
3488 mask = force_fit_type (mask, 0, false, false);
3490 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3491 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3493 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3494 if (and_mask != 0)
3495 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3496 fold_convert (unsigned_type, and_mask), mask);
3498 *pmask = mask;
3499 *pand_mask = and_mask;
3500 return inner;
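/* Illustrative sketch (standalone, not part of this file): the
   LSHIFT/RSHIFT pair above leaves exactly *PBITSIZE low-order ones,
   i.e. the field mask:  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned precision = 32, bitsize = 5;
  unsigned mask = ~0u;
  mask = (mask << (precision - bitsize)) >> (precision - bitsize);
  printf ("0x%x\n", mask);   /* 0x1f: five low-order ones */
  return 0;
}
#endif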
3503 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3504 bit positions. */
3506 static int
3507 all_ones_mask_p (tree mask, int size)
3509 tree type = TREE_TYPE (mask);
3510 unsigned int precision = TYPE_PRECISION (type);
3511 tree tmask;
3513 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3514 tmask = force_fit_type (tmask, 0, false, false);
3516 return
3517 tree_int_cst_equal (mask,
3518 const_binop (RSHIFT_EXPR,
3519 const_binop (LSHIFT_EXPR, tmask,
3520 size_int (precision - size),
3521 0),
3522 size_int (precision - size), 0));
3525 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3526 represents the sign bit of EXP's type. If EXP represents a sign
3527 or zero extension, also test VAL against the unextended type.
3528 The return value is the (sub)expression whose sign bit is VAL,
3529 or NULL_TREE otherwise. */
3531 static tree
3532 sign_bit_p (tree exp, tree val)
3534 unsigned HOST_WIDE_INT mask_lo, lo;
3535 HOST_WIDE_INT mask_hi, hi;
3536 int width;
3537 tree t;
3539 /* Tree EXP must have an integral type. */
3540 t = TREE_TYPE (exp);
3541 if (! INTEGRAL_TYPE_P (t))
3542 return NULL_TREE;
3544 /* Tree VAL must be an integer constant. */
3545 if (TREE_CODE (val) != INTEGER_CST
3546 || TREE_CONSTANT_OVERFLOW (val))
3547 return NULL_TREE;
3549 width = TYPE_PRECISION (t);
3550 if (width > HOST_BITS_PER_WIDE_INT)
3552 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3553 lo = 0;
3555 mask_hi = ((unsigned HOST_WIDE_INT) -1
3556 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3557 mask_lo = -1;
3559 else
3561 hi = 0;
3562 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3564 mask_hi = 0;
3565 mask_lo = ((unsigned HOST_WIDE_INT) -1
3566 >> (HOST_BITS_PER_WIDE_INT - width));
3569 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3570 treat VAL as if it were unsigned. */
3571 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3572 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3573 return exp;
3575 /* Handle extension from a narrower type. */
3576 if (TREE_CODE (exp) == NOP_EXPR
3577 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3578 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3580 return NULL_TREE;
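/* Illustrative sketch (standalone, not part of this file): the kind of
   fold this predicate enables -- testing the sign bit with AND agrees
   with a signed < 0 test for every value of the type:  */
#if 0
#include <stdio.h>

int
main (void)
{
  short v = -123;
  printf ("%d %d\n", (v & 0x8000) != 0, v < 0);   /* 1 1 */
  return 0;
}
#endif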
3583 /* Subroutine for fold_truthop: determine if an operand is simple enough
3584 to be evaluated unconditionally. */
3586 static int
3587 simple_operand_p (tree exp)
3589 /* Strip any conversions that don't change the machine mode. */
3590 STRIP_NOPS (exp);
3592 return (CONSTANT_CLASS_P (exp)
3593 || TREE_CODE (exp) == SSA_NAME
3594 || (DECL_P (exp)
3595 && ! TREE_ADDRESSABLE (exp)
3596 && ! TREE_THIS_VOLATILE (exp)
3597 && ! DECL_NONLOCAL (exp)
3598 /* Don't regard global variables as simple. They may be
3599 allocated in ways unknown to the compiler (shared memory,
3600 #pragma weak, etc). */
3601 && ! TREE_PUBLIC (exp)
3602 && ! DECL_EXTERNAL (exp)
3603 /* Loading a static variable is unduly expensive, but global
3604 registers aren't expensive. */
3605 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3608 /* The following functions are subroutines to fold_range_test and allow it to
3609 try to change a logical combination of comparisons into a range test.
3611 For example, both
3612 X == 2 || X == 3 || X == 4 || X == 5
3614 X >= 2 && X <= 5
3615 are converted to
3616 (unsigned) (X - 2) <= 3
3618 We describe each set of comparisons as being either inside or outside
3619 a range, using a variable named like IN_P, and then describe the
3620 range with a lower and upper bound. If one of the bounds is omitted,
3621 it represents either the highest or lowest value of the type.
3623 In the comments below, we represent a range by two numbers in brackets
3624 preceded by a "+" to designate being inside that range, or a "-" to
3625 designate being outside that range, so the condition can be inverted by
3626 flipping the prefix. An omitted bound is represented by a "-". For
3627 example, "- [-, 10]" means being outside the range starting at the lowest
3628 possible value and ending at 10, in other words, being greater than 10.
3629 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3630 always false.
3632 We set up things so that the missing bounds are handled in a consistent
3633 manner so neither a missing bound nor "true" and "false" need to be
3634 handled using a special case. */
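/* Illustrative sketch (standalone, not part of this file): the range
   test from the comment above, verified exhaustively over a small
   domain:  */
#if 0
#include <stdio.h>

int
main (void)
{
  int x, ok = 1;
  for (x = -100; x <= 100; x++)
    ok &= (x == 2 || x == 3 || x == 4 || x == 5)
          == ((unsigned) (x - 2) <= 3);
  printf ("%d\n", ok);   /* 1 */
  return 0;
}
#endif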
3636 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3637 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3638 and UPPER1_P are nonzero if the respective argument is an upper bound
3639 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3640 must be specified for a comparison. ARG1 will be converted to ARG0's
3641 type if both are specified. */
3643 static tree
3644 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3645 tree arg1, int upper1_p)
3647 tree tem;
3648 int result;
3649 int sgn0, sgn1;
3651 /* If neither arg represents infinity, do the normal operation.
3652 Else, if not a comparison, return infinity. Else handle the special
3653 comparison rules. Note that most of the cases below won't occur, but
3654 are handled for consistency. */
3656 if (arg0 != 0 && arg1 != 0)
3658 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3659 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3660 STRIP_NOPS (tem);
3661 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3664 if (TREE_CODE_CLASS (code) != tcc_comparison)
3665 return 0;
3667 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3668 for neither. In real maths, we cannot assume open ended ranges are
3669 the same. But, this is computer arithmetic, where numbers are finite.
3670 We can therefore make the transformation of any unbounded range with
3671 the value Z, Z being greater than any representable number. This permits
3672 us to treat unbounded ranges as equal. */
3673 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3674 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3675 switch (code)
3677 case EQ_EXPR:
3678 result = sgn0 == sgn1;
3679 break;
3680 case NE_EXPR:
3681 result = sgn0 != sgn1;
3682 break;
3683 case LT_EXPR:
3684 result = sgn0 < sgn1;
3685 break;
3686 case LE_EXPR:
3687 result = sgn0 <= sgn1;
3688 break;
3689 case GT_EXPR:
3690 result = sgn0 > sgn1;
3691 break;
3692 case GE_EXPR:
3693 result = sgn0 >= sgn1;
3694 break;
3695 default:
3696 gcc_unreachable ();
3699 return constant_boolean_node (result, type);
3702 /* Given EXP, a logical expression, set the range it is testing into
3703 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3704 actually being tested. *PLOW and *PHIGH will be made of the same type
3705 as the returned expression. If EXP is not a comparison, we will most
3706 likely not be returning a useful value and range. */
3708 static tree
3709 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3711 enum tree_code code;
3712 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3713 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3714 int in_p, n_in_p;
3715 tree low, high, n_low, n_high;
3717 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3718 and see if we can refine the range. Some of the cases below may not
3719 happen, but it doesn't seem worth worrying about this. We "continue"
3720 the outer loop when we've changed something; otherwise we "break"
3721 the switch, which will "break" the while. */
3723 in_p = 0;
3724 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3726 while (1)
3728 code = TREE_CODE (exp);
3729 exp_type = TREE_TYPE (exp);
3731 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3733 if (TREE_CODE_LENGTH (code) > 0)
3734 arg0 = TREE_OPERAND (exp, 0);
3735 if (TREE_CODE_CLASS (code) == tcc_comparison
3736 || TREE_CODE_CLASS (code) == tcc_unary
3737 || TREE_CODE_CLASS (code) == tcc_binary)
3738 arg0_type = TREE_TYPE (arg0);
3739 if (TREE_CODE_CLASS (code) == tcc_binary
3740 || TREE_CODE_CLASS (code) == tcc_comparison
3741 || (TREE_CODE_CLASS (code) == tcc_expression
3742 && TREE_CODE_LENGTH (code) > 1))
3743 arg1 = TREE_OPERAND (exp, 1);
3746 switch (code)
3748 case TRUTH_NOT_EXPR:
3749 in_p = ! in_p, exp = arg0;
3750 continue;
3752 case EQ_EXPR: case NE_EXPR:
3753 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3754 /* We can only do something if the range is testing for zero
3755 and if the second operand is an integer constant. Note that
3756 saying something is "in" the range we make is done by
3757 complementing IN_P since it is set in the initial case of
3758 being not equal to zero; "out" is leaving it alone. */
3759 if (low == 0 || high == 0
3760 || ! integer_zerop (low) || ! integer_zerop (high)
3761 || TREE_CODE (arg1) != INTEGER_CST)
3762 break;
3764 switch (code)
3766 case NE_EXPR: /* - [c, c] */
3767 low = high = arg1;
3768 break;
3769 case EQ_EXPR: /* + [c, c] */
3770 in_p = ! in_p, low = high = arg1;
3771 break;
3772 case GT_EXPR: /* - [-, c] */
3773 low = 0, high = arg1;
3774 break;
3775 case GE_EXPR: /* + [c, -] */
3776 in_p = ! in_p, low = arg1, high = 0;
3777 break;
3778 case LT_EXPR: /* - [c, -] */
3779 low = arg1, high = 0;
3780 break;
3781 case LE_EXPR: /* + [-, c] */
3782 in_p = ! in_p, low = 0, high = arg1;
3783 break;
3784 default:
3785 gcc_unreachable ();
3788 /* If this is an unsigned comparison, we also know that EXP is
3789 greater than or equal to zero. We base the range tests we make
3790 on that fact, so we record it here so we can parse existing
3791 range tests. We test arg0_type since often the return type
3792 of, e.g. EQ_EXPR, is boolean. */
3793 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3795 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3796 in_p, low, high, 1,
3797 fold_convert (arg0_type, integer_zero_node),
3798 NULL_TREE))
3799 break;
3801 in_p = n_in_p, low = n_low, high = n_high;
3803 /* If the high bound is missing, but we have a nonzero low
3804 bound, reverse the range so it goes from zero to the low bound
3805 minus 1. */
3806 if (high == 0 && low && ! integer_zerop (low))
3808 in_p = ! in_p;
3809 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3810 integer_one_node, 0);
3811 low = fold_convert (arg0_type, integer_zero_node);
3815 exp = arg0;
3816 continue;
3818 case NEGATE_EXPR:
3819 /* (-x) IN [a,b] -> x in [-b, -a] */
3820 n_low = range_binop (MINUS_EXPR, exp_type,
3821 fold_convert (exp_type, integer_zero_node),
3822 0, high, 1);
3823 n_high = range_binop (MINUS_EXPR, exp_type,
3824 fold_convert (exp_type, integer_zero_node),
3825 0, low, 0);
3826 low = n_low, high = n_high;
3827 exp = arg0;
3828 continue;
3830 case BIT_NOT_EXPR:
3831 /* ~ X -> -X - 1 */
3832 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3833 fold_convert (exp_type, integer_one_node));
3834 continue;
3836 case PLUS_EXPR: case MINUS_EXPR:
3837 if (TREE_CODE (arg1) != INTEGER_CST)
3838 break;
3840 /* If EXP is signed, any overflow in the computation is undefined,
3841 so we don't worry about it so long as our computations on
3842 the bounds don't overflow. For unsigned, overflow is defined
3843 and this is exactly the right thing. */
3844 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3845 arg0_type, low, 0, arg1, 0);
3846 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3847 arg0_type, high, 1, arg1, 0);
3848 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3849 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3850 break;
3852 /* Check for an unsigned range which has wrapped around the maximum
3853 value thus making n_high < n_low, and normalize it. */
3854 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3856 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3857 integer_one_node, 0);
3858 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3859 integer_one_node, 0);
3861 /* If the range is of the form +/- [ x+1, x ], we won't
3862 be able to normalize it. But then, it represents the
3863 whole range or the empty set, so make it
3864 +/- [ -, - ]. */
3865 if (tree_int_cst_equal (n_low, low)
3866 && tree_int_cst_equal (n_high, high))
3867 low = high = 0;
3868 else
3869 in_p = ! in_p;
3871 else
3872 low = n_low, high = n_high;
3874 exp = arg0;
3875 continue;
3877 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3878 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3879 break;
3881 if (! INTEGRAL_TYPE_P (arg0_type)
3882 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3883 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3884 break;
3886 n_low = low, n_high = high;
3888 if (n_low != 0)
3889 n_low = fold_convert (arg0_type, n_low);
3891 if (n_high != 0)
3892 n_high = fold_convert (arg0_type, n_high);
3895 /* If we're converting arg0 from an unsigned type to exp's
3896 signed type, we will be doing the comparison as unsigned.
3897 The tests above have already verified that LOW and HIGH
3898 are both positive.
3900 So we have to ensure that we will handle large unsigned
3901 values the same way that the current signed bounds treat
3902 negative values. */
3904 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3906 tree high_positive;
3907 tree equiv_type = lang_hooks.types.type_for_mode
3908 (TYPE_MODE (arg0_type), 1);
3910 /* A range without an upper bound is, naturally, unbounded.
3911 Since convert would have cropped a very large value, use
3912 the max value for the destination type. */
3913 high_positive
3914 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3915 : TYPE_MAX_VALUE (arg0_type);
3917 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3918 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3919 fold_convert (arg0_type,
3920 high_positive),
3921 fold_convert (arg0_type,
3922 integer_one_node));
3924 /* If the low bound is specified, "and" the range with the
3925 range for which the original unsigned value will be
3926 positive. */
3927 if (low != 0)
3929 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3930 1, n_low, n_high, 1,
3931 fold_convert (arg0_type,
3932 integer_zero_node),
3933 high_positive))
3934 break;
3936 in_p = (n_in_p == in_p);
3938 else
3940 /* Otherwise, "or" the range with the range of the input
3941 that will be interpreted as negative. */
3942 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3943 0, n_low, n_high, 1,
3944 fold_convert (arg0_type,
3945 integer_zero_node),
3946 high_positive))
3947 break;
3949 in_p = (in_p != n_in_p);
3953 exp = arg0;
3954 low = n_low, high = n_high;
3955 continue;
3957 default:
3958 break;
3961 break;
3964 /* If EXP is a constant, we can evaluate whether this is true or false. */
3965 if (TREE_CODE (exp) == INTEGER_CST)
3967 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3968 exp, 0, low, 0))
3969 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3970 exp, 1, high, 1)));
3971 low = high = 0;
3972 exp = 0;
3975 *pin_p = in_p, *plow = low, *phigh = high;
3976 return exp;
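/* A minimal standalone sketch (kept under #if 0; build it separately
   to run) of the PLUS_EXPR bound adjustment in make_range above,
   assuming 8-bit bytes: the test "(x + 10) mod 256 <= 5" shifts to
   the range [246, 251], and no wrap-around normalization is needed
   here since 246 <= 251.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int x;

  /* Subtracting the addend 10 from both bounds of [0, 5] modulo 2**8
     yields [246, 251]; the two tests must agree for every value.  */
  for (x = 0; x < 256; x++)
    assert ((((x + 10) & 0xff) <= 5) == (x >= 246 && x <= 251));
  return 0;
}
#endif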
3979 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3980 type, TYPE, return an expression to test if EXP is in (or out of, depending
3981 on IN_P) the range. Return 0 if the test couldn't be created. */
3983 static tree
3984 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3986 tree etype = TREE_TYPE (exp);
3987 tree value;
3989 #ifdef HAVE_canonicalize_funcptr_for_compare
3990 /* Disable this optimization for function pointer expressions
3991 on targets that require function pointer canonicalization. */
3992 if (HAVE_canonicalize_funcptr_for_compare
3993 && TREE_CODE (etype) == POINTER_TYPE
3994 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3995 return NULL_TREE;
3996 #endif
3998 if (! in_p)
4000 value = build_range_check (type, exp, 1, low, high);
4001 if (value != 0)
4002 return invert_truthvalue (value);
4004 return 0;
4007 if (low == 0 && high == 0)
4008 return fold_convert (type, integer_one_node);
4010 if (low == 0)
4011 return fold_build2 (LE_EXPR, type, exp,
4012 fold_convert (etype, high));
4014 if (high == 0)
4015 return fold_build2 (GE_EXPR, type, exp,
4016 fold_convert (etype, low));
4018 if (operand_equal_p (low, high, 0))
4019 return fold_build2 (EQ_EXPR, type, exp,
4020 fold_convert (etype, low));
4022 if (integer_zerop (low))
4024 if (! TYPE_UNSIGNED (etype))
4026 etype = lang_hooks.types.unsigned_type (etype);
4027 high = fold_convert (etype, high);
4028 exp = fold_convert (etype, exp);
4030 return build_range_check (type, exp, 1, 0, high);
4033 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4034 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4036 unsigned HOST_WIDE_INT lo;
4037 HOST_WIDE_INT hi;
4038 int prec;
4040 prec = TYPE_PRECISION (etype);
4041 if (prec <= HOST_BITS_PER_WIDE_INT)
4043 hi = 0;
4044 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4046 else
4048 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4049 lo = (unsigned HOST_WIDE_INT) -1;
4052 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4054 if (TYPE_UNSIGNED (etype))
4056 etype = lang_hooks.types.signed_type (etype);
4057 exp = fold_convert (etype, exp);
4059 return fold_build2 (GT_EXPR, type, exp,
4060 fold_convert (etype, integer_zero_node));
4064 value = const_binop (MINUS_EXPR, high, low, 0);
4065 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4066 && ! TYPE_UNSIGNED (etype))
4068 tree utype, minv, maxv;
4070 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4071 for the type in question, as we rely on this here. */
4072 switch (TREE_CODE (etype))
4074 case INTEGER_TYPE:
4075 case ENUMERAL_TYPE:
4076 case CHAR_TYPE:
4077 /* There is no requirement that LOW be within the range of ETYPE
4078 if the latter is a subtype. It must, however, be within the base
4079 type of ETYPE. So be sure we do the subtraction in that type. */
4080 if (TREE_TYPE (etype))
4081 etype = TREE_TYPE (etype);
4082 utype = lang_hooks.types.unsigned_type (etype);
4083 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4084 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4085 integer_one_node, 1);
4086 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4087 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4088 minv, 1, maxv, 1)))
4090 etype = utype;
4091 high = fold_convert (etype, high);
4092 low = fold_convert (etype, low);
4093 exp = fold_convert (etype, exp);
4094 value = const_binop (MINUS_EXPR, high, low, 0);
4096 break;
4097 default:
4098 break;
4102 if (value != 0 && ! TREE_OVERFLOW (value))
4104 /* There is no requirement that LOW be within the range of ETYPE
4105 if the latter is a subtype. It must, however, be within the base
4106 type of ETYPE. So be sure we do the subtraction in that type. */
4107 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4109 etype = TREE_TYPE (etype);
4110 exp = fold_convert (etype, exp);
4111 low = fold_convert (etype, low);
4112 value = fold_convert (etype, value);
4115 return build_range_check (type,
4116 fold_build2 (MINUS_EXPR, etype, exp, low),
4117 1, build_int_cst (etype, 0), value);
4120 return 0;
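/* A minimal standalone sketch (kept under #if 0; build it separately
   to run) of the transformation build_range_check bottoms out in: a
   two-sided range test becomes a single unsigned comparison against
   HIGH - LOW after subtracting LOW.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int c;

  /* The classic digit test: '0' <= c && c <= '9' is equivalent to
     (unsigned) (c - '0') <= 9, since the subtraction maps values
     below '0' to huge unsigned results.  */
  for (c = SCHAR_MIN; c <= SCHAR_MAX; c++)
    assert ((c >= '0' && c <= '9')
            == ((unsigned int) (c - '0') <= (unsigned int) ('9' - '0')));
  return 0;
}
#endif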
4123 /* Given two ranges, see if we can merge them into one. Return 1 if we
4124 can, 0 if we can't. Set the output range into the specified parameters. */
4126 static int
4127 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4128 tree high0, int in1_p, tree low1, tree high1)
4130 int no_overlap;
4131 int subset;
4132 int temp;
4133 tree tem;
4134 int in_p;
4135 tree low, high;
4136 int lowequal = ((low0 == 0 && low1 == 0)
4137 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4138 low0, 0, low1, 0)));
4139 int highequal = ((high0 == 0 && high1 == 0)
4140 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4141 high0, 1, high1, 1)));
4143 /* Make range 0 be the range that starts first, or ends last if they
4144 start at the same value. Swap them if it isn't. */
4145 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4146 low0, 0, low1, 0))
4147 || (lowequal
4148 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4149 high1, 1, high0, 1))))
4151 temp = in0_p, in0_p = in1_p, in1_p = temp;
4152 tem = low0, low0 = low1, low1 = tem;
4153 tem = high0, high0 = high1, high1 = tem;
4156 /* Now flag two cases, whether the ranges are disjoint or whether the
4157 second range is totally subsumed in the first. Note that the tests
4158 below are simplified by the ones above. */
4159 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4160 high0, 1, low1, 0));
4161 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4162 high1, 1, high0, 1));
4164 /* We now have four cases, depending on whether we are including or
4165 excluding the two ranges. */
4166 if (in0_p && in1_p)
4168 /* If they don't overlap, the result is false. If the second range
4169 is a subset it is the result. Otherwise, the range is from the start
4170 of the second to the end of the first. */
4171 if (no_overlap)
4172 in_p = 0, low = high = 0;
4173 else if (subset)
4174 in_p = 1, low = low1, high = high1;
4175 else
4176 in_p = 1, low = low1, high = high0;
4179 else if (in0_p && ! in1_p)
4181 /* If they don't overlap, the result is the first range. If they are
4182 equal, the result is false. If the second range is a subset of the
4183 first, and the ranges begin at the same place, we go from just after
4184 the end of the first range to the end of the second. If the second
4185 range is not a subset of the first, or if it is a subset and both
4186 ranges end at the same place, the range starts at the start of the
4187 first range and ends just before the second range.
4188 Otherwise, we can't describe this as a single range. */
4189 if (no_overlap)
4190 in_p = 1, low = low0, high = high0;
4191 else if (lowequal && highequal)
4192 in_p = 0, low = high = 0;
4193 else if (subset && lowequal)
4195 in_p = 1, high = high0;
4196 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4197 integer_one_node, 0);
4199 else if (! subset || highequal)
4201 in_p = 1, low = low0;
4202 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4203 integer_one_node, 0);
4205 else
4206 return 0;
4209 else if (! in0_p && in1_p)
4211 /* If they don't overlap, the result is the second range. If the second
4212 is a subset of the first, the result is false. Otherwise,
4213 the range starts just after the first range and ends at the
4214 end of the second. */
4215 if (no_overlap)
4216 in_p = 1, low = low1, high = high1;
4217 else if (subset || highequal)
4218 in_p = 0, low = high = 0;
4219 else
4221 in_p = 1, high = high1;
4222 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4223 integer_one_node, 0);
4227 else
4229 /* The case where we are excluding both ranges. Here the complex case
4230 is if they don't overlap. In that case, the only time we have a
4231 range is if they are adjacent. If the second is a subset of the
4232 first, the result is the first. Otherwise, the range to exclude
4233 starts at the beginning of the first range and ends at the end of the
4234 second. */
4235 if (no_overlap)
4237 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4238 range_binop (PLUS_EXPR, NULL_TREE,
4239 high0, 1,
4240 integer_one_node, 1),
4241 1, low1, 0)))
4242 in_p = 0, low = low0, high = high1;
4243 else
4245 /* Canonicalize - [min, x] into - [-, x]. */
4246 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4247 switch (TREE_CODE (TREE_TYPE (low0)))
4249 case ENUMERAL_TYPE:
4250 if (TYPE_PRECISION (TREE_TYPE (low0))
4251 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4252 break;
4253 /* FALLTHROUGH */
4254 case INTEGER_TYPE:
4255 case CHAR_TYPE:
4256 if (tree_int_cst_equal (low0,
4257 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4258 low0 = 0;
4259 break;
4260 case POINTER_TYPE:
4261 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4262 && integer_zerop (low0))
4263 low0 = 0;
4264 break;
4265 default:
4266 break;
4269 /* Canonicalize - [x, max] into - [x, -]. */
4270 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4271 switch (TREE_CODE (TREE_TYPE (high1)))
4273 case ENUMERAL_TYPE:
4274 if (TYPE_PRECISION (TREE_TYPE (high1))
4275 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4276 break;
4277 /* FALLTHROUGH */
4278 case INTEGER_TYPE:
4279 case CHAR_TYPE:
4280 if (tree_int_cst_equal (high1,
4281 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4282 high1 = 0;
4283 break;
4284 case POINTER_TYPE:
4285 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4286 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4287 high1, 1,
4288 integer_one_node, 1)))
4289 high1 = 0;
4290 break;
4291 default:
4292 break;
4295 /* The ranges might also be adjacent between the maximum and
4296 minimum values of the given type. For
4297 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4298 return + [x + 1, y - 1]. */
4299 if (low0 == 0 && high1 == 0)
4301 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4302 integer_one_node, 1);
4303 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4304 integer_one_node, 0);
4305 if (low == 0 || high == 0)
4306 return 0;
4308 in_p = 1;
4310 else
4311 return 0;
4314 else if (subset)
4315 in_p = 0, low = low0, high = high0;
4316 else
4317 in_p = 0, low = low0, high = high1;
4320 *pin_p = in_p, *plow = low, *phigh = high;
4321 return 1;
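/* A minimal standalone sketch (kept under #if 0; build it separately
   to run) of the in0_p && in1_p case of merge_ranges above: once
   range 0 is the one that starts first, two overlapping "in" ranges
   intersect to [low1, high0].  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;

  /* [0, 9] intersected with [5, 15] is [5, 9].  */
  for (x = -5; x <= 20; x++)
    assert (((x >= 0 && x <= 9) && (x >= 5 && x <= 15))
            == (x >= 5 && x <= 9));
  return 0;
}
#endif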
4325 /* Subroutine of fold, looking inside expressions of the form
4326 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4327 of the COND_EXPR. This function is also used to optimize
4328 A op B ? C : A by reversing the comparison first.
4330 Return a folded expression whose code is not a COND_EXPR
4331 anymore, or NULL_TREE if no folding opportunity is found. */
4333 static tree
4334 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4336 enum tree_code comp_code = TREE_CODE (arg0);
4337 tree arg00 = TREE_OPERAND (arg0, 0);
4338 tree arg01 = TREE_OPERAND (arg0, 1);
4339 tree arg1_type = TREE_TYPE (arg1);
4340 tree tem;
4342 STRIP_NOPS (arg1);
4343 STRIP_NOPS (arg2);
4345 /* If we have A op 0 ? A : -A, consider applying the following
4346 transformations:
4348 A == 0? A : -A same as -A
4349 A != 0? A : -A same as A
4350 A >= 0? A : -A same as abs (A)
4351 A > 0? A : -A same as abs (A)
4352 A <= 0? A : -A same as -abs (A)
4353 A < 0? A : -A same as -abs (A)
4355 None of these transformations work for modes with signed
4356 zeros. If A is +/-0, the first two transformations will
4357 change the sign of the result (from +0 to -0, or vice
4358 versa). The last four will fix the sign of the result,
4359 even though the original expressions could be positive or
4360 negative, depending on the sign of A.
4362 Note that all these transformations are correct if A is
4363 NaN, since the two alternatives (A and -A) are also NaNs. */
4364 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4365 ? real_zerop (arg01)
4366 : integer_zerop (arg01))
4367 && ((TREE_CODE (arg2) == NEGATE_EXPR
4368 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4369 /* In the case that A is of the form X-Y, '-A' (arg2) may
4370 have already been folded to Y-X, check for that. */
4371 || (TREE_CODE (arg1) == MINUS_EXPR
4372 && TREE_CODE (arg2) == MINUS_EXPR
4373 && operand_equal_p (TREE_OPERAND (arg1, 0),
4374 TREE_OPERAND (arg2, 1), 0)
4375 && operand_equal_p (TREE_OPERAND (arg1, 1),
4376 TREE_OPERAND (arg2, 0), 0))))
4377 switch (comp_code)
4379 case EQ_EXPR:
4380 case UNEQ_EXPR:
4381 tem = fold_convert (arg1_type, arg1);
4382 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4383 case NE_EXPR:
4384 case LTGT_EXPR:
4385 return pedantic_non_lvalue (fold_convert (type, arg1));
4386 case UNGE_EXPR:
4387 case UNGT_EXPR:
4388 if (flag_trapping_math)
4389 break;
4390 /* Fall through. */
4391 case GE_EXPR:
4392 case GT_EXPR:
4393 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4394 arg1 = fold_convert (lang_hooks.types.signed_type
4395 (TREE_TYPE (arg1)), arg1);
4396 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4397 return pedantic_non_lvalue (fold_convert (type, tem));
4398 case UNLE_EXPR:
4399 case UNLT_EXPR:
4400 if (flag_trapping_math)
4401 break;
/* Fall through. */
4402 case LE_EXPR:
4403 case LT_EXPR:
4404 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4405 arg1 = fold_convert (lang_hooks.types.signed_type
4406 (TREE_TYPE (arg1)), arg1);
4407 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4408 return negate_expr (fold_convert (type, tem));
4409 default:
4410 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4411 break;
4414 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4415 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4416 both transformations are correct when A is NaN: A != 0
4417 is then true, and A == 0 is false. */
4419 if (integer_zerop (arg01) && integer_zerop (arg2))
4421 if (comp_code == NE_EXPR)
4422 return pedantic_non_lvalue (fold_convert (type, arg1));
4423 else if (comp_code == EQ_EXPR)
4424 return fold_convert (type, integer_zero_node);
4427 /* Try some transformations of A op B ? A : B.
4429 A == B? A : B same as B
4430 A != B? A : B same as A
4431 A >= B? A : B same as max (A, B)
4432 A > B? A : B same as max (B, A)
4433 A <= B? A : B same as min (A, B)
4434 A < B? A : B same as min (B, A)
4436 As above, these transformations don't work in the presence
4437 of signed zeros. For example, if A and B are zeros of
4438 opposite sign, the first two transformations will change
4439 the sign of the result. In the last four, the original
4440 expressions give different results for (A=+0, B=-0) and
4441 (A=-0, B=+0), but the transformed expressions do not.
4443 The first two transformations are correct if either A or B
4444 is a NaN. In the first transformation, the condition will
4445 be false, and B will indeed be chosen. In the case of the
4446 second transformation, the condition A != B will be true,
4447 and A will be chosen.
4449 The conversions to max() and min() are not correct if B is
4450 a number and A is not. The conditions in the original
4451 expressions will be false, so all four give B. The min()
4452 and max() versions would give a NaN instead. */
4453 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4454 /* Avoid these transformations if the COND_EXPR may be used
4455 as an lvalue in the C++ front-end. PR c++/19199. */
4456 && (in_gimple_form
4457 || strcmp (lang_hooks.name, "GNU C++") != 0
4458 || ! maybe_lvalue_p (arg1)
4459 || ! maybe_lvalue_p (arg2)))
4461 tree comp_op0 = arg00;
4462 tree comp_op1 = arg01;
4463 tree comp_type = TREE_TYPE (comp_op0);
4465 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4466 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4468 comp_type = type;
4469 comp_op0 = arg1;
4470 comp_op1 = arg2;
4473 switch (comp_code)
4475 case EQ_EXPR:
4476 return pedantic_non_lvalue (fold_convert (type, arg2));
4477 case NE_EXPR:
4478 return pedantic_non_lvalue (fold_convert (type, arg1));
4479 case LE_EXPR:
4480 case LT_EXPR:
4481 case UNLE_EXPR:
4482 case UNLT_EXPR:
4483 /* In C++ a ?: expression can be an lvalue, so put the
4484 operand which will be used if they are equal first
4485 so that we can convert this back to the
4486 corresponding COND_EXPR. */
4487 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4489 comp_op0 = fold_convert (comp_type, comp_op0);
4490 comp_op1 = fold_convert (comp_type, comp_op1);
4491 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4492 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4493 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4494 return pedantic_non_lvalue (fold_convert (type, tem));
4496 break;
4497 case GE_EXPR:
4498 case GT_EXPR:
4499 case UNGE_EXPR:
4500 case UNGT_EXPR:
4501 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4503 comp_op0 = fold_convert (comp_type, comp_op0);
4504 comp_op1 = fold_convert (comp_type, comp_op1);
4505 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4506 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4507 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4508 return pedantic_non_lvalue (fold_convert (type, tem));
4510 break;
4511 case UNEQ_EXPR:
4512 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4513 return pedantic_non_lvalue (fold_convert (type, arg2));
4514 break;
4515 case LTGT_EXPR:
4516 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4517 return pedantic_non_lvalue (fold_convert (type, arg1));
4518 break;
4519 default:
4520 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4521 break;
4525 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4526 we might still be able to simplify this. For example,
4527 if C1 is one less or one more than C2, this might have started
4528 out as a MIN or MAX and been transformed by this function.
4529 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4531 if (INTEGRAL_TYPE_P (type)
4532 && TREE_CODE (arg01) == INTEGER_CST
4533 && TREE_CODE (arg2) == INTEGER_CST)
4534 switch (comp_code)
4536 case EQ_EXPR:
4537 /* We can replace A with C1 in this case. */
4538 arg1 = fold_convert (type, arg01);
4539 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4541 case LT_EXPR:
4542 /* If C1 is C2 + 1, this is min(A, C2). */
4543 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4544 OEP_ONLY_CONST)
4545 && operand_equal_p (arg01,
4546 const_binop (PLUS_EXPR, arg2,
4547 integer_one_node, 0),
4548 OEP_ONLY_CONST))
4549 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4550 type, arg1, arg2));
4551 break;
4553 case LE_EXPR:
4554 /* If C1 is C2 - 1, this is min(A, C2). */
4555 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4556 OEP_ONLY_CONST)
4557 && operand_equal_p (arg01,
4558 const_binop (MINUS_EXPR, arg2,
4559 integer_one_node, 0),
4560 OEP_ONLY_CONST))
4561 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4562 type, arg1, arg2));
4563 break;
4565 case GT_EXPR:
4566 /* If C1 is C2 - 1, this is max(A, C2). */
4567 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4568 OEP_ONLY_CONST)
4569 && operand_equal_p (arg01,
4570 const_binop (MINUS_EXPR, arg2,
4571 integer_one_node, 0),
4572 OEP_ONLY_CONST))
4573 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4574 type, arg1, arg2));
4575 break;
4577 case GE_EXPR:
4578 /* If C1 is C2 + 1, this is max(A, C2). */
4579 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4580 OEP_ONLY_CONST)
4581 && operand_equal_p (arg01,
4582 const_binop (PLUS_EXPR, arg2,
4583 integer_one_node, 0),
4584 OEP_ONLY_CONST))
4585 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4586 type, arg1, arg2));
4587 break;
4588 case NE_EXPR:
4589 break;
4590 default:
4591 gcc_unreachable ();
4594 return NULL_TREE;
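/* A minimal standalone sketch (kept under #if 0; build it separately
   to run) of the signed-zero hazard described above: for A = -0.0,
   A >= 0 ? A : -A selects -0.0, whereas a true abs yields +0.0, so
   the A op 0 ? A : -A folds must not fire when signed zeros
   matter.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double a = -0.0;
  double via_cond = a >= 0 ? a : -a;

  /* The conditional keeps the negative zero; fabs does not.  */
  assert (signbit (via_cond) != 0);
  assert (signbit (fabs (a)) == 0);
  return 0;
}
#endif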
4599 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4600 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4601 #endif
4603 /* CODE applied to OP0 and OP1 is some logical combination of boolean
4604 tests. See if we can merge it into some range test. Return the new tree if so. */
4606 static tree
4607 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4609 int or_op = (code == TRUTH_ORIF_EXPR
4610 || code == TRUTH_OR_EXPR);
4611 int in0_p, in1_p, in_p;
4612 tree low0, low1, low, high0, high1, high;
4613 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4614 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4615 tree tem;
4617 /* If this is an OR operation, invert both sides; we will invert
4618 again at the end. */
4619 if (or_op)
4620 in0_p = ! in0_p, in1_p = ! in1_p;
4622 /* If both expressions are the same, if we can merge the ranges, and we
4623 can build the range test, return it or it inverted. If one of the
4624 ranges is always true or always false, consider it to be the same
4625 expression as the other. */
4626 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4627 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4628 in1_p, low1, high1)
4629 && 0 != (tem = (build_range_check (type,
4630 lhs != 0 ? lhs
4631 : rhs != 0 ? rhs : integer_zero_node,
4632 in_p, low, high))))
4633 return or_op ? invert_truthvalue (tem) : tem;
4635 /* On machines where branches are expensive, if this is a
4636 short-circuited branch and the underlying object on both sides
4637 is the same, make a non-short-circuit operation. */
4638 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4639 && lhs != 0 && rhs != 0
4640 && (code == TRUTH_ANDIF_EXPR
4641 || code == TRUTH_ORIF_EXPR)
4642 && operand_equal_p (lhs, rhs, 0))
4644 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4645 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4646 which cases we can't do this. */
4647 if (simple_operand_p (lhs))
4648 return build2 (code == TRUTH_ANDIF_EXPR
4649 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4650 type, op0, op1);
4652 else if (lang_hooks.decls.global_bindings_p () == 0
4653 && ! CONTAINS_PLACEHOLDER_P (lhs))
4655 tree common = save_expr (lhs);
4657 if (0 != (lhs = build_range_check (type, common,
4658 or_op ? ! in0_p : in0_p,
4659 low0, high0))
4660 && (0 != (rhs = build_range_check (type, common,
4661 or_op ? ! in1_p : in1_p,
4662 low1, high1))))
4663 return build2 (code == TRUTH_ANDIF_EXPR
4664 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4665 type, lhs, rhs);
4669 return 0;
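/* A minimal standalone sketch (kept under #if 0; build it separately
   to run) of the OR handling in fold_range_test above: both sides
   are inverted, merged, and the result inverted again, so a test
   like "c < '0' || c > '9'" collapses to a single out-of-range
   comparison.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int c;

  /* De Morgan plus the merged range: the disjunction equals one
     unsigned comparison testing "outside [0, 9] after biasing".  */
  for (c = SCHAR_MIN; c <= SCHAR_MAX; c++)
    assert ((c < '0' || c > '9') == ((unsigned int) (c - '0') > 9u));
  return 0;
}
#endif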
4672 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4673 P-bit value. Arrange things so the extra bits will be set to zero if
4674 and only if C is sign-extended to its full width. If MASK is nonzero,
4675 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4677 static tree
4678 unextend (tree c, int p, int unsignedp, tree mask)
4680 tree type = TREE_TYPE (c);
4681 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4682 tree temp;
4684 if (p == modesize || unsignedp)
4685 return c;
4687 /* We work by getting just the sign bit into the low-order bit, then
4688 into the high-order bit, then sign-extend. We then XOR that value
4689 with C. */
4690 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4691 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4693 /* We must use a signed type in order to get an arithmetic right shift.
4694 However, we must also avoid introducing accidental overflows, so that
4695 a subsequent call to integer_zerop will work. Hence we must
4696 do the type conversion here. At this point, the constant is either
4697 zero or one, and the conversion to a signed type can never overflow.
4698 We could get an overflow if this conversion is done anywhere else. */
4699 if (TYPE_UNSIGNED (type))
4700 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4702 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4703 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4704 if (mask != 0)
4705 temp = const_binop (BIT_AND_EXPR, temp,
4706 fold_convert (TREE_TYPE (c), mask), 0);
4707 /* If necessary, convert the type back to match the type of C. */
4708 if (TYPE_UNSIGNED (type))
4709 temp = fold_convert (type, temp);
4711 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
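/* A minimal standalone sketch (kept under #if 0; build it separately
   to run) of the sign-bit arithmetic underlying unextend: a P-bit
   field is sign-extended by XORing with the sign bit and subtracting
   it back, the same "isolate the sign bit, then propagate it" idea
   as the shift sequence above.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int v;
  const int p = 4;                   /* Field width.  */
  const int sign = 1 << (p - 1);     /* Sign bit of the field.  */

  for (v = 0; v < (1u << p); v++)
    {
      int extended = (((int) v) ^ sign) - sign;

      /* Values below the sign bit stay as-is; the rest drop by 2**p.  */
      assert (extended
              == (v < (unsigned) sign ? (int) v : (int) v - (1 << p)));
    }
  return 0;
}
#endif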
4714 /* Find ways of folding logical expressions of LHS and RHS:
4715 Try to merge two comparisons to the same innermost item.
4716 Look for range tests like "ch >= '0' && ch <= '9'".
4717 Look for combinations of simple terms on machines with expensive branches
4718 and evaluate the RHS unconditionally.
4720 For example, if we have p->a == 2 && p->b == 4 and we can make an
4721 object large enough to span both A and B, we can do this with a comparison
4722 against the object ANDed with the a mask.
4724 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4725 operations to do this with one comparison.
4727 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4728 function and the one above.
4730 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4731 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4733 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4734 two operands.
4736 We return the simplified tree or 0 if no optimization is possible. */
4738 static tree
4739 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4741 /* If this is the "or" of two comparisons, we can do something if
4742 the comparisons are NE_EXPR. If this is the "and", we can do something
4743 if the comparisons are EQ_EXPR. I.e.,
4744 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4746 WANTED_CODE is this operation code. For single bit fields, we can
4747 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4748 comparison for one-bit fields. */
4750 enum tree_code wanted_code;
4751 enum tree_code lcode, rcode;
4752 tree ll_arg, lr_arg, rl_arg, rr_arg;
4753 tree ll_inner, lr_inner, rl_inner, rr_inner;
4754 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4755 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4756 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4757 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4758 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4759 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4760 enum machine_mode lnmode, rnmode;
4761 tree ll_mask, lr_mask, rl_mask, rr_mask;
4762 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4763 tree l_const, r_const;
4764 tree lntype, rntype, result;
4765 int first_bit, end_bit;
4766 int volatilep;
4768 /* Start by getting the comparison codes. Fail if anything is volatile.
4769 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4770 it were surrounded with a NE_EXPR. */
4772 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4773 return 0;
4775 lcode = TREE_CODE (lhs);
4776 rcode = TREE_CODE (rhs);
4778 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4780 lhs = build2 (NE_EXPR, truth_type, lhs,
4781 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4782 lcode = NE_EXPR;
4785 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4787 rhs = build2 (NE_EXPR, truth_type, rhs,
4788 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4789 rcode = NE_EXPR;
4792 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4793 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4794 return 0;
4796 ll_arg = TREE_OPERAND (lhs, 0);
4797 lr_arg = TREE_OPERAND (lhs, 1);
4798 rl_arg = TREE_OPERAND (rhs, 0);
4799 rr_arg = TREE_OPERAND (rhs, 1);
4801 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4802 if (simple_operand_p (ll_arg)
4803 && simple_operand_p (lr_arg))
4805 tree result;
4806 if (operand_equal_p (ll_arg, rl_arg, 0)
4807 && operand_equal_p (lr_arg, rr_arg, 0))
4809 result = combine_comparisons (code, lcode, rcode,
4810 truth_type, ll_arg, lr_arg);
4811 if (result)
4812 return result;
4814 else if (operand_equal_p (ll_arg, rr_arg, 0)
4815 && operand_equal_p (lr_arg, rl_arg, 0))
4817 result = combine_comparisons (code, lcode,
4818 swap_tree_comparison (rcode),
4819 truth_type, ll_arg, lr_arg);
4820 if (result)
4821 return result;
4825 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4826 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4828 /* If the RHS can be evaluated unconditionally and its operands are
4829 simple, it wins to evaluate the RHS unconditionally on machines
4830 with expensive branches. In this case, this isn't a comparison
4831 that can be merged. Avoid doing this if the RHS is a floating-point
4832 comparison since those can trap. */
4834 if (BRANCH_COST >= 2
4835 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4836 && simple_operand_p (rl_arg)
4837 && simple_operand_p (rr_arg))
4839 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4840 if (code == TRUTH_OR_EXPR
4841 && lcode == NE_EXPR && integer_zerop (lr_arg)
4842 && rcode == NE_EXPR && integer_zerop (rr_arg)
4843 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4844 return build2 (NE_EXPR, truth_type,
4845 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4846 ll_arg, rl_arg),
4847 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4849 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4850 if (code == TRUTH_AND_EXPR
4851 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4852 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4853 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4854 return build2 (EQ_EXPR, truth_type,
4855 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4856 ll_arg, rl_arg),
4857 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4859 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4860 return build2 (code, truth_type, lhs, rhs);
4863 /* See if the comparisons can be merged. Then get all the parameters for
4864 each side. */
4866 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4867 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4868 return 0;
4870 volatilep = 0;
4871 ll_inner = decode_field_reference (ll_arg,
4872 &ll_bitsize, &ll_bitpos, &ll_mode,
4873 &ll_unsignedp, &volatilep, &ll_mask,
4874 &ll_and_mask);
4875 lr_inner = decode_field_reference (lr_arg,
4876 &lr_bitsize, &lr_bitpos, &lr_mode,
4877 &lr_unsignedp, &volatilep, &lr_mask,
4878 &lr_and_mask);
4879 rl_inner = decode_field_reference (rl_arg,
4880 &rl_bitsize, &rl_bitpos, &rl_mode,
4881 &rl_unsignedp, &volatilep, &rl_mask,
4882 &rl_and_mask);
4883 rr_inner = decode_field_reference (rr_arg,
4884 &rr_bitsize, &rr_bitpos, &rr_mode,
4885 &rr_unsignedp, &volatilep, &rr_mask,
4886 &rr_and_mask);
4888 /* The inner operation on the lhs of each comparison must be the same
4889 if we are to be able to do anything.
4890 Then see if we have constants. If not, the same must be true for
4891 the rhs's. */
4892 if (volatilep || ll_inner == 0 || rl_inner == 0
4893 || ! operand_equal_p (ll_inner, rl_inner, 0))
4894 return 0;
4896 if (TREE_CODE (lr_arg) == INTEGER_CST
4897 && TREE_CODE (rr_arg) == INTEGER_CST)
4898 l_const = lr_arg, r_const = rr_arg;
4899 else if (lr_inner == 0 || rr_inner == 0
4900 || ! operand_equal_p (lr_inner, rr_inner, 0))
4901 return 0;
4902 else
4903 l_const = r_const = 0;
4905 /* If either comparison code is not correct for our logical operation,
4906 fail. However, we can convert a one-bit comparison against zero into
4907 the opposite comparison against that bit being set in the field. */
4909 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4910 if (lcode != wanted_code)
4912 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4914 /* Make the left operand unsigned, since we are only interested
4915 in the value of one bit. Otherwise we are doing the wrong
4916 thing below. */
4917 ll_unsignedp = 1;
4918 l_const = ll_mask;
4920 else
4921 return 0;
4924 /* This is analogous to the code for l_const above. */
4925 if (rcode != wanted_code)
4927 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4929 rl_unsignedp = 1;
4930 r_const = rl_mask;
4932 else
4933 return 0;
4936 /* After this point all optimizations will generate bit-field
4937 references, which we might not want. */
4938 if (! lang_hooks.can_use_bit_fields_p ())
4939 return 0;
4941 /* See if we can find a mode that contains both fields being compared on
4942 the left. If we can't, fail. Otherwise, update all constants and masks
4943 to be relative to a field of that size. */
4944 first_bit = MIN (ll_bitpos, rl_bitpos);
4945 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4946 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4947 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4948 volatilep);
4949 if (lnmode == VOIDmode)
4950 return 0;
4952 lnbitsize = GET_MODE_BITSIZE (lnmode);
4953 lnbitpos = first_bit & ~ (lnbitsize - 1);
4954 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4955 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4957 if (BYTES_BIG_ENDIAN)
4959 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4960 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4963 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4964 size_int (xll_bitpos), 0);
4965 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4966 size_int (xrl_bitpos), 0);
4968 if (l_const)
4970 l_const = fold_convert (lntype, l_const);
4971 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4972 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4973 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4974 fold_build1 (BIT_NOT_EXPR,
4975 lntype, ll_mask),
4976 0)))
4978 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4980 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4983 if (r_const)
4985 r_const = fold_convert (lntype, r_const);
4986 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4987 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4988 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4989 fold_build1 (BIT_NOT_EXPR,
4990 lntype, rl_mask),
4991 0)))
4993 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4995 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4999 /* If the right sides are not constant, do the same for them. Also,
5000 disallow this optimization if a size or signedness mismatch occurs
5001 between the left and right sides. */
5002 if (l_const == 0)
5004 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5005 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5006 /* Make sure the two fields on the right
5007 correspond to the left without being swapped. */
5008 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5009 return 0;
5011 first_bit = MIN (lr_bitpos, rr_bitpos);
5012 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5013 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5014 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5015 volatilep);
5016 if (rnmode == VOIDmode)
5017 return 0;
5019 rnbitsize = GET_MODE_BITSIZE (rnmode);
5020 rnbitpos = first_bit & ~ (rnbitsize - 1);
5021 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5022 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5024 if (BYTES_BIG_ENDIAN)
5026 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5027 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5030 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5031 size_int (xlr_bitpos), 0);
5032 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5033 size_int (xrr_bitpos), 0);
5035 /* Make a mask that corresponds to both fields being compared.
5036 Do this for both items being compared. If the operands are the
5037 same size and the bits being compared are in the same position
5038 then we can do this by masking both and comparing the masked
5039 results. */
5040 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5041 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5042 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5044 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5045 ll_unsignedp || rl_unsignedp);
5046 if (! all_ones_mask_p (ll_mask, lnbitsize))
5047 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5049 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5050 lr_unsignedp || rr_unsignedp);
5051 if (! all_ones_mask_p (lr_mask, rnbitsize))
5052 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5054 return build2 (wanted_code, truth_type, lhs, rhs);
5057 /* There is still another way we can do something: If both pairs of
5058 fields being compared are adjacent, we may be able to make a wider
5059 field containing them both.
5061 Note that we still must mask the lhs/rhs expressions. Furthermore,
5062 the mask must be shifted to account for the shift done by
5063 make_bit_field_ref. */
5064 if ((ll_bitsize + ll_bitpos == rl_bitpos
5065 && lr_bitsize + lr_bitpos == rr_bitpos)
5066 || (ll_bitpos == rl_bitpos + rl_bitsize
5067 && lr_bitpos == rr_bitpos + rr_bitsize))
5069 tree type;
5071 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5072 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5073 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5074 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5076 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5077 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5078 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5079 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5081 /* Convert to the smaller type before masking out unwanted bits. */
5082 type = lntype;
5083 if (lntype != rntype)
5085 if (lnbitsize > rnbitsize)
5087 lhs = fold_convert (rntype, lhs);
5088 ll_mask = fold_convert (rntype, ll_mask);
5089 type = rntype;
5091 else if (lnbitsize < rnbitsize)
5093 rhs = fold_convert (lntype, rhs);
5094 lr_mask = fold_convert (lntype, lr_mask);
5095 type = lntype;
5099 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5100 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5102 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5103 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5105 return build2 (wanted_code, truth_type, lhs, rhs);
5108 return 0;
5111 /* Handle the case of comparisons with constants. If there is something in
5112 common between the masks, those bits of the constants must be the same.
5113 If not, the condition is always false. Test for this to avoid generating
5114 incorrect code below. */
5115 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5116 if (! integer_zerop (result)
5117 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5118 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5120 if (wanted_code == NE_EXPR)
5122 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5123 return constant_boolean_node (true, truth_type);
5125 else
5127 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5128 return constant_boolean_node (false, truth_type);
5132 /* Construct the expression we will return. First get the component
5133 reference we will make. Unless the mask is all ones for the width of
5134 that field, perform the mask operation. Then compare with the
5135 merged constant. */
5136 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5137 ll_unsignedp || rl_unsignedp);
5139 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5140 if (! all_ones_mask_p (ll_mask, lnbitsize))
5141 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5143 return build2 (wanted_code, truth_type, result,
5144 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
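/* A minimal standalone sketch (kept under #if 0; build it separately
   to run) of the branch-free rewrites fold_truthop performs before
   the bit-field merging: equality tests against zero combine through
   a bitwise OR of the operands.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, b;

  for (a = -2; a <= 2; a++)
    for (b = -2; b <= 2; b++)
      {
        /* (a != 0) || (b != 0)  becomes  (a | b) != 0.  */
        assert (((a != 0) || (b != 0)) == ((a | b) != 0));
        /* (a == 0) && (b == 0)  becomes  (a | b) == 0.  */
        assert (((a == 0) && (b == 0)) == ((a | b) == 0));
      }
  return 0;
}
#endif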
5147 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5148 constant. */
5150 static tree
5151 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5153 tree arg0 = op0;
5154 enum tree_code op_code;
5155 tree comp_const = op1;
5156 tree minmax_const;
5157 int consts_equal, consts_lt;
5158 tree inner;
5160 STRIP_SIGN_NOPS (arg0);
5162 op_code = TREE_CODE (arg0);
5163 minmax_const = TREE_OPERAND (arg0, 1);
5164 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5165 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5166 inner = TREE_OPERAND (arg0, 0);
5168 /* If something does not permit us to optimize, return the original tree. */
5169 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5170 || TREE_CODE (comp_const) != INTEGER_CST
5171 || TREE_CONSTANT_OVERFLOW (comp_const)
5172 || TREE_CODE (minmax_const) != INTEGER_CST
5173 || TREE_CONSTANT_OVERFLOW (minmax_const))
5174 return NULL_TREE;
5176 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5177 and GT_EXPR, doing the rest with recursive calls using logical
5178 simplifications. */
5179 switch (code)
5181 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5183 /* FIXME: We should be able to invert code without building a
5184 scratch tree node, but doing so would require us to
5185 duplicate a part of invert_truthvalue here. */
5186 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5187 tem = optimize_minmax_comparison (TREE_CODE (tem),
5188 TREE_TYPE (tem),
5189 TREE_OPERAND (tem, 0),
5190 TREE_OPERAND (tem, 1));
5191 return invert_truthvalue (tem);
5194 case GE_EXPR:
5195 return
5196 fold_build2 (TRUTH_ORIF_EXPR, type,
5197 optimize_minmax_comparison
5198 (EQ_EXPR, type, arg0, comp_const),
5199 optimize_minmax_comparison
5200 (GT_EXPR, type, arg0, comp_const));
5202 case EQ_EXPR:
5203 if (op_code == MAX_EXPR && consts_equal)
5204 /* MAX (X, 0) == 0 -> X <= 0 */
5205 return fold_build2 (LE_EXPR, type, inner, comp_const);
5207 else if (op_code == MAX_EXPR && consts_lt)
5208 /* MAX (X, 0) == 5 -> X == 5 */
5209 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5211 else if (op_code == MAX_EXPR)
5212 /* MAX (X, 0) == -1 -> false */
5213 return omit_one_operand (type, integer_zero_node, inner);
5215 else if (consts_equal)
5216 /* MIN (X, 0) == 0 -> X >= 0 */
5217 return fold_build2 (GE_EXPR, type, inner, comp_const);
5219 else if (consts_lt)
5220 /* MIN (X, 0) == 5 -> false */
5221 return omit_one_operand (type, integer_zero_node, inner);
5223 else
5224 /* MIN (X, 0) == -1 -> X == -1 */
5225 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5227 case GT_EXPR:
5228 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5229 /* MAX (X, 0) > 0 -> X > 0
5230 MAX (X, 0) > 5 -> X > 5 */
5231 return fold_build2 (GT_EXPR, type, inner, comp_const);
5233 else if (op_code == MAX_EXPR)
5234 /* MAX (X, 0) > -1 -> true */
5235 return omit_one_operand (type, integer_one_node, inner);
5237 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5238 /* MIN (X, 0) > 0 -> false
5239 MIN (X, 0) > 5 -> false */
5240 return omit_one_operand (type, integer_zero_node, inner);
5242 else
5243 /* MIN (X, 0) > -1 -> X > -1 */
5244 return fold_build2 (GT_EXPR, type, inner, comp_const);
5246 default:
5247 return NULL_TREE;
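/* A minimal standalone sketch (kept under #if 0; build it separately
   to run) of the EQ_EXPR and GT_EXPR identities above, with the
   min/max constant 0 and comparison constants matching the comments
   in the function body.  */
#if 0
#include <assert.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

int
main (void)
{
  int x;

  for (x = -20; x <= 20; x++)
    {
      /* MAX (X, 0) == 0  ->  X <= 0  */
      assert ((MAX (x, 0) == 0) == (x <= 0));
      /* MAX (X, 0) > 0   ->  X > 0  */
      assert ((MAX (x, 0) > 0) == (x > 0));
      /* MIN (X, 0) == -1 ->  X == -1  */
      assert ((MIN (x, 0) == -1) == (x == -1));
      /* MIN (X, 0) > -1  ->  X > -1  */
      assert ((MIN (x, 0) > -1) == (x > -1));
    }
  return 0;
}
#endif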
5251 /* T is an integer expression that is being multiplied or divided by a
5252 constant C, or reduced modulo C (CODE says which operation and what
5253 kind of division or modulus). See if we can eliminate that operation
5254 by folding it with other operations already in T. WIDE_TYPE, if non-null,
5255 is a type that should be used for the computation if wider than our type.
5257 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5258 (X * 2) + (Y * 4). We must, however, be assured that either the original
5259 expression would not overflow or that overflow is undefined for the type
5260 in the language in question.
5262 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5263 the machine has a multiply-accumulate insn or that this is part of an
5264 addressing calculation.
5266 If we return a non-null expression, it is an equivalent form of the
5267 original computation, but need not be in the original type. */
5269 static tree
5270 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5272 /* To avoid exponential search depth, refuse to allow recursion past
5273 three levels. Beyond that (1) it's highly unlikely that we'll find
5274 something interesting and (2) we've probably processed it before
5275 when we built the inner expression. */
5277 static int depth;
5278 tree ret;
5280 if (depth > 3)
5281 return NULL;
5283 depth++;
5284 ret = extract_muldiv_1 (t, c, code, wide_type);
5285 depth--;
5287 return ret;
5290 static tree
5291 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5293 tree type = TREE_TYPE (t);
5294 enum tree_code tcode = TREE_CODE (t);
5295 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5296 > GET_MODE_SIZE (TYPE_MODE (type)))
5297 ? wide_type : type);
5298 tree t1, t2;
5299 int same_p = tcode == code;
5300 tree op0 = NULL_TREE, op1 = NULL_TREE;
5302 /* Don't deal with constants of zero here; they confuse the code below. */
5303 if (integer_zerop (c))
5304 return NULL_TREE;
5306 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5307 op0 = TREE_OPERAND (t, 0);
5309 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5310 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5312 /* Note that we need not handle conditional operations here since fold
5313 already handles those cases. So just do arithmetic here. */
5314 switch (tcode)
5316 case INTEGER_CST:
5317 /* For a constant, we can always simplify if we are a multiply
5318 or (for divide and modulus) if it is a multiple of our constant. */
5319 if (code == MULT_EXPR
5320 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5321 return const_binop (code, fold_convert (ctype, t),
5322 fold_convert (ctype, c), 0);
5323 break;
5325 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5326 /* If op0 is an expression ... */
5327 if ((COMPARISON_CLASS_P (op0)
5328 || UNARY_CLASS_P (op0)
5329 || BINARY_CLASS_P (op0)
5330 || EXPRESSION_CLASS_P (op0))
5331 /* ... and is unsigned, and its type is smaller than ctype,
5332 then we cannot pass through as widening. */
5333 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5334 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5335 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5336 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5337 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5338 /* ... or this is a truncation (t is narrower than op0),
5339 then we cannot pass through this narrowing. */
5340 || (GET_MODE_SIZE (TYPE_MODE (type))
5341 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5342 /* ... or signedness changes for division or modulus,
5343 then we cannot pass through this conversion. */
5344 || (code != MULT_EXPR
5345 && (TYPE_UNSIGNED (ctype)
5346 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5347 break;
5349 /* Pass the constant down and see if we can make a simplification. If
5350 we can, replace this expression with the inner simplification for
5351 possible later conversion to our or some other type. */
5352 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5353 && TREE_CODE (t2) == INTEGER_CST
5354 && ! TREE_CONSTANT_OVERFLOW (t2)
5355 && (0 != (t1 = extract_muldiv (op0, t2, code,
5356 code == MULT_EXPR
5357 ? ctype : NULL_TREE))))
5358 return t1;
5359 break;
5361 case ABS_EXPR:
5362 /* If widening the type changes it from signed to unsigned, then we
5363 must avoid building ABS_EXPR itself as unsigned. */
5364 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5366 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5367 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5369 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5370 return fold_convert (ctype, t1);
5372 break;
5374 /* FALLTHROUGH */
5375 case NEGATE_EXPR:
5376 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5377 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5378 break;
5380 case MIN_EXPR: case MAX_EXPR:
5381 /* If widening the type changes the signedness, then we can't perform
5382 this optimization as that changes the result. */
5383 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5384 break;
5386 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5387 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5388 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5390 if (tree_int_cst_sgn (c) < 0)
5391 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5393 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5394 fold_convert (ctype, t2));
5396 break;
5398 case LSHIFT_EXPR: case RSHIFT_EXPR:
5399 /* If the second operand is constant, this is a multiplication
5400 or floor division by a power of two, so we can treat it that
5401 way unless the multiplier or divisor overflows. Signed
5402 left-shift overflow is implementation-defined rather than
5403 undefined in C90, so do not convert signed left shift into
5404 multiplication. */
5405 if (TREE_CODE (op1) == INTEGER_CST
5406 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5407 /* const_binop may not detect overflow correctly,
5408 so check for it explicitly here. */
5409 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5410 && TREE_INT_CST_HIGH (op1) == 0
5411 && 0 != (t1 = fold_convert (ctype,
5412 const_binop (LSHIFT_EXPR,
5413 size_one_node,
5414 op1, 0)))
5415 && ! TREE_OVERFLOW (t1))
5416 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5417 ? MULT_EXPR : FLOOR_DIV_EXPR,
5418 ctype, fold_convert (ctype, op0), t1),
5419 c, code, wide_type);
5420 break;
5422 case PLUS_EXPR: case MINUS_EXPR:
5423 /* See if we can eliminate the operation on both sides. If we can, we
5424 can return a new PLUS or MINUS. If we can't, the only remaining
5425 cases where we can do anything are if the second operand is a
5426 constant. */
5427 t1 = extract_muldiv (op0, c, code, wide_type);
5428 t2 = extract_muldiv (op1, c, code, wide_type);
5429 if (t1 != 0 && t2 != 0
5430 && (code == MULT_EXPR
5431 /* If not multiplication, we can only do this if both operands
5432 are divisible by c. */
5433 || (multiple_of_p (ctype, op0, c)
5434 && multiple_of_p (ctype, op1, c))))
5435 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5436 fold_convert (ctype, t2));
5438 /* If this was a subtraction, negate OP1 and make the operation an addition.
5439 This simplifies the logic below. */
5440 if (tcode == MINUS_EXPR)
5441 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5443 if (TREE_CODE (op1) != INTEGER_CST)
5444 break;
5446 /* If either OP1 or C is negative, this optimization is not safe for
5447 some of the division and remainder types, while for others we need
5448 to change the code. */
5449 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5451 if (code == CEIL_DIV_EXPR)
5452 code = FLOOR_DIV_EXPR;
5453 else if (code == FLOOR_DIV_EXPR)
5454 code = CEIL_DIV_EXPR;
5455 else if (code != MULT_EXPR
5456 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5457 break;
5460 /* If it's a multiply or a division/modulus operation of a multiple
5461 of our constant, do the operation and verify it doesn't overflow. */
5462 if (code == MULT_EXPR
5463 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5465 op1 = const_binop (code, fold_convert (ctype, op1),
5466 fold_convert (ctype, c), 0);
5467 /* We allow the constant to overflow with wrapping semantics. */
5468 if (op1 == 0
5469 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5470 break;
5472 else
5473 break;
5475 /* If we have an unsigned type that is not a sizetype, we cannot widen
5476 the operation since it will change the result if the original
5477 computation overflowed. */
5478 if (TYPE_UNSIGNED (ctype)
5479 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5480 && ctype != type)
5481 break;
5483 /* If we were able to eliminate our operation from the first side,
5484 apply our operation to the second side and reform the PLUS. */
5485 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5486 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5488 /* The last case is if this is a multiply. In that case, we can
5489 apply the distributive law to commute the multiply and addition
5490 if the multiplication of the constants doesn't overflow. */
5491 if (code == MULT_EXPR)
5492 return fold_build2 (tcode, ctype,
5493 fold_build2 (code, ctype,
5494 fold_convert (ctype, op0),
5495 fold_convert (ctype, c)),
5496 op1);
5498 break;
5500 case MULT_EXPR:
5501 /* We have a special case here if we are doing something like
5502 (C * 8) % 4 since we know that's zero. */
5503 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5504 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5505 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5506 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5507 return omit_one_operand (type, integer_zero_node, op0);
5509 /* ... fall through ... */
5511 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5512 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5513 /* If we can extract our operation from the LHS, do so and return a
5514 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5515 do something only if the second operand is a constant. */
5516 if (same_p
5517 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5518 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5519 fold_convert (ctype, op1));
5520 else if (tcode == MULT_EXPR && code == MULT_EXPR
5521 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5522 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5523 fold_convert (ctype, t1));
5524 else if (TREE_CODE (op1) != INTEGER_CST)
5525 return 0;
5527 /* If these are the same operation types, we can associate them
5528 assuming no overflow. */
5529 if (tcode == code
5530 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5531 fold_convert (ctype, c), 0))
5532 && ! TREE_OVERFLOW (t1))
5533 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5535 /* If these operations "cancel" each other, we have the main
5536 optimizations of this pass, which occur when either constant is a
5537 multiple of the other, in which case we replace this with either an
5538 operation of CODE or TCODE.
5540 If we have an unsigned type that is not a sizetype, we cannot do
5541 this since it will change the result if the original computation
5542 overflowed. */
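/* For example, (x * 8) / 4 folds to x * 2 via the first branch below,
while (x * 4) / 8 folds to x / 2 via the second. */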
5543 if ((! TYPE_UNSIGNED (ctype)
5544 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5545 && ! flag_wrapv
5546 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5547 || (tcode == MULT_EXPR
5548 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5549 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5551 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5552 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5553 fold_convert (ctype,
5554 const_binop (TRUNC_DIV_EXPR,
5555 op1, c, 0)));
5556 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5557 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5558 fold_convert (ctype,
5559 const_binop (TRUNC_DIV_EXPR,
5560 c, op1, 0)));
5562 break;
5564 default:
5565 break;
5568 return 0;
5571 /* Return a node which has the indicated constant VALUE (either 0 or
5572 1), and is of the indicated TYPE. */
5574 tree
5575 constant_boolean_node (int value, tree type)
5577 if (type == integer_type_node)
5578 return value ? integer_one_node : integer_zero_node;
5579 else if (type == boolean_type_node)
5580 return value ? boolean_true_node : boolean_false_node;
5581 else
5582 return build_int_cst (type, value);
5586 /* Return true if EXPR looks like an ARRAY_REF and set BASE and
5587 OFFSET to the appropriate trees. If there is no offset,
5588 OFFSET is set to NULL_TREE. BASE will be canonicalized to
5589 something from which you can get the element type using
5590 TREE_TYPE (TREE_TYPE (BASE)). OFFSET will be the offset
5591 in bytes from the base. */
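/* For example, &a[i] yields a as the base and i times the element
size as the offset, while a plain pointer p yields p as the base and
NULL_TREE as the offset. */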
5593 static bool
5594 extract_array_ref (tree expr, tree *base, tree *offset)
5596 /* One canonical form is a PLUS_EXPR with the first
5597 argument being an ADDR_EXPR with a possible NOP_EXPR
5598 attached. */
5599 if (TREE_CODE (expr) == PLUS_EXPR)
5601 tree op0 = TREE_OPERAND (expr, 0);
5602 tree inner_base, dummy1;
5603 /* Strip NOP_EXPRs here because the C frontends and/or
5604 folders may present us with (int *)&x.a + 4B. */
5605 STRIP_NOPS (op0);
5606 if (extract_array_ref (op0, &inner_base, &dummy1))
5608 *base = inner_base;
5609 if (dummy1 == NULL_TREE)
5610 *offset = TREE_OPERAND (expr, 1);
5611 else
5612 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5613 dummy1, TREE_OPERAND (expr, 1));
5614 return true;
5617 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5618 which we transform into an ADDR_EXPR with appropriate
5619 offset. For other arguments to the ADDR_EXPR we assume
5620 zero offset and as such do not care about the ADDR_EXPR
5621 type and strip possible nops from it. */
5622 else if (TREE_CODE (expr) == ADDR_EXPR)
5624 tree op0 = TREE_OPERAND (expr, 0);
5625 if (TREE_CODE (op0) == ARRAY_REF)
5627 tree idx = TREE_OPERAND (op0, 1);
5628 *base = TREE_OPERAND (op0, 0);
5629 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5630 array_ref_element_size (op0));
5632 else
5634 /* Handle array-to-pointer decay as &a. */
5635 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5636 *base = TREE_OPERAND (expr, 0);
5637 else
5638 *base = expr;
5639 *offset = NULL_TREE;
5641 return true;
5643 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5644 else if (SSA_VAR_P (expr)
5645 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5647 *base = expr;
5648 *offset = NULL_TREE;
5649 return true;
5652 return false;
5656 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5657 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5658 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5659 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5660 COND is the first argument to CODE; otherwise (as in the example
5661 given here), it is the second argument. TYPE is the type of the
5662 original expression. Return NULL_TREE if no simplification is
5663 possible. */
5665 static tree
5666 fold_binary_op_with_conditional_arg (enum tree_code code,
5667 tree type, tree op0, tree op1,
5668 tree cond, tree arg, int cond_first_p)
5670 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5671 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5672 tree test, true_value, false_value;
5673 tree lhs = NULL_TREE;
5674 tree rhs = NULL_TREE;
5676 /* This transformation is only worthwhile if we don't have to wrap
5677 ARG in a SAVE_EXPR, and the operation can be simplified on at least
5678 one of the branches once it's pushed inside the COND_EXPR. */
5679 if (!TREE_CONSTANT (arg))
5680 return NULL_TREE;
5682 if (TREE_CODE (cond) == COND_EXPR)
5684 test = TREE_OPERAND (cond, 0);
5685 true_value = TREE_OPERAND (cond, 1);
5686 false_value = TREE_OPERAND (cond, 2);
5687 /* If this operand is a void expression (such as a throw), it
5688 does not make sense to try to perform a logical or arithmetic
5689 operation involving it. */
5690 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5691 lhs = true_value;
5692 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5693 rhs = false_value;
5695 else
5697 tree testtype = TREE_TYPE (cond);
5698 test = cond;
5699 true_value = constant_boolean_node (true, testtype);
5700 false_value = constant_boolean_node (false, testtype);
5703 arg = fold_convert (arg_type, arg);
5704 if (lhs == 0)
5706 true_value = fold_convert (cond_type, true_value);
5707 if (cond_first_p)
5708 lhs = fold_build2 (code, type, true_value, arg);
5709 else
5710 lhs = fold_build2 (code, type, arg, true_value);
5712 if (rhs == 0)
5714 false_value = fold_convert (cond_type, false_value);
5715 if (cond_first_p)
5716 rhs = fold_build2 (code, type, false_value, arg);
5717 else
5718 rhs = fold_build2 (code, type, arg, false_value);
5721 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5722 return fold_convert (type, test);
5726 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5728 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5729 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5730 ADDEND is the same as X.
5732 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5733 and finite. The problematic cases are when X is zero, and its mode
5734 has signed zeros. In the case of rounding towards -infinity,
5735 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5736 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5738 static bool
5739 fold_real_zero_addition_p (tree type, tree addend, int negate)
5741 if (!real_zerop (addend))
5742 return false;
5744 /* Don't allow the fold with -fsignaling-nans. */
5745 if (HONOR_SNANS (TYPE_MODE (type)))
5746 return false;
5748 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5749 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5750 return true;
5752 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5753 if (TREE_CODE (addend) == REAL_CST
5754 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5755 negate = !negate;
5757 /* The mode has signed zeros, and we have to honor their sign.
5758 In this situation, there is only one case we can return true for.
5759 X - 0 is the same as X unless rounding towards -infinity is
5760 supported. */
5761 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5764 /* Subroutine of fold() that checks comparisons of built-in math
5765 functions against real constants.
5767 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5768 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5769 is the type of the result and ARG0 and ARG1 are the operands of the
5770 comparison. ARG1 must be a TREE_REAL_CST.
5772 The function returns the constant folded tree if a simplification
5773 can be made, and NULL_TREE otherwise. */
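/* For example, when NaNs need not be honored, sqrt(x) < 2.0 folds to
x < 4.0, and sqrt(x) < -1.0 folds to constant false. */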
5775 static tree
5776 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5777 tree type, tree arg0, tree arg1)
5779 REAL_VALUE_TYPE c;
5781 if (BUILTIN_SQRT_P (fcode))
5783 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5784 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5786 c = TREE_REAL_CST (arg1);
5787 if (REAL_VALUE_NEGATIVE (c))
5789 /* sqrt(x) < y is always false, if y is negative. */
5790 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5791 return omit_one_operand (type, integer_zero_node, arg);
5793 /* sqrt(x) > y is always true, if y is negative and we
5794 don't care about NaNs, i.e. negative values of x. */
5795 if (code == NE_EXPR || !HONOR_NANS (mode))
5796 return omit_one_operand (type, integer_one_node, arg);
5798 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5799 return fold_build2 (GE_EXPR, type, arg,
5800 build_real (TREE_TYPE (arg), dconst0));
5802 else if (code == GT_EXPR || code == GE_EXPR)
5804 REAL_VALUE_TYPE c2;
5806 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5807 real_convert (&c2, mode, &c2);
5809 if (REAL_VALUE_ISINF (c2))
5811 /* sqrt(x) > y is x == +Inf, when y is very large. */
5812 if (HONOR_INFINITIES (mode))
5813 return fold_build2 (EQ_EXPR, type, arg,
5814 build_real (TREE_TYPE (arg), c2));
5816 /* sqrt(x) > y is always false, when y is very large
5817 and we don't care about infinities. */
5818 return omit_one_operand (type, integer_zero_node, arg);
5821 /* sqrt(x) > c is the same as x > c*c. */
5822 return fold_build2 (code, type, arg,
5823 build_real (TREE_TYPE (arg), c2));
5825 else if (code == LT_EXPR || code == LE_EXPR)
5827 REAL_VALUE_TYPE c2;
5829 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5830 real_convert (&c2, mode, &c2);
5832 if (REAL_VALUE_ISINF (c2))
5834 /* sqrt(x) < y is always true, when y is a very large
5835 value and we don't care about NaNs or Infinities. */
5836 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5837 return omit_one_operand (type, integer_one_node, arg);
5839 /* sqrt(x) < y is x != +Inf when y is very large and we
5840 don't care about NaNs. */
5841 if (! HONOR_NANS (mode))
5842 return fold_build2 (NE_EXPR, type, arg,
5843 build_real (TREE_TYPE (arg), c2));
5845 /* sqrt(x) < y is x >= 0 when y is very large and we
5846 don't care about Infinities. */
5847 if (! HONOR_INFINITIES (mode))
5848 return fold_build2 (GE_EXPR, type, arg,
5849 build_real (TREE_TYPE (arg), dconst0));
5851 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5852 if (lang_hooks.decls.global_bindings_p () != 0
5853 || CONTAINS_PLACEHOLDER_P (arg))
5854 return NULL_TREE;
5856 arg = save_expr (arg);
5857 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5858 fold_build2 (GE_EXPR, type, arg,
5859 build_real (TREE_TYPE (arg),
5860 dconst0)),
5861 fold_build2 (NE_EXPR, type, arg,
5862 build_real (TREE_TYPE (arg),
5863 c2)));
5866 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5867 if (! HONOR_NANS (mode))
5868 return fold_build2 (code, type, arg,
5869 build_real (TREE_TYPE (arg), c2));
5871 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5872 if (lang_hooks.decls.global_bindings_p () == 0
5873 && ! CONTAINS_PLACEHOLDER_P (arg))
5875 arg = save_expr (arg);
5876 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5877 fold_build2 (GE_EXPR, type, arg,
5878 build_real (TREE_TYPE (arg),
5879 dconst0)),
5880 fold_build2 (code, type, arg,
5881 build_real (TREE_TYPE (arg),
5882 c2)));
5887 return NULL_TREE;
5890 /* Subroutine of fold() that optimizes comparisons against Infinities,
5891 either +Inf or -Inf.
5893 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5894 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5895 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5897 The function returns the constant folded tree if a simplification
5898 can be made, and NULL_TREE otherwise. */
5900 static tree
5901 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5903 enum machine_mode mode;
5904 REAL_VALUE_TYPE max;
5905 tree temp;
5906 bool neg;
5908 mode = TYPE_MODE (TREE_TYPE (arg0));
5910 /* For negative infinity swap the sense of the comparison. */
5911 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5912 if (neg)
5913 code = swap_tree_comparison (code);
5915 switch (code)
5917 case GT_EXPR:
5918 /* x > +Inf is always false, if we ignore sNaNs. */
5919 if (HONOR_SNANS (mode))
5920 return NULL_TREE;
5921 return omit_one_operand (type, integer_zero_node, arg0);
5923 case LE_EXPR:
5924 /* x <= +Inf is always true, if we don't care about NaNs. */
5925 if (! HONOR_NANS (mode))
5926 return omit_one_operand (type, integer_one_node, arg0);
5928 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5929 if (lang_hooks.decls.global_bindings_p () == 0
5930 && ! CONTAINS_PLACEHOLDER_P (arg0))
5932 arg0 = save_expr (arg0);
5933 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5935 break;
5937 case EQ_EXPR:
5938 case GE_EXPR:
5939 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5940 real_maxval (&max, neg, mode);
5941 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5942 arg0, build_real (TREE_TYPE (arg0), max));
5944 case LT_EXPR:
5945 /* x < +Inf is always equal to x <= DBL_MAX. */
5946 real_maxval (&max, neg, mode);
5947 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5948 arg0, build_real (TREE_TYPE (arg0), max));
5950 case NE_EXPR:
5951 /* x != +Inf is always equal to !(x > DBL_MAX). */
5952 real_maxval (&max, neg, mode);
5953 if (! HONOR_NANS (mode))
5954 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5955 arg0, build_real (TREE_TYPE (arg0), max));
5957 /* The transformation below creates non-gimple code and thus is
5958 not appropriate if we are in gimple form. */
5959 if (in_gimple_form)
5960 return NULL_TREE;
5962 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5963 arg0, build_real (TREE_TYPE (arg0), max));
5964 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5966 default:
5967 break;
5970 return NULL_TREE;
5973 /* Subroutine of fold() that optimizes comparisons of a division by
5974 a nonzero integer constant against an integer constant, i.e.
5975 X/C1 op C2.
5977 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5978 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5979 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5981 The function returns the constant folded tree if a simplification
5982 can be made, and NULL_TREE otherwise. */
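/* For example, for unsigned x the comparison x / 4 == 2 holds exactly
when 8 <= x && x <= 11, so it folds to that range check with lo == 8
and hi == 11. */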
5984 static tree
5985 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5987 tree prod, tmp, hi, lo;
5988 tree arg00 = TREE_OPERAND (arg0, 0);
5989 tree arg01 = TREE_OPERAND (arg0, 1);
5990 unsigned HOST_WIDE_INT lpart;
5991 HOST_WIDE_INT hpart;
5992 int overflow;
5994 /* We have to do this the hard way to detect unsigned overflow.
5995 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5996 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5997 TREE_INT_CST_HIGH (arg01),
5998 TREE_INT_CST_LOW (arg1),
5999 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6000 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6001 prod = force_fit_type (prod, -1, overflow, false);
6003 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6005 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6006 lo = prod;
6008 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6009 overflow = add_double (TREE_INT_CST_LOW (prod),
6010 TREE_INT_CST_HIGH (prod),
6011 TREE_INT_CST_LOW (tmp),
6012 TREE_INT_CST_HIGH (tmp),
6013 &lpart, &hpart);
6014 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6015 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6016 TREE_CONSTANT_OVERFLOW (prod));
6018 else if (tree_int_cst_sgn (arg01) >= 0)
6020 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6021 switch (tree_int_cst_sgn (arg1))
6023 case -1:
6024 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6025 hi = prod;
6026 break;
6028 case 0:
6029 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6030 hi = tmp;
6031 break;
6033 case 1:
6034 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6035 lo = prod;
6036 break;
6038 default:
6039 gcc_unreachable ();
6042 else
6044 /* A negative divisor reverses the relational operators. */
6045 code = swap_tree_comparison (code);
6047 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6048 switch (tree_int_cst_sgn (arg1))
6050 case -1:
6051 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6052 lo = prod;
6053 break;
6055 case 0:
6056 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6057 lo = tmp;
6058 break;
6060 case 1:
6061 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6062 hi = prod;
6063 break;
6065 default:
6066 gcc_unreachable ();
6070 switch (code)
6072 case EQ_EXPR:
6073 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6074 return omit_one_operand (type, integer_zero_node, arg00);
6075 if (TREE_OVERFLOW (hi))
6076 return fold_build2 (GE_EXPR, type, arg00, lo);
6077 if (TREE_OVERFLOW (lo))
6078 return fold_build2 (LE_EXPR, type, arg00, hi);
6079 return build_range_check (type, arg00, 1, lo, hi);
6081 case NE_EXPR:
6082 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6083 return omit_one_operand (type, integer_one_node, arg00);
6084 if (TREE_OVERFLOW (hi))
6085 return fold_build2 (LT_EXPR, type, arg00, lo);
6086 if (TREE_OVERFLOW (lo))
6087 return fold_build2 (GT_EXPR, type, arg00, hi);
6088 return build_range_check (type, arg00, 0, lo, hi);
6090 case LT_EXPR:
6091 if (TREE_OVERFLOW (lo))
6092 return omit_one_operand (type, integer_zero_node, arg00);
6093 return fold_build2 (LT_EXPR, type, arg00, lo);
6095 case LE_EXPR:
6096 if (TREE_OVERFLOW (hi))
6097 return omit_one_operand (type, integer_one_node, arg00);
6098 return fold_build2 (LE_EXPR, type, arg00, hi);
6100 case GT_EXPR:
6101 if (TREE_OVERFLOW (hi))
6102 return omit_one_operand (type, integer_zero_node, arg00);
6103 return fold_build2 (GT_EXPR, type, arg00, hi);
6105 case GE_EXPR:
6106 if (TREE_OVERFLOW (lo))
6107 return omit_one_operand (type, integer_one_node, arg00);
6108 return fold_build2 (GE_EXPR, type, arg00, lo);
6110 default:
6111 break;
6114 return NULL_TREE;
6118 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6119 equality/inequality test, then return a simplified form of the test
6120 using a sign test. Otherwise return NULL. TYPE is the desired
6121 result type. */
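/* For example, for a 32-bit int x, (x & 0x80000000) != 0 tests the
sign bit and folds to x < 0, while the == 0 form folds to x >= 0. */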
6123 static tree
6124 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6125 tree result_type)
6127 /* If this is testing a single bit, we can optimize the test. */
6128 if ((code == NE_EXPR || code == EQ_EXPR)
6129 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6130 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6132 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6133 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6134 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6136 if (arg00 != NULL_TREE
6137 /* This is only a win if casting to a signed type is cheap,
6138 i.e. when arg00's type is not a partial mode. */
6139 && TYPE_PRECISION (TREE_TYPE (arg00))
6140 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6142 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6143 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6144 result_type, fold_convert (stype, arg00),
6145 fold_convert (stype, integer_zero_node));
6149 return NULL_TREE;
6152 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6153 equality/inequality test, then return a simplified form of
6154 the test using shifts and logical operations. Otherwise return
6155 NULL. TYPE is the desired result type. */
6157 tree
6158 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6159 tree result_type)
6161 /* If this is testing a single bit, we can optimize the test. */
6162 if ((code == NE_EXPR || code == EQ_EXPR)
6163 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6164 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6166 tree inner = TREE_OPERAND (arg0, 0);
6167 tree type = TREE_TYPE (arg0);
6168 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6169 enum machine_mode operand_mode = TYPE_MODE (type);
6170 int ops_unsigned;
6171 tree signed_type, unsigned_type, intermediate_type;
6172 tree tem;
6174 /* First, see if we can fold the single bit test into a sign-bit
6175 test. */
6176 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6177 result_type);
6178 if (tem)
6179 return tem;
6181 /* Otherwise we have (A & C) != 0 where C is a single bit,
6182 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6183 Similarly for (A & C) == 0. */
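/* For example, (x & 8) != 0 becomes ((x >> 3) & 1), and (x & 8) == 0
becomes (((x >> 3) ^ 1) & 1). */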
6185 /* If INNER is a right shift of a constant and it plus BITNUM does
6186 not overflow, adjust BITNUM and INNER. */
6187 if (TREE_CODE (inner) == RSHIFT_EXPR
6188 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6189 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6190 && bitnum < TYPE_PRECISION (type)
6191 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6192 bitnum - TYPE_PRECISION (type)))
6194 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6195 inner = TREE_OPERAND (inner, 0);
6198 /* If we are going to be able to omit the AND below, we must do our
6199 operations as unsigned. If we must use the AND, we have a choice.
6200 Normally unsigned is faster, but for some machines signed is. */
6201 #ifdef LOAD_EXTEND_OP
6202 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6203 && !flag_syntax_only) ? 0 : 1;
6204 #else
6205 ops_unsigned = 1;
6206 #endif
6208 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6209 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6210 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6211 inner = fold_convert (intermediate_type, inner);
6213 if (bitnum != 0)
6214 inner = build2 (RSHIFT_EXPR, intermediate_type,
6215 inner, size_int (bitnum));
6217 if (code == EQ_EXPR)
6218 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6219 inner, integer_one_node);
6221 /* Put the AND last so it can combine with more things. */
6222 inner = build2 (BIT_AND_EXPR, intermediate_type,
6223 inner, integer_one_node);
6225 /* Make sure to return the proper type. */
6226 inner = fold_convert (result_type, inner);
6228 return inner;
6230 return NULL_TREE;
6233 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6234 such that the evaluation of ARG1 occurs before ARG0. */
6236 static bool
6237 reorder_operands_p (tree arg0, tree arg1)
6239 if (! flag_evaluation_order)
6240 return true;
6241 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6242 return true;
6243 return ! TREE_SIDE_EFFECTS (arg0)
6244 && ! TREE_SIDE_EFFECTS (arg1);
6247 /* Test whether it is preferable to swap two operands, ARG0 and
6248 ARG1, for example because ARG0 is an integer constant and ARG1
6249 isn't. If REORDER is true, only recommend swapping if we can
6250 evaluate the operands in reverse order. */
6252 bool
6253 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6255 STRIP_SIGN_NOPS (arg0);
6256 STRIP_SIGN_NOPS (arg1);
6258 if (TREE_CODE (arg1) == INTEGER_CST)
6259 return 0;
6260 if (TREE_CODE (arg0) == INTEGER_CST)
6261 return 1;
6263 if (TREE_CODE (arg1) == REAL_CST)
6264 return 0;
6265 if (TREE_CODE (arg0) == REAL_CST)
6266 return 1;
6268 if (TREE_CODE (arg1) == COMPLEX_CST)
6269 return 0;
6270 if (TREE_CODE (arg0) == COMPLEX_CST)
6271 return 1;
6273 if (TREE_CONSTANT (arg1))
6274 return 0;
6275 if (TREE_CONSTANT (arg0))
6276 return 1;
6278 if (optimize_size)
6279 return 0;
6281 if (reorder && flag_evaluation_order
6282 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6283 return 0;
6285 if (DECL_P (arg1))
6286 return 0;
6287 if (DECL_P (arg0))
6288 return 1;
6290 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6291 for commutative and comparison operators. Ensuring a canonical
6292 form allows the optimizers to find additional redundancies without
6293 having to explicitly check for both orderings. */
6294 if (TREE_CODE (arg0) == SSA_NAME
6295 && TREE_CODE (arg1) == SSA_NAME
6296 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6297 return 1;
6299 return 0;
6302 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6303 ARG0 is extended to a wider type. */
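/* For example, with a 16-bit short s, (int) s == 1000 folds to
s == (short) 1000, while (int) s < 100000 folds to constant true
because every short is below 100000. */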
6305 static tree
6306 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6308 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6309 tree arg1_unw;
6310 tree shorter_type, outer_type;
6311 tree min, max;
6312 bool above, below;
6314 if (arg0_unw == arg0)
6315 return NULL_TREE;
6316 shorter_type = TREE_TYPE (arg0_unw);
6318 #ifdef HAVE_canonicalize_funcptr_for_compare
6319 /* Disable this optimization if we're casting a function pointer
6320 type on targets that require function pointer canonicalization. */
6321 if (HAVE_canonicalize_funcptr_for_compare
6322 && TREE_CODE (shorter_type) == POINTER_TYPE
6323 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6324 return NULL_TREE;
6325 #endif
6327 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6328 return NULL_TREE;
6330 arg1_unw = get_unwidened (arg1, shorter_type);
6332 /* If possible, express the comparison in the shorter mode. */
6333 if ((code == EQ_EXPR || code == NE_EXPR
6334 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6335 && (TREE_TYPE (arg1_unw) == shorter_type
6336 || (TREE_CODE (arg1_unw) == INTEGER_CST
6337 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6338 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6339 && int_fits_type_p (arg1_unw, shorter_type))))
6340 return fold_build2 (code, type, arg0_unw,
6341 fold_convert (shorter_type, arg1_unw));
6343 if (TREE_CODE (arg1_unw) != INTEGER_CST
6344 || TREE_CODE (shorter_type) != INTEGER_TYPE
6345 || !int_fits_type_p (arg1_unw, shorter_type))
6346 return NULL_TREE;
6348 /* If we are comparing with an integer that does not fit into the range
6349 of the shorter type, the result is known. */
6350 outer_type = TREE_TYPE (arg1_unw);
6351 min = lower_bound_in_type (outer_type, shorter_type);
6352 max = upper_bound_in_type (outer_type, shorter_type);
6354 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6355 max, arg1_unw));
6356 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6357 arg1_unw, min));
6359 switch (code)
6361 case EQ_EXPR:
6362 if (above || below)
6363 return omit_one_operand (type, integer_zero_node, arg0);
6364 break;
6366 case NE_EXPR:
6367 if (above || below)
6368 return omit_one_operand (type, integer_one_node, arg0);
6369 break;
6371 case LT_EXPR:
6372 case LE_EXPR:
6373 if (above)
6374 return omit_one_operand (type, integer_one_node, arg0);
6375 else if (below)
6376 return omit_one_operand (type, integer_zero_node, arg0);
6378 case GT_EXPR:
6379 case GE_EXPR:
6380 if (above)
6381 return omit_one_operand (type, integer_zero_node, arg0);
6382 else if (below)
6383 return omit_one_operand (type, integer_one_node, arg0);
6385 default:
6386 break;
6389 return NULL_TREE;
6392 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6393 ARG0 just the signedness is changed. */
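/* For example, with u of type unsigned int, (int) u == 5 folds to
u == 5U, since only the signedness of the operand changed. */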
6395 static tree
6396 fold_sign_changed_comparison (enum tree_code code, tree type,
6397 tree arg0, tree arg1)
6399 tree arg0_inner, tmp;
6400 tree inner_type, outer_type;
6402 if (TREE_CODE (arg0) != NOP_EXPR
6403 && TREE_CODE (arg0) != CONVERT_EXPR)
6404 return NULL_TREE;
6406 outer_type = TREE_TYPE (arg0);
6407 arg0_inner = TREE_OPERAND (arg0, 0);
6408 inner_type = TREE_TYPE (arg0_inner);
6410 #ifdef HAVE_canonicalize_funcptr_for_compare
6411 /* Disable this optimization if we're casting a function pointer
6412 type on targets that require function pointer canonicalization. */
6413 if (HAVE_canonicalize_funcptr_for_compare
6414 && TREE_CODE (inner_type) == POINTER_TYPE
6415 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6416 return NULL_TREE;
6417 #endif
6419 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6420 return NULL_TREE;
6422 if (TREE_CODE (arg1) != INTEGER_CST
6423 && !((TREE_CODE (arg1) == NOP_EXPR
6424 || TREE_CODE (arg1) == CONVERT_EXPR)
6425 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6426 return NULL_TREE;
6428 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6429 && code != NE_EXPR
6430 && code != EQ_EXPR)
6431 return NULL_TREE;
6433 if (TREE_CODE (arg1) == INTEGER_CST)
6435 tmp = build_int_cst_wide (inner_type,
6436 TREE_INT_CST_LOW (arg1),
6437 TREE_INT_CST_HIGH (arg1));
6438 arg1 = force_fit_type (tmp, 0,
6439 TREE_OVERFLOW (arg1),
6440 TREE_CONSTANT_OVERFLOW (arg1));
6442 else
6443 arg1 = fold_convert (inner_type, arg1);
6445 return fold_build2 (code, type, arg0_inner, arg1);
6448 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6449 step of the array. Reconstructs s and delta in the case of s * delta
6450 being an integer constant (and thus already folded).
6451 ADDR is the address. OP1 is the multiplicative expression.
6452 If the function succeeds, the new address expression is returned. Otherwise
6453 NULL_TREE is returned. */
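/* For example, if a is an array with 4-byte elements, &a[i] + j * 4
becomes &a[i + j], moving the multiplication into the index. */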
6455 static tree
6456 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6458 tree s, delta, step;
6459 tree ref = TREE_OPERAND (addr, 0), pref;
6460 tree ret, pos;
6461 tree itype;
6463 /* Canonicalize op1 into a possibly non-constant delta
6464 and an INTEGER_CST s. */
6465 if (TREE_CODE (op1) == MULT_EXPR)
6467 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6469 STRIP_NOPS (arg0);
6470 STRIP_NOPS (arg1);
6472 if (TREE_CODE (arg0) == INTEGER_CST)
6474 s = arg0;
6475 delta = arg1;
6477 else if (TREE_CODE (arg1) == INTEGER_CST)
6479 s = arg1;
6480 delta = arg0;
6482 else
6483 return NULL_TREE;
6485 else if (TREE_CODE (op1) == INTEGER_CST)
6487 delta = op1;
6488 s = NULL_TREE;
6490 else
6492 /* Treat OP1 as delta * 1. */
6493 delta = op1;
6494 s = integer_one_node;
6497 for (;; ref = TREE_OPERAND (ref, 0))
6499 if (TREE_CODE (ref) == ARRAY_REF)
6501 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6502 if (! itype)
6503 continue;
6505 step = array_ref_element_size (ref);
6506 if (TREE_CODE (step) != INTEGER_CST)
6507 continue;
6509 if (s)
6511 if (! tree_int_cst_equal (step, s))
6512 continue;
6514 else
6516 /* Check whether delta is a multiple of step. */
6517 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6518 if (! tmp)
6519 continue;
6520 delta = tmp;
6523 break;
6526 if (!handled_component_p (ref))
6527 return NULL_TREE;
6530 /* We found a suitable array reference. So copy everything up to it,
6531 and replace the index. */
6533 pref = TREE_OPERAND (addr, 0);
6534 ret = copy_node (pref);
6535 pos = ret;
6537 while (pref != ref)
6539 pref = TREE_OPERAND (pref, 0);
6540 TREE_OPERAND (pos, 0) = copy_node (pref);
6541 pos = TREE_OPERAND (pos, 0);
6544 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6545 fold_convert (itype,
6546 TREE_OPERAND (pos, 1)),
6547 fold_convert (itype, delta));
6549 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6553 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6554 means A >= Y && A != MAX, but in this case we know that
6555 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6557 static tree
6558 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6560 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6562 if (TREE_CODE (bound) == LT_EXPR)
6563 a = TREE_OPERAND (bound, 0);
6564 else if (TREE_CODE (bound) == GT_EXPR)
6565 a = TREE_OPERAND (bound, 1);
6566 else
6567 return NULL_TREE;
6569 typea = TREE_TYPE (a);
6570 if (!INTEGRAL_TYPE_P (typea)
6571 && !POINTER_TYPE_P (typea))
6572 return NULL_TREE;
6574 if (TREE_CODE (ineq) == LT_EXPR)
6576 a1 = TREE_OPERAND (ineq, 1);
6577 y = TREE_OPERAND (ineq, 0);
6579 else if (TREE_CODE (ineq) == GT_EXPR)
6581 a1 = TREE_OPERAND (ineq, 0);
6582 y = TREE_OPERAND (ineq, 1);
6584 else
6585 return NULL_TREE;
6587 if (TREE_TYPE (a1) != typea)
6588 return NULL_TREE;
6590 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6591 if (!integer_onep (diff))
6592 return NULL_TREE;
6594 return fold_build2 (GE_EXPR, type, a, y);
6597 /* Fold a sum or difference of at least one multiplication.
6598 Returns the folded tree or NULL if no simplification could be made. */
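/* For example, x*4 + y*4 becomes (x + y) * 4, and x*12 + y*4 is
handled via the common power-of-two factor 4 as (x*3 + y) * 4. */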
6600 static tree
6601 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6603 tree arg00, arg01, arg10, arg11;
6604 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6606 /* (A * C) +- (B * C) -> (A+-B) * C.
6607 (A * C) +- A -> A * (C+-1).
6608 We are most concerned about the case where C is a constant,
6609 but other combinations show up during loop reduction. Since
6610 it is not difficult, try all four possibilities. */
6612 if (TREE_CODE (arg0) == MULT_EXPR)
6614 arg00 = TREE_OPERAND (arg0, 0);
6615 arg01 = TREE_OPERAND (arg0, 1);
6617 else
6619 arg00 = arg0;
6620 if (!FLOAT_TYPE_P (type))
6621 arg01 = build_int_cst (type, 1);
6622 else
6623 arg01 = build_real (type, dconst1);
6625 if (TREE_CODE (arg1) == MULT_EXPR)
6627 arg10 = TREE_OPERAND (arg1, 0);
6628 arg11 = TREE_OPERAND (arg1, 1);
6630 else
6632 arg10 = arg1;
6633 if (!FLOAT_TYPE_P (type))
6634 arg11 = build_int_cst (type, 1);
6635 else
6636 arg11 = build_real (type, dconst1);
6638 same = NULL_TREE;
6640 if (operand_equal_p (arg01, arg11, 0))
6641 same = arg01, alt0 = arg00, alt1 = arg10;
6642 else if (operand_equal_p (arg00, arg10, 0))
6643 same = arg00, alt0 = arg01, alt1 = arg11;
6644 else if (operand_equal_p (arg00, arg11, 0))
6645 same = arg00, alt0 = arg01, alt1 = arg10;
6646 else if (operand_equal_p (arg01, arg10, 0))
6647 same = arg01, alt0 = arg00, alt1 = arg11;
6649 /* No identical multiplicands; see if we can find a common
6650 power-of-two factor in non-power-of-two multiplies. This
6651 can help in multi-dimensional array access. */
6652 else if (host_integerp (arg01, 0)
6653 && host_integerp (arg11, 0))
6655 HOST_WIDE_INT int01, int11, tmp;
6656 bool swap = false;
6657 tree maybe_same;
6658 int01 = TREE_INT_CST_LOW (arg01);
6659 int11 = TREE_INT_CST_LOW (arg11);
6661 /* Move min of absolute values to int11. */
6662 if ((int01 >= 0 ? int01 : -int01)
6663 < (int11 >= 0 ? int11 : -int11))
6665 tmp = int01, int01 = int11, int11 = tmp;
6666 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6667 maybe_same = arg01;
6668 swap = true;
6670 else
6671 maybe_same = arg11;
6673 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6675 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6676 build_int_cst (TREE_TYPE (arg00),
6677 int01 / int11));
6678 alt1 = arg10;
6679 same = maybe_same;
6680 if (swap)
6681 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6685 if (same)
6686 return fold_build2 (MULT_EXPR, type,
6687 fold_build2 (code, type,
6688 fold_convert (type, alt0),
6689 fold_convert (type, alt1)),
6690 fold_convert (type, same));
6692 return NULL_TREE;
6695 /* Fold a unary expression of code CODE and type TYPE with operand
6696 OP0. Return the folded expression if folding is successful.
6697 Otherwise, return NULL_TREE. */
6699 tree
6700 fold_unary (enum tree_code code, tree type, tree op0)
6702 tree tem;
6703 tree arg0;
6704 enum tree_code_class kind = TREE_CODE_CLASS (code);
6706 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6707 && TREE_CODE_LENGTH (code) == 1);
6709 arg0 = op0;
6710 if (arg0)
6712 if (code == NOP_EXPR || code == CONVERT_EXPR
6713 || code == FLOAT_EXPR || code == ABS_EXPR)
6715 /* Don't use STRIP_NOPS, because signedness of argument type
6716 matters. */
6717 STRIP_SIGN_NOPS (arg0);
6719 else
6721 /* Strip any conversions that don't change the mode. This
6722 is safe for every expression, except for a comparison
6723 expression because its signedness is derived from its
6724 operands.
6726 Note that this is done as an internal manipulation within
6727 the constant folder, in order to find the simplest
6728 representation of the arguments so that their form can be
6729 studied. In any cases, the appropriate type conversions
6730 should be put back in the tree that will get out of the
6731 constant folder. */
6732 STRIP_NOPS (arg0);
6736 if (TREE_CODE_CLASS (code) == tcc_unary)
6738 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6739 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6740 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6741 else if (TREE_CODE (arg0) == COND_EXPR)
6743 tree arg01 = TREE_OPERAND (arg0, 1);
6744 tree arg02 = TREE_OPERAND (arg0, 2);
6745 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6746 arg01 = fold_build1 (code, type, arg01);
6747 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6748 arg02 = fold_build1 (code, type, arg02);
6749 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6750 arg01, arg02);
6752 /* If this was a conversion, and all we did was to move it
6753 inside the COND_EXPR, bring it back out. But leave it if
6754 it is a conversion from integer to integer and the
6755 result precision is no wider than a word since such a
6756 conversion is cheap and may be optimized away by combine,
6757 while it couldn't if it were outside the COND_EXPR. Then return
6758 so we don't get into an infinite recursion loop taking the
6759 conversion out and then back in. */
6761 if ((code == NOP_EXPR || code == CONVERT_EXPR
6762 || code == NON_LVALUE_EXPR)
6763 && TREE_CODE (tem) == COND_EXPR
6764 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6765 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6766 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6767 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6768 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6769 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6770 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6771 && (INTEGRAL_TYPE_P
6772 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6773 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6774 || flag_syntax_only))
6775 tem = build1 (code, type,
6776 build3 (COND_EXPR,
6777 TREE_TYPE (TREE_OPERAND
6778 (TREE_OPERAND (tem, 1), 0)),
6779 TREE_OPERAND (tem, 0),
6780 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6781 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6782 return tem;
6784 else if (COMPARISON_CLASS_P (arg0))
6786 if (TREE_CODE (type) == BOOLEAN_TYPE)
6788 arg0 = copy_node (arg0);
6789 TREE_TYPE (arg0) = type;
6790 return arg0;
6792 else if (TREE_CODE (type) != INTEGER_TYPE)
6793 return fold_build3 (COND_EXPR, type, arg0,
6794 fold_build1 (code, type,
6795 integer_one_node),
6796 fold_build1 (code, type,
6797 integer_zero_node));
6801 switch (code)
6803 case NOP_EXPR:
6804 case FLOAT_EXPR:
6805 case CONVERT_EXPR:
6806 case FIX_TRUNC_EXPR:
6807 case FIX_CEIL_EXPR:
6808 case FIX_FLOOR_EXPR:
6809 case FIX_ROUND_EXPR:
6810 if (TREE_TYPE (op0) == type)
6811 return op0;
6813 /* If we have (type) (a CMP b) and type is an integral type, return
6814 a new expression involving the new type. */
6815 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6816 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6817 TREE_OPERAND (op0, 1));
6819 /* Handle cases of two conversions in a row. */
6820 if (TREE_CODE (op0) == NOP_EXPR
6821 || TREE_CODE (op0) == CONVERT_EXPR)
6823 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6824 tree inter_type = TREE_TYPE (op0);
6825 int inside_int = INTEGRAL_TYPE_P (inside_type);
6826 int inside_ptr = POINTER_TYPE_P (inside_type);
6827 int inside_float = FLOAT_TYPE_P (inside_type);
6828 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6829 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6830 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6831 int inter_int = INTEGRAL_TYPE_P (inter_type);
6832 int inter_ptr = POINTER_TYPE_P (inter_type);
6833 int inter_float = FLOAT_TYPE_P (inter_type);
6834 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6835 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6836 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6837 int final_int = INTEGRAL_TYPE_P (type);
6838 int final_ptr = POINTER_TYPE_P (type);
6839 int final_float = FLOAT_TYPE_P (type);
6840 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6841 unsigned int final_prec = TYPE_PRECISION (type);
6842 int final_unsignedp = TYPE_UNSIGNED (type);
6844 /* In addition to the cases of two conversions in a row
6845 handled below, if we are converting something to its own
6846 type via an object of identical or wider precision, neither
6847 conversion is needed. */
6848 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6849 && ((inter_int && final_int) || (inter_float && final_float))
6850 && inter_prec >= final_prec)
6851 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6853 /* Likewise, if the intermediate and final types are either both
6854 float or both integer, we don't need the middle conversion if
6855 it is wider than the final type and doesn't change the signedness
6856 (for integers). Avoid this if the final type is a pointer
6857 since then we sometimes need the inner conversion. Likewise if
6858 the outer has a precision not equal to the size of its mode. */
6859 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6860 || (inter_float && inside_float)
6861 || (inter_vec && inside_vec))
6862 && inter_prec >= inside_prec
6863 && (inter_float || inter_vec
6864 || inter_unsignedp == inside_unsignedp)
6865 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6866 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6867 && ! final_ptr
6868 && (! final_vec || inter_prec == inside_prec))
6869 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6871 /* If we have a sign-extension of a zero-extended value, we can
6872 replace that by a single zero-extension. */
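/* For example, on a target with 8-bit chars and 16-bit shorts,
(int) (short) u with u of type unsigned char becomes (int) u, a
single zero-extension. */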
6873 if (inside_int && inter_int && final_int
6874 && inside_prec < inter_prec && inter_prec < final_prec
6875 && inside_unsignedp && !inter_unsignedp)
6876 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6878 /* Two conversions in a row are not needed unless:
6879 - some conversion is floating-point (overstrict for now), or
6880 - some conversion is a vector (overstrict for now), or
6881 - the intermediate type is narrower than both initial and
6882 final, or
6883 - the intermediate type and innermost type differ in signedness,
6884 and the outermost type is wider than the intermediate, or
6885 - the initial type is a pointer type and the precisions of the
6886 intermediate and final types differ, or
6887 - the final type is a pointer type and the precisions of the
6888 initial and intermediate types differ. */
6889 if (! inside_float && ! inter_float && ! final_float
6890 && ! inside_vec && ! inter_vec && ! final_vec
6891 && (inter_prec > inside_prec || inter_prec > final_prec)
6892 && ! (inside_int && inter_int
6893 && inter_unsignedp != inside_unsignedp
6894 && inter_prec < final_prec)
6895 && ((inter_unsignedp && inter_prec > inside_prec)
6896 == (final_unsignedp && final_prec > inter_prec))
6897 && ! (inside_ptr && inter_prec != final_prec)
6898 && ! (final_ptr && inside_prec != inter_prec)
6899 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6900 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6901 && ! final_ptr)
6902 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6905 /* Handle (T *)&A.B.C for A being of type T and B and C
6906 living at offset zero. This occurs frequently in
6907 C++ upcasting and then accessing the base. */
6908 if (TREE_CODE (op0) == ADDR_EXPR
6909 && POINTER_TYPE_P (type)
6910 && handled_component_p (TREE_OPERAND (op0, 0)))
6912 HOST_WIDE_INT bitsize, bitpos;
6913 tree offset;
6914 enum machine_mode mode;
6915 int unsignedp, volatilep;
6916 tree base = TREE_OPERAND (op0, 0);
6917 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6918 &mode, &unsignedp, &volatilep, false);
6919 /* If the reference was to a (constant) zero offset, we can use
6920 the address of the base if it has the same base type
6921 as the result type. */
6922 if (! offset && bitpos == 0
6923 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6924 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6925 return fold_convert (type, build_fold_addr_expr (base));
6928 if (TREE_CODE (op0) == MODIFY_EXPR
6929 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6930 /* Detect assigning a bitfield. */
6931 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6932 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6934 /* Don't leave an assignment inside a conversion
6935 unless assigning a bitfield. */
6936 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6937 /* First do the assignment, then return converted constant. */
6938 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6939 TREE_NO_WARNING (tem) = 1;
6940 TREE_USED (tem) = 1;
6941 return tem;
6944 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6945 constant (if x has signed type, the sign bit cannot be set
6946 in c). This folds extension into the BIT_AND_EXPR. */
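/* For example, (unsigned long) (x & 0xff) with x of type unsigned int
becomes (unsigned long) x & 0xff, folding the extension into the
BIT_AND_EXPR. */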
6947 if (INTEGRAL_TYPE_P (type)
6948 && TREE_CODE (type) != BOOLEAN_TYPE
6949 && TREE_CODE (op0) == BIT_AND_EXPR
6950 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6952 tree and = op0;
6953 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6954 int change = 0;
6956 if (TYPE_UNSIGNED (TREE_TYPE (and))
6957 || (TYPE_PRECISION (type)
6958 <= TYPE_PRECISION (TREE_TYPE (and))))
6959 change = 1;
6960 else if (TYPE_PRECISION (TREE_TYPE (and1))
6961 <= HOST_BITS_PER_WIDE_INT
6962 && host_integerp (and1, 1))
6964 unsigned HOST_WIDE_INT cst;
6966 cst = tree_low_cst (and1, 1);
6967 cst &= (HOST_WIDE_INT) -1
6968 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6969 change = (cst == 0);
6970 #ifdef LOAD_EXTEND_OP
6971 if (change
6972 && !flag_syntax_only
6973 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6974 == ZERO_EXTEND))
6976 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6977 and0 = fold_convert (uns, and0);
6978 and1 = fold_convert (uns, and1);
6980 #endif
6982 if (change)
6984 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6985 TREE_INT_CST_HIGH (and1));
6986 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6987 TREE_CONSTANT_OVERFLOW (and1));
6988 return fold_build2 (BIT_AND_EXPR, type,
6989 fold_convert (type, and0), tem);
6993 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6994 T2 being pointers to types of the same size. */
6995 if (POINTER_TYPE_P (type)
6996 && BINARY_CLASS_P (arg0)
6997 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6998 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7000 tree arg00 = TREE_OPERAND (arg0, 0);
7001 tree t0 = type;
7002 tree t1 = TREE_TYPE (arg00);
7003 tree tt0 = TREE_TYPE (t0);
7004 tree tt1 = TREE_TYPE (t1);
7005 tree s0 = TYPE_SIZE (tt0);
7006 tree s1 = TYPE_SIZE (tt1);
7008 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7009 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7010 TREE_OPERAND (arg0, 1));
7013 tem = fold_convert_const (code, type, arg0);
7014 return tem ? tem : NULL_TREE;
7016 case VIEW_CONVERT_EXPR:
7017 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7018 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7019 return NULL_TREE;
7021 case NEGATE_EXPR:
7022 if (negate_expr_p (arg0))
7023 return fold_convert (type, negate_expr (arg0));
7024 return NULL_TREE;
7026 case ABS_EXPR:
7027 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7028 return fold_abs_const (arg0, type);
7029 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7030 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7031 /* Convert fabs((double)float) into (double)fabsf(float). */
7032 else if (TREE_CODE (arg0) == NOP_EXPR
7033 && TREE_CODE (type) == REAL_TYPE)
7035 tree targ0 = strip_float_extensions (arg0);
7036 if (targ0 != arg0)
7037 return fold_convert (type, fold_build1 (ABS_EXPR,
7038 TREE_TYPE (targ0),
7039 targ0));
7041 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7042 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7043 return arg0;
7045 /* Strip sign ops from argument. */
7046 if (TREE_CODE (type) == REAL_TYPE)
7048 tem = fold_strip_sign_ops (arg0);
7049 if (tem)
7050 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7052 return NULL_TREE;
7054 case CONJ_EXPR:
7055 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7056 return fold_convert (type, arg0);
7057 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7058 return build2 (COMPLEX_EXPR, type,
7059 TREE_OPERAND (arg0, 0),
7060 negate_expr (TREE_OPERAND (arg0, 1)));
7061 else if (TREE_CODE (arg0) == COMPLEX_CST)
7062 return build_complex (type, TREE_REALPART (arg0),
7063 negate_expr (TREE_IMAGPART (arg0)));
7064 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7065 return fold_build2 (TREE_CODE (arg0), type,
7066 fold_build1 (CONJ_EXPR, type,
7067 TREE_OPERAND (arg0, 0)),
7068 fold_build1 (CONJ_EXPR, type,
7069 TREE_OPERAND (arg0, 1)));
7070 else if (TREE_CODE (arg0) == CONJ_EXPR)
7071 return TREE_OPERAND (arg0, 0);
7072 return NULL_TREE;
7074 case BIT_NOT_EXPR:
7075 if (TREE_CODE (arg0) == INTEGER_CST)
7076 return fold_not_const (arg0, type);
7077 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7078 return TREE_OPERAND (arg0, 0);
7079 /* Convert ~ (-A) to A - 1. */
7080 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7081 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7082 build_int_cst (type, 1));
7083 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7084 else if (INTEGRAL_TYPE_P (type)
7085 && ((TREE_CODE (arg0) == MINUS_EXPR
7086 && integer_onep (TREE_OPERAND (arg0, 1)))
7087 || (TREE_CODE (arg0) == PLUS_EXPR
7088 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7089 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7090 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7091 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7092 && (tem = fold_unary (BIT_NOT_EXPR, type,
7093 fold_convert (type,
7094 TREE_OPERAND (arg0, 0)))))
7095 return fold_build2 (BIT_XOR_EXPR, type, tem,
7096 fold_convert (type, TREE_OPERAND (arg0, 1)));
7097 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7098 && (tem = fold_unary (BIT_NOT_EXPR, type,
7099 fold_convert (type,
7100 TREE_OPERAND (arg0, 1)))))
7101 return fold_build2 (BIT_XOR_EXPR, type,
7102 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7104 return NULL_TREE;
7106 case TRUTH_NOT_EXPR:
7107 /* The argument to invert_truthvalue must have Boolean type. */
7108 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7109 arg0 = fold_convert (boolean_type_node, arg0);
7111 /* Note that the operand of this must be an int
7112 and its values must be 0 or 1.
7113 ("true" is a fixed value perhaps depending on the language,
7114 but we don't handle values other than 1 correctly yet.) */
7115 tem = invert_truthvalue (arg0);
7116 /* Avoid infinite recursion. */
7117 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7118 return NULL_TREE;
7119 return fold_convert (type, tem);
7121 case REALPART_EXPR:
7122 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7123 return NULL_TREE;
7124 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7125 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7126 TREE_OPERAND (arg0, 1));
7127 else if (TREE_CODE (arg0) == COMPLEX_CST)
7128 return TREE_REALPART (arg0);
7129 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7130 return fold_build2 (TREE_CODE (arg0), type,
7131 fold_build1 (REALPART_EXPR, type,
7132 TREE_OPERAND (arg0, 0)),
7133 fold_build1 (REALPART_EXPR, type,
7134 TREE_OPERAND (arg0, 1)));
7135 return NULL_TREE;
7137 case IMAGPART_EXPR:
7138 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7139 return fold_convert (type, integer_zero_node);
7140 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7141 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7142 TREE_OPERAND (arg0, 0));
7143 else if (TREE_CODE (arg0) == COMPLEX_CST)
7144 return TREE_IMAGPART (arg0);
7145 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7146 return fold_build2 (TREE_CODE (arg0), type,
7147 fold_build1 (IMAGPART_EXPR, type,
7148 TREE_OPERAND (arg0, 0)),
7149 fold_build1 (IMAGPART_EXPR, type,
7150 TREE_OPERAND (arg0, 1)));
7151 return NULL_TREE;
7153 default:
7154 return NULL_TREE;
7155 } /* switch (code) */
7158 /* Fold a binary expression of code CODE and type TYPE with operands
7159 OP0 and OP1. Return the folded expression if folding is
7160 successful. Otherwise, return NULL_TREE. */
7162 tree
7163 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7165 tree t1 = NULL_TREE;
7166 tree tem;
7167 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7168 enum tree_code_class kind = TREE_CODE_CLASS (code);
7170 /* WINS will be nonzero when the switch is done
7171 if all operands are constant. */
7172 int wins = 1;
7174 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7175 && TREE_CODE_LENGTH (code) == 2);
7177 arg0 = op0;
7178 arg1 = op1;
7180 if (arg0)
7182 tree subop;
7184 /* Strip any conversions that don't change the mode. This is
7185 safe for every expression, except for a comparison expression
7186 because its signedness is derived from its operands. So, in
7187 the latter case, only strip conversions that don't change the
7188 signedness.
7190 Note that this is done as an internal manipulation within the
7191 constant folder, in order to find the simplest representation
7192 of the arguments so that their form can be studied. In any
7193 cases, the appropriate type conversions should be put back in
7194 the tree that will get out of the constant folder. */
7195 if (kind == tcc_comparison)
7196 STRIP_SIGN_NOPS (arg0);
7197 else
7198 STRIP_NOPS (arg0);
7200 if (TREE_CODE (arg0) == COMPLEX_CST)
7201 subop = TREE_REALPART (arg0);
7202 else
7203 subop = arg0;
7205 if (TREE_CODE (subop) != INTEGER_CST
7206 && TREE_CODE (subop) != REAL_CST)
7207 /* Note that TREE_CONSTANT isn't enough:
7208 static var addresses are constant but we can't
7209 do arithmetic on them. */
7210 wins = 0;
7213 if (arg1)
7215 tree subop;
7217 /* Strip any conversions that don't change the mode. This is
7218 safe for every expression, except for a comparison expression
7219 because its signedness is derived from its operands. So, in
7220 the latter case, only strip conversions that don't change the
7221 signedness.
7223 Note that this is done as an internal manipulation within the
7224 constant folder, in order to find the simplest representation
7225 of the arguments so that their form can be studied. In any
7226 cases, the appropriate type conversions should be put back in
7227 the tree that will get out of the constant folder. */
7228 if (kind == tcc_comparison)
7229 STRIP_SIGN_NOPS (arg1);
7230 else
7231 STRIP_NOPS (arg1);
7233 if (TREE_CODE (arg1) == COMPLEX_CST)
7234 subop = TREE_REALPART (arg1);
7235 else
7236 subop = arg1;
7238 if (TREE_CODE (subop) != INTEGER_CST
7239 && TREE_CODE (subop) != REAL_CST)
7240 /* Note that TREE_CONSTANT isn't enough:
7241 static var addresses are constant but we can't
7242 do arithmetic on them. */
7243 wins = 0;
7246 /* If this is a commutative operation, and ARG0 is a constant, move it
7247 to ARG1 to reduce the number of tests below. */
7248 if (commutative_tree_code (code)
7249 && tree_swap_operands_p (arg0, arg1, true))
7250 return fold_build2 (code, type, op1, op0);
7252 /* Now WINS is set as described above,
7253 ARG0 is the first operand of EXPR,
7254 and ARG1 is the second operand (if it has more than one operand).
7256 First check for cases where an arithmetic operation is applied to a
7257 compound, conditional, or comparison operation. Push the arithmetic
7258 operation inside the compound or conditional to see if any folding
7259 can then be done. Convert comparison to conditional for this purpose.
7260 This also optimizes non-constant cases that used to be done in
7261 expand_expr.
7263 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7264 where one of the operands is a truth value and the other is a truth
7265 value or a BIT_AND_EXPR with the constant 1.  In that case, the
7266 code below would make the expression more complex. Change it to a
7267 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7268 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7270 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7271 || code == EQ_EXPR || code == NE_EXPR)
7272 && ((truth_value_p (TREE_CODE (arg0))
7273 && (truth_value_p (TREE_CODE (arg1))
7274 || (TREE_CODE (arg1) == BIT_AND_EXPR
7275 && integer_onep (TREE_OPERAND (arg1, 1)))))
7276 || (truth_value_p (TREE_CODE (arg1))
7277 && (truth_value_p (TREE_CODE (arg0))
7278 || (TREE_CODE (arg0) == BIT_AND_EXPR
7279 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7281 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7282 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7283 : TRUTH_XOR_EXPR,
7284 boolean_type_node,
7285 fold_convert (boolean_type_node, arg0),
7286 fold_convert (boolean_type_node, arg1));
7288 if (code == EQ_EXPR)
7289 tem = invert_truthvalue (tem);
7291 return fold_convert (type, tem);
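      /* Illustrative example (editor's note, not in the original source):
         with int operands, (a < b) & (c < d) is rewritten here as a
         TRUTH_AND_EXPR (a non-short-circuit logical AND), and
         (a < b) == (c < d) becomes the inversion of (a < b) ^ (c < d).  */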
7294 if (TREE_CODE_CLASS (code) == tcc_binary
7295 || TREE_CODE_CLASS (code) == tcc_comparison)
7297 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7298 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7299 fold_build2 (code, type,
7300 TREE_OPERAND (arg0, 1), op1));
7301 if (TREE_CODE (arg1) == COMPOUND_EXPR
7302 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7303 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7304 fold_build2 (code, type,
7305 op0, TREE_OPERAND (arg1, 1)));
7307 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7309 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7310 arg0, arg1,
7311 /*cond_first_p=*/1);
7312 if (tem != NULL_TREE)
7313 return tem;
7316 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7318 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7319 arg1, arg0,
7320 /*cond_first_p=*/0);
7321 if (tem != NULL_TREE)
7322 return tem;
7326 switch (code)
7328 case PLUS_EXPR:
7329 /* A + (-B) -> A - B */
7330 if (TREE_CODE (arg1) == NEGATE_EXPR)
7331 return fold_build2 (MINUS_EXPR, type,
7332 fold_convert (type, arg0),
7333 fold_convert (type, TREE_OPERAND (arg1, 0)));
7334 /* (-A) + B -> B - A */
7335 if (TREE_CODE (arg0) == NEGATE_EXPR
7336 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7337 return fold_build2 (MINUS_EXPR, type,
7338 fold_convert (type, arg1),
7339 fold_convert (type, TREE_OPERAND (arg0, 0)));
7340 /* Convert ~A + 1 to -A. */
7341 if (INTEGRAL_TYPE_P (type)
7342 && TREE_CODE (arg0) == BIT_NOT_EXPR
7343 && integer_onep (arg1))
7344 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
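      /* Illustrative example (not in the original source): for int x,
         ~x + 1 folds to -x, the two's-complement negation identity.  */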
7346 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
7347 same or one. */
7348 if ((TREE_CODE (arg0) == MULT_EXPR
7349 || TREE_CODE (arg1) == MULT_EXPR)
7350 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7352 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7353 if (tem)
7354 return tem;
7357 if (! FLOAT_TYPE_P (type))
7359 if (integer_zerop (arg1))
7360 return non_lvalue (fold_convert (type, arg0));
7362 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7363 with a constant, and the two constants have no bits in common,
7364 we should treat this as a BIT_IOR_EXPR since this may produce more
7365 simplifications. */
7366 if (TREE_CODE (arg0) == BIT_AND_EXPR
7367 && TREE_CODE (arg1) == BIT_AND_EXPR
7368 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7369 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7370 && integer_zerop (const_binop (BIT_AND_EXPR,
7371 TREE_OPERAND (arg0, 1),
7372 TREE_OPERAND (arg1, 1), 0)))
7374 code = BIT_IOR_EXPR;
7375 goto bit_ior;
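      /* Illustrative example (not in the original source): in
         (x & 0xF0) + (y & 0x0F) the two masks share no bits, so no
         carries can occur and the addition is treated as a BIT_IOR_EXPR.  */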
7378 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7379 (plus (plus (mult) (mult)) (foo)) so that we can
7380 take advantage of the factoring cases below. */
7381 if (((TREE_CODE (arg0) == PLUS_EXPR
7382 || TREE_CODE (arg0) == MINUS_EXPR)
7383 && TREE_CODE (arg1) == MULT_EXPR)
7384 || ((TREE_CODE (arg1) == PLUS_EXPR
7385 || TREE_CODE (arg1) == MINUS_EXPR)
7386 && TREE_CODE (arg0) == MULT_EXPR))
7388 tree parg0, parg1, parg, marg;
7389 enum tree_code pcode;
7391 if (TREE_CODE (arg1) == MULT_EXPR)
7392 parg = arg0, marg = arg1;
7393 else
7394 parg = arg1, marg = arg0;
7395 pcode = TREE_CODE (parg);
7396 parg0 = TREE_OPERAND (parg, 0);
7397 parg1 = TREE_OPERAND (parg, 1);
7398 STRIP_NOPS (parg0);
7399 STRIP_NOPS (parg1);
7401 if (TREE_CODE (parg0) == MULT_EXPR
7402 && TREE_CODE (parg1) != MULT_EXPR)
7403 return fold_build2 (pcode, type,
7404 fold_build2 (PLUS_EXPR, type,
7405 fold_convert (type, parg0),
7406 fold_convert (type, marg)),
7407 fold_convert (type, parg1));
7408 if (TREE_CODE (parg0) != MULT_EXPR
7409 && TREE_CODE (parg1) == MULT_EXPR)
7410 return fold_build2 (PLUS_EXPR, type,
7411 fold_convert (type, parg0),
7412 fold_build2 (pcode, type,
7413 fold_convert (type, marg),
7414 fold_convert (type,
7415 parg1)));
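      /* Illustrative example (not in the original source): (x*2 + y) + z*3
         is reassociated to (x*2 + z*3) + y so that the two MULT_EXPRs
         become adjacent for the factoring transformations below.  */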
7418 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7419 of the array.  The loop optimizer sometimes produces this type of
7420 expression. */
7421 if (TREE_CODE (arg0) == ADDR_EXPR)
7423 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7424 if (tem)
7425 return fold_convert (type, tem);
7427 else if (TREE_CODE (arg1) == ADDR_EXPR)
7429 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7430 if (tem)
7431 return fold_convert (type, tem);
7434 else
7436 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7437 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7438 return non_lvalue (fold_convert (type, arg0));
7440 /* Likewise if the operands are reversed. */
7441 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7442 return non_lvalue (fold_convert (type, arg1));
7444 /* Convert X + -C into X - C. */
7445 if (TREE_CODE (arg1) == REAL_CST
7446 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7448 tem = fold_negate_const (arg1, type);
7449 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7450 return fold_build2 (MINUS_EXPR, type,
7451 fold_convert (type, arg0),
7452 fold_convert (type, tem));
7455 if (flag_unsafe_math_optimizations
7456 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7457 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7458 && (tem = distribute_real_division (code, type, arg0, arg1)))
7459 return tem;
7461 /* Convert x+x into x*2.0. */
7462 if (operand_equal_p (arg0, arg1, 0)
7463 && SCALAR_FLOAT_TYPE_P (type))
7464 return fold_build2 (MULT_EXPR, type, arg0,
7465 build_real (type, dconst2));
7467 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7468 if (flag_unsafe_math_optimizations
7469 && TREE_CODE (arg1) == PLUS_EXPR
7470 && TREE_CODE (arg0) != MULT_EXPR)
7472 tree tree10 = TREE_OPERAND (arg1, 0);
7473 tree tree11 = TREE_OPERAND (arg1, 1);
7474 if (TREE_CODE (tree11) == MULT_EXPR
7475 && TREE_CODE (tree10) == MULT_EXPR)
7477 tree tree0;
7478 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7479 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7482 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7483 if (flag_unsafe_math_optimizations
7484 && TREE_CODE (arg0) == PLUS_EXPR
7485 && TREE_CODE (arg1) != MULT_EXPR)
7487 tree tree00 = TREE_OPERAND (arg0, 0);
7488 tree tree01 = TREE_OPERAND (arg0, 1);
7489 if (TREE_CODE (tree01) == MULT_EXPR
7490 && TREE_CODE (tree00) == MULT_EXPR)
7492 tree tree0;
7493 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7494 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7499 bit_rotate:
7500 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7501 is a rotate of A by C1 bits. */
7502 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7503 is a rotate of A by B bits. */
7505 enum tree_code code0, code1;
7506 code0 = TREE_CODE (arg0);
7507 code1 = TREE_CODE (arg1);
7508 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7509 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7510 && operand_equal_p (TREE_OPERAND (arg0, 0),
7511 TREE_OPERAND (arg1, 0), 0)
7512 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7514 tree tree01, tree11;
7515 enum tree_code code01, code11;
7517 tree01 = TREE_OPERAND (arg0, 1);
7518 tree11 = TREE_OPERAND (arg1, 1);
7519 STRIP_NOPS (tree01);
7520 STRIP_NOPS (tree11);
7521 code01 = TREE_CODE (tree01);
7522 code11 = TREE_CODE (tree11);
7523 if (code01 == INTEGER_CST
7524 && code11 == INTEGER_CST
7525 && TREE_INT_CST_HIGH (tree01) == 0
7526 && TREE_INT_CST_HIGH (tree11) == 0
7527 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7528 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7529 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7530 code0 == LSHIFT_EXPR ? tree01 : tree11);
7531 else if (code11 == MINUS_EXPR)
7533 tree tree110, tree111;
7534 tree110 = TREE_OPERAND (tree11, 0);
7535 tree111 = TREE_OPERAND (tree11, 1);
7536 STRIP_NOPS (tree110);
7537 STRIP_NOPS (tree111);
7538 if (TREE_CODE (tree110) == INTEGER_CST
7539 && 0 == compare_tree_int (tree110,
7540 TYPE_PRECISION
7541 (TREE_TYPE (TREE_OPERAND
7542 (arg0, 0))))
7543 && operand_equal_p (tree01, tree111, 0))
7544 return build2 ((code0 == LSHIFT_EXPR
7545 ? LROTATE_EXPR
7546 : RROTATE_EXPR),
7547 type, TREE_OPERAND (arg0, 0), tree01);
7549 else if (code01 == MINUS_EXPR)
7551 tree tree010, tree011;
7552 tree010 = TREE_OPERAND (tree01, 0);
7553 tree011 = TREE_OPERAND (tree01, 1);
7554 STRIP_NOPS (tree010);
7555 STRIP_NOPS (tree011);
7556 if (TREE_CODE (tree010) == INTEGER_CST
7557 && 0 == compare_tree_int (tree010,
7558 TYPE_PRECISION
7559 (TREE_TYPE (TREE_OPERAND
7560 (arg0, 0))))
7561 && operand_equal_p (tree11, tree011, 0))
7562 return build2 ((code0 != LSHIFT_EXPR
7563 ? LROTATE_EXPR
7564 : RROTATE_EXPR),
7565 type, TREE_OPERAND (arg0, 0), tree11);
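      /* Illustrative example (not in the original source): for a 32-bit
         unsigned x, (x << 5) + (x >> 27) has C1 + C2 == 32 and folds to
         an LROTATE_EXPR of x by 5; the variable form
         (x << b) + (x >> (32 - b)) folds to a rotate by b.  */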
7570 associate:
7571 /* In most languages, we can't associate operations on floats through
7572 parentheses. Rather than remember where the parentheses were, we
7573 don't associate floats at all, unless the user has specified
7574 -funsafe-math-optimizations. */
7576 if (! wins
7577 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7579 tree var0, con0, lit0, minus_lit0;
7580 tree var1, con1, lit1, minus_lit1;
7582 /* Split both trees into variables, constants, and literals. Then
7583 associate each group together, the constants with literals,
7584 then the result with variables. This increases the chances of
7585 literals being recombined later and of generating relocatable
7586 expressions for the sum of a constant and literal. */
7587 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7588 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7589 code == MINUS_EXPR);
7591 /* Only do something if we found more than two objects. Otherwise,
7592 nothing has changed and we risk infinite recursion. */
7593 if (2 < ((var0 != 0) + (var1 != 0)
7594 + (con0 != 0) + (con1 != 0)
7595 + (lit0 != 0) + (lit1 != 0)
7596 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7598 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7599 if (code == MINUS_EXPR)
7600 code = PLUS_EXPR;
7602 var0 = associate_trees (var0, var1, code, type);
7603 con0 = associate_trees (con0, con1, code, type);
7604 lit0 = associate_trees (lit0, lit1, code, type);
7605 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7607 /* Preserve the MINUS_EXPR if the negative part of the literal is
7608 greater than the positive part. Otherwise, the multiplicative
7609 folding code (i.e. extract_muldiv) may be fooled when
7610 unsigned constants are subtracted, as in the following
7611 example: ((X*2 + 4) - 8U)/2. */
7612 if (minus_lit0 && lit0)
7614 if (TREE_CODE (lit0) == INTEGER_CST
7615 && TREE_CODE (minus_lit0) == INTEGER_CST
7616 && tree_int_cst_lt (lit0, minus_lit0))
7618 minus_lit0 = associate_trees (minus_lit0, lit0,
7619 MINUS_EXPR, type);
7620 lit0 = 0;
7622 else
7624 lit0 = associate_trees (lit0, minus_lit0,
7625 MINUS_EXPR, type);
7626 minus_lit0 = 0;
7629 if (minus_lit0)
7631 if (con0 == 0)
7632 return fold_convert (type,
7633 associate_trees (var0, minus_lit0,
7634 MINUS_EXPR, type));
7635 else
7637 con0 = associate_trees (con0, minus_lit0,
7638 MINUS_EXPR, type);
7639 return fold_convert (type,
7640 associate_trees (var0, con0,
7641 PLUS_EXPR, type));
7645 con0 = associate_trees (con0, lit0, code, type);
7646 return fold_convert (type, associate_trees (var0, con0,
7647 code, type));
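      /* Illustrative example (not in the original source): (x + 4) + (y + 5)
         splits into variables {x, y} and literals {4, 5}, which then
         reassociate to x + y + 9.  */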
7651 binary:
7652 if (wins)
7653 t1 = const_binop (code, arg0, arg1, 0);
7654 if (t1 != NULL_TREE)
7656 /* The return value should always have
7657 the same type as the original expression. */
7658 if (TREE_TYPE (t1) != type)
7659 t1 = fold_convert (type, t1);
7661 return t1;
7663 return NULL_TREE;
7665 case MINUS_EXPR:
7666 /* A - (-B) -> A + B */
7667 if (TREE_CODE (arg1) == NEGATE_EXPR)
7668 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7669 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7670 if (TREE_CODE (arg0) == NEGATE_EXPR
7671 && (FLOAT_TYPE_P (type)
7672 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7673 && negate_expr_p (arg1)
7674 && reorder_operands_p (arg0, arg1))
7675 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7676 TREE_OPERAND (arg0, 0));
7677 /* Convert -A - 1 to ~A. */
7678 if (INTEGRAL_TYPE_P (type)
7679 && TREE_CODE (arg0) == NEGATE_EXPR
7680 && integer_onep (arg1))
7681 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7683 /* Convert -1 - A to ~A. */
7684 if (INTEGRAL_TYPE_P (type)
7685 && integer_all_onesp (arg0))
7686 return fold_build1 (BIT_NOT_EXPR, type, arg1);
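      /* Illustrative example (not in the original source): for int a,
         -1 - a folds to ~a, since ~a == -a - 1 in two's complement.  */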
7688 if (! FLOAT_TYPE_P (type))
7690 if (! wins && integer_zerop (arg0))
7691 return negate_expr (fold_convert (type, arg1));
7692 if (integer_zerop (arg1))
7693 return non_lvalue (fold_convert (type, arg0));
7695 /* Fold A - (A & B) into ~B & A. */
7696 if (!TREE_SIDE_EFFECTS (arg0)
7697 && TREE_CODE (arg1) == BIT_AND_EXPR)
7699 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7700 return fold_build2 (BIT_AND_EXPR, type,
7701 fold_build1 (BIT_NOT_EXPR, type,
7702 TREE_OPERAND (arg1, 0)),
7703 arg0);
7704 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7705 return fold_build2 (BIT_AND_EXPR, type,
7706 fold_build1 (BIT_NOT_EXPR, type,
7707 TREE_OPERAND (arg1, 1)),
7708 arg0);
7711 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7712 any power of 2 minus 1. */
7713 if (TREE_CODE (arg0) == BIT_AND_EXPR
7714 && TREE_CODE (arg1) == BIT_AND_EXPR
7715 && operand_equal_p (TREE_OPERAND (arg0, 0),
7716 TREE_OPERAND (arg1, 0), 0))
7718 tree mask0 = TREE_OPERAND (arg0, 1);
7719 tree mask1 = TREE_OPERAND (arg1, 1);
7720 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7722 if (operand_equal_p (tem, mask1, 0))
7724 tem = fold_build2 (BIT_XOR_EXPR, type,
7725 TREE_OPERAND (arg0, 0), mask1);
7726 return fold_build2 (MINUS_EXPR, type, tem, mask1);
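      /* Illustrative example (not in the original source): with B == 7
         (a power of 2 minus 1), (a & ~7) - (a & 7) folds to
         (a ^ 7) - 7.  */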
7731 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7732 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7733 return non_lvalue (fold_convert (type, arg0));
7735 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7736 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7737 (-ARG1 + ARG0) reduces to -ARG1. */
7738 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7739 return negate_expr (fold_convert (type, arg1));
7741 /* Fold &x - &x. This can happen from &x.foo - &x.
7742 This is unsafe for certain floats even in non-IEEE formats.
7743 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7744 Also note that operand_equal_p is always false if an operand
7745 is volatile. */
7747 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7748 && operand_equal_p (arg0, arg1, 0))
7749 return fold_convert (type, integer_zero_node);
7751 /* A - B -> A + (-B) if B is easily negatable. */
7752 if (!wins && negate_expr_p (arg1)
7753 && ((FLOAT_TYPE_P (type)
7754 /* Avoid this transformation if B is a positive REAL_CST. */
7755 && (TREE_CODE (arg1) != REAL_CST
7756 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7757 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7758 return fold_build2 (PLUS_EXPR, type,
7759 fold_convert (type, arg0),
7760 fold_convert (type, negate_expr (arg1)));
7762 /* Try folding difference of addresses. */
7764 HOST_WIDE_INT diff;
7766 if ((TREE_CODE (arg0) == ADDR_EXPR
7767 || TREE_CODE (arg1) == ADDR_EXPR)
7768 && ptr_difference_const (arg0, arg1, &diff))
7769 return build_int_cst_type (type, diff);
7772 /* Fold &a[i] - &a[j] to i-j. */
7773 if (TREE_CODE (arg0) == ADDR_EXPR
7774 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7775 && TREE_CODE (arg1) == ADDR_EXPR
7776 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7778 tree aref0 = TREE_OPERAND (arg0, 0);
7779 tree aref1 = TREE_OPERAND (arg1, 0);
7780 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7781 TREE_OPERAND (aref1, 0), 0))
7783 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7784 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7785 tree esz = array_ref_element_size (aref0);
7786 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7787 return fold_build2 (MULT_EXPR, type, diff,
7788 fold_convert (type, esz));
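      /* Illustrative example (not in the original source): at the tree
         level, &a[i] - &a[j] folds to (i - j) * sizeof (a[0]) without
         materializing either address.  */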
7793 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7794 of the array.  The loop optimizer sometimes produces this type of
7795 expression. */
7796 if (TREE_CODE (arg0) == ADDR_EXPR)
7798 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7799 if (tem)
7800 return fold_convert (type, tem);
7803 if (flag_unsafe_math_optimizations
7804 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7805 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7806 && (tem = distribute_real_division (code, type, arg0, arg1)))
7807 return tem;
7809 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
7810 same or one. */
7811 if ((TREE_CODE (arg0) == MULT_EXPR
7812 || TREE_CODE (arg1) == MULT_EXPR)
7813 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7815 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7816 if (tem)
7817 return tem;
7820 goto associate;
7822 case MULT_EXPR:
7823 /* (-A) * (-B) -> A * B */
7824 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7825 return fold_build2 (MULT_EXPR, type,
7826 TREE_OPERAND (arg0, 0),
7827 negate_expr (arg1));
7828 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7829 return fold_build2 (MULT_EXPR, type,
7830 negate_expr (arg0),
7831 TREE_OPERAND (arg1, 0));
7833 if (! FLOAT_TYPE_P (type))
7835 if (integer_zerop (arg1))
7836 return omit_one_operand (type, arg1, arg0);
7837 if (integer_onep (arg1))
7838 return non_lvalue (fold_convert (type, arg0));
7839 /* Transform x * -1 into -x. */
7840 if (integer_all_onesp (arg1))
7841 return fold_convert (type, negate_expr (arg0));
7843 /* (a * (1 << b)) is (a << b) */
7844 if (TREE_CODE (arg1) == LSHIFT_EXPR
7845 && integer_onep (TREE_OPERAND (arg1, 0)))
7846 return fold_build2 (LSHIFT_EXPR, type, arg0,
7847 TREE_OPERAND (arg1, 1));
7848 if (TREE_CODE (arg0) == LSHIFT_EXPR
7849 && integer_onep (TREE_OPERAND (arg0, 0)))
7850 return fold_build2 (LSHIFT_EXPR, type, arg1,
7851 TREE_OPERAND (arg0, 1));
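      /* Illustrative example (not in the original source): for integer x,
         x * (1 << n) folds to x << n.  */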
7853 if (TREE_CODE (arg1) == INTEGER_CST
7854 && 0 != (tem = extract_muldiv (op0,
7855 fold_convert (type, arg1),
7856 code, NULL_TREE)))
7857 return fold_convert (type, tem);
7860 else
7862 /* Maybe fold x * 0 to 0. The expressions aren't the same
7863 when x is NaN, since x * 0 is also NaN. Nor are they the
7864 same in modes with signed zeros, since multiplying a
7865 negative value by 0 gives -0, not +0. */
7866 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7867 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7868 && real_zerop (arg1))
7869 return omit_one_operand (type, arg1, arg0);
7870 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7871 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7872 && real_onep (arg1))
7873 return non_lvalue (fold_convert (type, arg0));
7875 /* Transform x * -1.0 into -x. */
7876 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7877 && real_minus_onep (arg1))
7878 return fold_convert (type, negate_expr (arg0));
7880 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7881 if (flag_unsafe_math_optimizations
7882 && TREE_CODE (arg0) == RDIV_EXPR
7883 && TREE_CODE (arg1) == REAL_CST
7884 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7886 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7887 arg1, 0);
7888 if (tem)
7889 return fold_build2 (RDIV_EXPR, type, tem,
7890 TREE_OPERAND (arg0, 1));
7893 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7894 if (operand_equal_p (arg0, arg1, 0))
7896 tree tem = fold_strip_sign_ops (arg0);
7897 if (tem != NULL_TREE)
7899 tem = fold_convert (type, tem);
7900 return fold_build2 (MULT_EXPR, type, tem, tem);
7904 if (flag_unsafe_math_optimizations)
7906 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7907 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7909 /* Optimizations of root(...)*root(...). */
7910 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7912 tree rootfn, arg, arglist;
7913 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7914 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7916 /* Optimize sqrt(x)*sqrt(x) as x. */
7917 if (BUILTIN_SQRT_P (fcode0)
7918 && operand_equal_p (arg00, arg10, 0)
7919 && ! HONOR_SNANS (TYPE_MODE (type)))
7920 return arg00;
7922 /* Optimize root(x)*root(y) as root(x*y). */
7923 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7924 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7925 arglist = build_tree_list (NULL_TREE, arg);
7926 return build_function_call_expr (rootfn, arglist);
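      /* Illustrative example (not in the original source): under
         -funsafe-math-optimizations, sqrt (x) * sqrt (y) becomes
         sqrt (x * y), and sqrt (x) * sqrt (x) becomes plain x when
         signaling NaNs need not be honored.  */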
7929 /* Optimize expN(x)*expN(y) as expN(x+y). */
7930 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7932 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7933 tree arg = fold_build2 (PLUS_EXPR, type,
7934 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7935 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7936 tree arglist = build_tree_list (NULL_TREE, arg);
7937 return build_function_call_expr (expfn, arglist);
7940 /* Optimizations of pow(...)*pow(...). */
7941 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7942 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7943 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7945 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7946 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7947 1)));
7948 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7949 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7950 1)));
7952 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7953 if (operand_equal_p (arg01, arg11, 0))
7955 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7956 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7957 tree arglist = tree_cons (NULL_TREE, arg,
7958 build_tree_list (NULL_TREE,
7959 arg01));
7960 return build_function_call_expr (powfn, arglist);
7963 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7964 if (operand_equal_p (arg00, arg10, 0))
7966 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7967 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7968 tree arglist = tree_cons (NULL_TREE, arg00,
7969 build_tree_list (NULL_TREE,
7970 arg));
7971 return build_function_call_expr (powfn, arglist);
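      /* Illustrative examples (not in the original source):
         pow (x, y) * pow (z, y) becomes pow (x * z, y), and
         pow (x, y) * pow (x, z) becomes pow (x, y + z).  */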
7975 /* Optimize tan(x)*cos(x) as sin(x). */
7976 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7977 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7978 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7979 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7980 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7981 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7982 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7983 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7985 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7987 if (sinfn != NULL_TREE)
7988 return build_function_call_expr (sinfn,
7989 TREE_OPERAND (arg0, 1));
7992 /* Optimize x*pow(x,c) as pow(x,c+1). */
7993 if (fcode1 == BUILT_IN_POW
7994 || fcode1 == BUILT_IN_POWF
7995 || fcode1 == BUILT_IN_POWL)
7997 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7998 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7999 1)));
8000 if (TREE_CODE (arg11) == REAL_CST
8001 && ! TREE_CONSTANT_OVERFLOW (arg11)
8002 && operand_equal_p (arg0, arg10, 0))
8004 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8005 REAL_VALUE_TYPE c;
8006 tree arg, arglist;
8008 c = TREE_REAL_CST (arg11);
8009 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8010 arg = build_real (type, c);
8011 arglist = build_tree_list (NULL_TREE, arg);
8012 arglist = tree_cons (NULL_TREE, arg0, arglist);
8013 return build_function_call_expr (powfn, arglist);
8017 /* Optimize pow(x,c)*x as pow(x,c+1). */
8018 if (fcode0 == BUILT_IN_POW
8019 || fcode0 == BUILT_IN_POWF
8020 || fcode0 == BUILT_IN_POWL)
8022 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8023 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8024 1)));
8025 if (TREE_CODE (arg01) == REAL_CST
8026 && ! TREE_CONSTANT_OVERFLOW (arg01)
8027 && operand_equal_p (arg1, arg00, 0))
8029 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8030 REAL_VALUE_TYPE c;
8031 tree arg, arglist;
8033 c = TREE_REAL_CST (arg01);
8034 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8035 arg = build_real (type, c);
8036 arglist = build_tree_list (NULL_TREE, arg);
8037 arglist = tree_cons (NULL_TREE, arg1, arglist);
8038 return build_function_call_expr (powfn, arglist);
8042 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8043 if (! optimize_size
8044 && operand_equal_p (arg0, arg1, 0))
8046 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8048 if (powfn)
8050 tree arg = build_real (type, dconst2);
8051 tree arglist = build_tree_list (NULL_TREE, arg);
8052 arglist = tree_cons (NULL_TREE, arg0, arglist);
8053 return build_function_call_expr (powfn, arglist);
8058 goto associate;
8060 case BIT_IOR_EXPR:
8061 bit_ior:
8062 if (integer_all_onesp (arg1))
8063 return omit_one_operand (type, arg1, arg0);
8064 if (integer_zerop (arg1))
8065 return non_lvalue (fold_convert (type, arg0));
8066 if (operand_equal_p (arg0, arg1, 0))
8067 return non_lvalue (fold_convert (type, arg0));
8069 /* ~X | X is -1. */
8070 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8071 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8073 t1 = build_int_cst (type, -1);
8074 t1 = force_fit_type (t1, 0, false, false);
8075 return omit_one_operand (type, t1, arg1);
8078 /* X | ~X is -1. */
8079 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8080 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8082 t1 = build_int_cst (type, -1);
8083 t1 = force_fit_type (t1, 0, false, false);
8084 return omit_one_operand (type, t1, arg0);
8087 t1 = distribute_bit_expr (code, type, arg0, arg1);
8088 if (t1 != NULL_TREE)
8089 return t1;
8091 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8093 This results in more efficient code for machines without a NAND
8094 instruction. Combine will canonicalize to the first form
8095 which will allow use of NAND instructions provided by the
8096 backend if they exist. */
8097 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8098 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8100 return fold_build1 (BIT_NOT_EXPR, type,
8101 build2 (BIT_AND_EXPR, type,
8102 TREE_OPERAND (arg0, 0),
8103 TREE_OPERAND (arg1, 0)));
8106 /* See if this can be simplified into a rotate first. If that
8107 is unsuccessful continue in the association code. */
8108 goto bit_rotate;
8110 case BIT_XOR_EXPR:
8111 if (integer_zerop (arg1))
8112 return non_lvalue (fold_convert (type, arg0));
8113 if (integer_all_onesp (arg1))
8114 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8115 if (operand_equal_p (arg0, arg1, 0))
8116 return omit_one_operand (type, integer_zero_node, arg0);
8118 /* ~X ^ X is -1. */
8119 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8120 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8122 t1 = build_int_cst (type, -1);
8123 t1 = force_fit_type (t1, 0, false, false);
8124 return omit_one_operand (type, t1, arg1);
8127 /* X ^ ~X is -1. */
8128 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8129 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8131 t1 = build_int_cst (type, -1);
8132 t1 = force_fit_type (t1, 0, false, false);
8133 return omit_one_operand (type, t1, arg0);
8136 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8137 with a constant, and the two constants have no bits in common,
8138 we should treat this as a BIT_IOR_EXPR since this may produce more
8139 simplifications. */
8140 if (TREE_CODE (arg0) == BIT_AND_EXPR
8141 && TREE_CODE (arg1) == BIT_AND_EXPR
8142 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8143 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8144 && integer_zerop (const_binop (BIT_AND_EXPR,
8145 TREE_OPERAND (arg0, 1),
8146 TREE_OPERAND (arg1, 1), 0)))
8148 code = BIT_IOR_EXPR;
8149 goto bit_ior;
8152 /* (X | Y) ^ X -> Y & ~X.  */
8153 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8156 tree t2 = TREE_OPERAND (arg0, 1);
8157 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8158 arg1);
8159 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8160 fold_convert (type, t1));
8161 return t1;
8164 /* (Y | X) ^ X -> Y & ~X.  */
8165 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8166 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8168 tree t2 = TREE_OPERAND (arg0, 0);
8169 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8170 arg1);
8171 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8172 fold_convert (type, t1));
8173 return t1;
8176 /* X ^ (X | Y) -> Y & ~X.  */
8177 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8178 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8180 tree t2 = TREE_OPERAND (arg1, 1);
8181 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8182 arg0);
8183 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8184 fold_convert (type, t1));
8185 return t1;
8188 /* X ^ (Y | X) -> Y & ~X.  */
8189 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8190 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8192 tree t2 = TREE_OPERAND (arg1, 0);
8193 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8194 arg0);
8195 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8196 fold_convert (type, t1));
8197 return t1;
8200 /* Convert ~X ^ ~Y to X ^ Y. */
8201 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8202 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8203 return fold_build2 (code, type,
8204 fold_convert (type, TREE_OPERAND (arg0, 0)),
8205 fold_convert (type, TREE_OPERAND (arg1, 0)));
8207 /* See if this can be simplified into a rotate first. If that
8208 is unsuccessful continue in the association code. */
8209 goto bit_rotate;
8211 case BIT_AND_EXPR:
8212 if (integer_all_onesp (arg1))
8213 return non_lvalue (fold_convert (type, arg0));
8214 if (integer_zerop (arg1))
8215 return omit_one_operand (type, arg1, arg0);
8216 if (operand_equal_p (arg0, arg1, 0))
8217 return non_lvalue (fold_convert (type, arg0));
8219 /* ~X & X is always zero. */
8220 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8221 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8222 return omit_one_operand (type, integer_zero_node, arg1);
8224 /* X & ~X is always zero. */
8225 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8226 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8227 return omit_one_operand (type, integer_zero_node, arg0);
8229 t1 = distribute_bit_expr (code, type, arg0, arg1);
8230 if (t1 != NULL_TREE)
8231 return t1;
8232 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8233 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8234 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8236 unsigned int prec
8237 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8239 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8240 && (~TREE_INT_CST_LOW (arg1)
8241 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8242 return fold_convert (type, TREE_OPERAND (arg0, 0));
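      /* Illustrative example (not in the original source): for
         unsigned char c, (int) c & 0xff folds back to (int) c, since
         the conversion can never set bits above the low eight.  */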
8245 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8247 This results in more efficient code for machines without a NOR
8248 instruction. Combine will canonicalize to the first form
8249 which will allow use of NOR instructions provided by the
8250 backend if they exist. */
8251 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8252 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8254 return fold_build1 (BIT_NOT_EXPR, type,
8255 build2 (BIT_IOR_EXPR, type,
8256 TREE_OPERAND (arg0, 0),
8257 TREE_OPERAND (arg1, 0)));
8260 goto associate;
8262 case RDIV_EXPR:
8263 /* Don't touch a floating-point divide by zero unless the mode
8264 of the constant can represent infinity. */
8265 if (TREE_CODE (arg1) == REAL_CST
8266 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8267 && real_zerop (arg1))
8268 return NULL_TREE;
8270 /* Optimize A / A to 1.0 if we don't care about
8271 NaNs or Infinities. */
8272 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8273 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8274 && operand_equal_p (arg0, arg1, 0))
8276 tree r = build_real (TREE_TYPE (arg0), dconst1);
8278 return omit_two_operands (type, r, arg0, arg1);
8281 /* (-A) / (-B) -> A / B */
8282 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8283 return fold_build2 (RDIV_EXPR, type,
8284 TREE_OPERAND (arg0, 0),
8285 negate_expr (arg1));
8286 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8287 return fold_build2 (RDIV_EXPR, type,
8288 negate_expr (arg0),
8289 TREE_OPERAND (arg1, 0));
8291 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8292 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8293 && real_onep (arg1))
8294 return non_lvalue (fold_convert (type, arg0));
8296 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8297 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8298 && real_minus_onep (arg1))
8299 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8301 /* If ARG1 is a constant, we can convert this to a multiply by the
8302 reciprocal. This does not have the same rounding properties,
8303 so only do this if -funsafe-math-optimizations. We can actually
8304 always safely do it if ARG1 is a power of two, but it's hard to
8305 tell if it is or not in a portable manner. */
8306 if (TREE_CODE (arg1) == REAL_CST)
8308 if (flag_unsafe_math_optimizations
8309 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8310 arg1, 0)))
8311 return fold_build2 (MULT_EXPR, type, arg0, tem);
8312 /* Find the reciprocal if optimizing and the result is exact. */
8313 if (optimize)
8315 REAL_VALUE_TYPE r;
8316 r = TREE_REAL_CST (arg1);
8317 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8319 tem = build_real (type, r);
8320 return fold_build2 (MULT_EXPR, type,
8321 fold_convert (type, arg0), tem);
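      /* Illustrative example (not in the original source): x / 2.0 folds
         to x * 0.5 whenever optimizing, because 0.5 is an exact
         reciprocal; x / 3.0 is rewritten this way only under
         -funsafe-math-optimizations.  */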
8325 /* Convert A/B/C to A/(B*C). */
8326 if (flag_unsafe_math_optimizations
8327 && TREE_CODE (arg0) == RDIV_EXPR)
8328 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8329 fold_build2 (MULT_EXPR, type,
8330 TREE_OPERAND (arg0, 1), arg1));
8332 /* Convert A/(B/C) to (A/B)*C. */
8333 if (flag_unsafe_math_optimizations
8334 && TREE_CODE (arg1) == RDIV_EXPR)
8335 return fold_build2 (MULT_EXPR, type,
8336 fold_build2 (RDIV_EXPR, type, arg0,
8337 TREE_OPERAND (arg1, 0)),
8338 TREE_OPERAND (arg1, 1));
8340 /* Convert C1/(X*C2) into (C1/C2)/X. */
8341 if (flag_unsafe_math_optimizations
8342 && TREE_CODE (arg1) == MULT_EXPR
8343 && TREE_CODE (arg0) == REAL_CST
8344 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8346 tree tem = const_binop (RDIV_EXPR, arg0,
8347 TREE_OPERAND (arg1, 1), 0);
8348 if (tem)
8349 return fold_build2 (RDIV_EXPR, type, tem,
8350 TREE_OPERAND (arg1, 0));
8353 if (flag_unsafe_math_optimizations)
8355 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8356 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8358 /* Optimize sin(x)/cos(x) as tan(x). */
8359 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8360 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8361 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8362 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8363 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8365 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8367 if (tanfn != NULL_TREE)
8368 return build_function_call_expr (tanfn,
8369 TREE_OPERAND (arg0, 1));
8372 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8373 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8374 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8375 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8376 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8377 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8379 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8381 if (tanfn != NULL_TREE)
8383 tree tmp = TREE_OPERAND (arg0, 1);
8384 tmp = build_function_call_expr (tanfn, tmp);
8385 return fold_build2 (RDIV_EXPR, type,
8386 build_real (type, dconst1), tmp);
8390 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
8391 NaNs or Infinities. */
8392 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
8393 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
8394 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
8396 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8397 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8399 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8400 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8401 && operand_equal_p (arg00, arg01, 0))
8403 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8405 if (cosfn != NULL_TREE)
8406 return build_function_call_expr (cosfn,
8407 TREE_OPERAND (arg0, 1));
8411 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
8412 NaNs or Infinities. */
8413 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
8414 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
8415 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
8417 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8418 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8420 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8421 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8422 && operand_equal_p (arg00, arg01, 0))
8424 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8426 if (cosfn != NULL_TREE)
8428 tree tmp = TREE_OPERAND (arg0, 1);
8429 tmp = build_function_call_expr (cosfn, tmp);
8430 return fold_build2 (RDIV_EXPR, type,
8431 build_real (type, dconst1),
8432 tmp);
8437 /* Optimize pow(x,c)/x as pow(x,c-1). */
8438 if (fcode0 == BUILT_IN_POW
8439 || fcode0 == BUILT_IN_POWF
8440 || fcode0 == BUILT_IN_POWL)
8442 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8443 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8444 if (TREE_CODE (arg01) == REAL_CST
8445 && ! TREE_CONSTANT_OVERFLOW (arg01)
8446 && operand_equal_p (arg1, arg00, 0))
8448 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8449 REAL_VALUE_TYPE c;
8450 tree arg, arglist;
8452 c = TREE_REAL_CST (arg01);
8453 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8454 arg = build_real (type, c);
8455 arglist = build_tree_list (NULL_TREE, arg);
8456 arglist = tree_cons (NULL_TREE, arg1, arglist);
8457 return build_function_call_expr (powfn, arglist);
8461 /* Optimize x/expN(y) into x*expN(-y). */
8462 if (BUILTIN_EXPONENT_P (fcode1))
8464 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8465 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8466 tree arglist = build_tree_list (NULL_TREE,
8467 fold_convert (type, arg));
8468 arg1 = build_function_call_expr (expfn, arglist);
8469 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8472 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8473 if (fcode1 == BUILT_IN_POW
8474 || fcode1 == BUILT_IN_POWF
8475 || fcode1 == BUILT_IN_POWL)
8477 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8478 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8479 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8480 tree neg11 = fold_convert (type, negate_expr (arg11));
8481 tree arglist = tree_cons (NULL_TREE, arg10,
8482 build_tree_list (NULL_TREE, neg11));
8483 arg1 = build_function_call_expr (powfn, arglist);
8484 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8487 goto binary;
8489 case TRUNC_DIV_EXPR:
8490 case ROUND_DIV_EXPR:
8491 case FLOOR_DIV_EXPR:
8492 case CEIL_DIV_EXPR:
8493 case EXACT_DIV_EXPR:
8494 if (integer_onep (arg1))
8495 return non_lvalue (fold_convert (type, arg0));
8496 if (integer_zerop (arg1))
8497 return NULL_TREE;
8498 /* X / -1 is -X. */
8499 if (!TYPE_UNSIGNED (type)
8500 && TREE_CODE (arg1) == INTEGER_CST
8501 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8502 && TREE_INT_CST_HIGH (arg1) == -1)
8503 return fold_convert (type, negate_expr (arg0));
8505 /* Convert -A / -B to A / B when the type is signed and overflow is
8506 undefined. */
8507 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8508 && TREE_CODE (arg0) == NEGATE_EXPR
8509 && negate_expr_p (arg1))
8510 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8511 negate_expr (arg1));
8512 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8513 && TREE_CODE (arg1) == NEGATE_EXPR
8514 && negate_expr_p (arg0))
8515 return fold_build2 (code, type, negate_expr (arg0),
8516 TREE_OPERAND (arg1, 0));
8518 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8519 operation, EXACT_DIV_EXPR.
8521 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8522 At one time others generated faster code; it's not clear whether they do
8523 after the last round of changes to the DIV code in expmed.c. */
8524 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8525 && multiple_of_p (type, arg0, arg1))
8526 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8528 if (TREE_CODE (arg1) == INTEGER_CST
8529 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8530 return fold_convert (type, tem);
8532 goto binary;
8534 case CEIL_MOD_EXPR:
8535 case FLOOR_MOD_EXPR:
8536 case ROUND_MOD_EXPR:
8537 case TRUNC_MOD_EXPR:
8538 /* X % 1 is always zero, but be sure to preserve any side
8539 effects in X. */
8540 if (integer_onep (arg1))
8541 return omit_one_operand (type, integer_zero_node, arg0);
8543 /* For X % 0, return it unchanged so that we can get the
8544 proper warnings and errors. */
8545 if (integer_zerop (arg1))
8546 return NULL_TREE;
8548 /* 0 % X is always zero, but be sure to preserve any side
8549 effects in X. Place this after checking for X == 0. */
8550 if (integer_zerop (arg0))
8551 return omit_one_operand (type, integer_zero_node, arg1);
8553 /* X % -1 is zero. */
8554 if (!TYPE_UNSIGNED (type)
8555 && TREE_CODE (arg1) == INTEGER_CST
8556 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8557 && TREE_INT_CST_HIGH (arg1) == -1)
8558 return omit_one_operand (type, integer_zero_node, arg0);
8560 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8561 i.e. "X % C" into "X & C2", if X and C are positive. */
8562 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8563 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8564 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8566 unsigned HOST_WIDE_INT high, low;
8567 tree mask;
8568 int l;
8570 l = tree_log2 (arg1);
8571 if (l >= HOST_BITS_PER_WIDE_INT)
8573 high = ((unsigned HOST_WIDE_INT) 1
8574 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8575 low = -1;
8577 else
8579 high = 0;
8580 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8583 mask = build_int_cst_wide (type, low, high);
8584 return fold_build2 (BIT_AND_EXPR, type,
8585 fold_convert (type, arg0), mask);
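      /* Illustrative example (not in the original source): for
         unsigned x, x % 8 folds to x & 7.  */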
8588 /* X % -C is the same as X % C. */
8589 if (code == TRUNC_MOD_EXPR
8590 && !TYPE_UNSIGNED (type)
8591 && TREE_CODE (arg1) == INTEGER_CST
8592 && !TREE_CONSTANT_OVERFLOW (arg1)
8593 && TREE_INT_CST_HIGH (arg1) < 0
8594 && !flag_trapv
8595 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8596 && !sign_bit_p (arg1, arg1))
8597 return fold_build2 (code, type, fold_convert (type, arg0),
8598 fold_convert (type, negate_expr (arg1)));
8600 /* X % -Y is the same as X % Y. */
8601 if (code == TRUNC_MOD_EXPR
8602 && !TYPE_UNSIGNED (type)
8603 && TREE_CODE (arg1) == NEGATE_EXPR
8604 && !flag_trapv)
8605 return fold_build2 (code, type, fold_convert (type, arg0),
8606 fold_convert (type, TREE_OPERAND (arg1, 0)));
8608 if (TREE_CODE (arg1) == INTEGER_CST
8609 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8610 return fold_convert (type, tem);
8612 goto binary;
8614 case LROTATE_EXPR:
8615 case RROTATE_EXPR:
8616 if (integer_all_onesp (arg0))
8617 return omit_one_operand (type, arg0, arg1);
8618 goto shift;
8620 case RSHIFT_EXPR:
8621 /* Optimize -1 >> x for arithmetic right shifts. */
8622 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8623 return omit_one_operand (type, arg0, arg1);
8624 /* ... fall through ... */
8626 case LSHIFT_EXPR:
8627 shift:
8628 if (integer_zerop (arg1))
8629 return non_lvalue (fold_convert (type, arg0));
8630 if (integer_zerop (arg0))
8631 return omit_one_operand (type, arg0, arg1);
8633 /* Since negative shift count is not well-defined,
8634 don't try to compute it in the compiler. */
8635 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8636 return NULL_TREE;
8638 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8639 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8640 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8641 && host_integerp (TREE_OPERAND (arg0, 1), false)
8642 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8644 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8645 + TREE_INT_CST_LOW (arg1));
8647 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8648 being well defined. */
8649 if (low >= TYPE_PRECISION (type))
8651 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8652 low = low % TYPE_PRECISION (type);
8653 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8654 return build_int_cst (type, 0);
8655 else
8656 low = TYPE_PRECISION (type) - 1;
8659 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8660 build_int_cst (type, low));
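      /* Illustrative example (not in the original source): (x << 3) << 5
         folds to x << 8 when 8 is within the type's precision; if the
         sum reaches the precision, an out-of-range unsigned shift folds
         to 0.  */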
8663 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8664 into x & ((unsigned)-1 >> c) for unsigned types. */
8665 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8666 || (TYPE_UNSIGNED (type)
8667 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8668 && host_integerp (arg1, false)
8669 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8670 && host_integerp (TREE_OPERAND (arg0, 1), false)
8671 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8673 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8674 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8675 tree lshift;
8676 tree arg00;
8678 if (low0 == low1)
8680 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8682 lshift = build_int_cst (type, -1);
8683 lshift = int_const_binop (code, lshift, arg1, 0);
8685 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
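      /* Illustrative example (not in the original source): for 32-bit
         unsigned x, (x >> 4) << 4 folds to x & (-1 << 4),
         i.e. x & 0xfffffff0.  */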
8689 /* Rewrite an LROTATE_EXPR by a constant into an
8690 RROTATE_EXPR by a new constant. */
8691 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8693 tree tem = build_int_cst (NULL_TREE,
8694 GET_MODE_BITSIZE (TYPE_MODE (type)));
8695 tem = fold_convert (TREE_TYPE (arg1), tem);
8696 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8697 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
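      /* Illustrative example (not in the original source): for a 32-bit
         type, a left rotate by 3 is rewritten as a right rotate by 29.  */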
8700 /* If we have a rotate of a bit operation with the rotate count and
8701 the second operand of the bit operation both constant,
8702 permute the two operations. */
8703 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8704 && (TREE_CODE (arg0) == BIT_AND_EXPR
8705 || TREE_CODE (arg0) == BIT_IOR_EXPR
8706 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8708 return fold_build2 (TREE_CODE (arg0), type,
8709 fold_build2 (code, type,
8710 TREE_OPERAND (arg0, 0), arg1),
8711 fold_build2 (code, type,
8712 TREE_OPERAND (arg0, 1), arg1));
8714 /* Two consecutive rotates adding up to the width of the mode can
8715 be ignored. */
8716 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8717 && TREE_CODE (arg0) == RROTATE_EXPR
8718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8719 && TREE_INT_CST_HIGH (arg1) == 0
8720 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8721 && ((TREE_INT_CST_LOW (arg1)
8722 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8723 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8724 return TREE_OPERAND (arg0, 0);
8726 goto binary;
8728 case MIN_EXPR:
8729 if (operand_equal_p (arg0, arg1, 0))
8730 return omit_one_operand (type, arg0, arg1);
8731 if (INTEGRAL_TYPE_P (type)
8732 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8733 return omit_one_operand (type, arg1, arg0);
8734 goto associate;
8736 case MAX_EXPR:
8737 if (operand_equal_p (arg0, arg1, 0))
8738 return omit_one_operand (type, arg0, arg1);
8739 if (INTEGRAL_TYPE_P (type)
8740 && TYPE_MAX_VALUE (type)
8741 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8742 return omit_one_operand (type, arg1, arg0);
8743 goto associate;
8745 case TRUTH_ANDIF_EXPR:
8746 /* Note that the operands of this must be ints
8747 and their values must be 0 or 1.
8748 ("true" is a fixed value perhaps depending on the language.) */
8749 /* If first arg is constant zero, return it. */
8750 if (integer_zerop (arg0))
8751 return fold_convert (type, arg0);
8752 case TRUTH_AND_EXPR:
8753 /* If either arg is constant true, drop it. */
8754 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8755 return non_lvalue (fold_convert (type, arg1));
8756 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8757 /* Preserve sequence points. */
8758 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8759 return non_lvalue (fold_convert (type, arg0));
8760 /* If second arg is constant zero, result is zero, but first arg
8761 must be evaluated. */
8762 if (integer_zerop (arg1))
8763 return omit_one_operand (type, arg1, arg0);
8764 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8765 case will be handled here. */
8766 if (integer_zerop (arg0))
8767 return omit_one_operand (type, arg0, arg1);
8769 /* !X && X is always false. */
8770 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8771 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8772 return omit_one_operand (type, integer_zero_node, arg1);
8773 /* X && !X is always false. */
8774 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8775 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8776 return omit_one_operand (type, integer_zero_node, arg0);
8778 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8779 means A >= Y && A != MAX, but in this case we know that
8780 A < X <= MAX. */
8782 if (!TREE_SIDE_EFFECTS (arg0)
8783 && !TREE_SIDE_EFFECTS (arg1))
8785 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8786 if (tem && !operand_equal_p (tem, arg0, 0))
8787 return fold_build2 (code, type, tem, arg1);
8789 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8790 if (tem && !operand_equal_p (tem, arg1, 0))
8791 return fold_build2 (code, type, arg0, tem);
8794 truth_andor:
8795 /* We only do these simplifications if we are optimizing. */
8796 if (!optimize)
8797 return NULL_TREE;
8799 /* Check for things like (A || B) && (A || C). We can convert this
8800 to A || (B && C). Note that either operator can be any of the four
8801 truth and/or operations and the transformation will still be
8802 valid. Also note that we only care about order for the
8803 ANDIF and ORIF operators. If B contains side effects, this
8804 might change the truth-value of A. */
8805 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8806 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8807 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8808 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8809 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8812 tree a00 = TREE_OPERAND (arg0, 0);
8813 tree a01 = TREE_OPERAND (arg0, 1);
8814 tree a10 = TREE_OPERAND (arg1, 0);
8815 tree a11 = TREE_OPERAND (arg1, 1);
8816 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8817 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8818 && (code == TRUTH_AND_EXPR
8819 || code == TRUTH_OR_EXPR));
8821 if (operand_equal_p (a00, a10, 0))
8822 return fold_build2 (TREE_CODE (arg0), type, a00,
8823 fold_build2 (code, type, a01, a11));
8824 else if (commutative && operand_equal_p (a00, a11, 0))
8825 return fold_build2 (TREE_CODE (arg0), type, a00,
8826 fold_build2 (code, type, a01, a10));
8827 else if (commutative && operand_equal_p (a01, a10, 0))
8828 return fold_build2 (TREE_CODE (arg0), type, a01,
8829 fold_build2 (code, type, a00, a11));
8831 /* This case is tricky because we must either have commutative
8832 operators or else A10 must not have side-effects. */
8834 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8835 && operand_equal_p (a01, a11, 0))
8836 return fold_build2 (TREE_CODE (arg0), type,
8837 fold_build2 (code, type, a00, a10),
8838 a01);
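      /* Illustrative example (not in the original source):
         (a || b) && (a || c) is rewritten as a || (b && c), provided
         the side-effect constraints above are met.  */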
8841 /* See if we can build a range comparison. */
8842 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8843 return tem;
8845 /* Check for the possibility of merging component references. If our
8846 lhs is another similar operation, try to merge its rhs with our
8847 rhs. Then try to merge our lhs and rhs. */
8848 if (TREE_CODE (arg0) == code
8849 && 0 != (tem = fold_truthop (code, type,
8850 TREE_OPERAND (arg0, 1), arg1)))
8851 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8853 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8854 return tem;
8856 return NULL_TREE;
8858 case TRUTH_ORIF_EXPR:
8859 /* Note that the operands of this must be ints
8860 and their values must be 0 or true.
8861 ("true" is a fixed value perhaps depending on the language.) */
8862 /* If first arg is constant true, return it. */
8863 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8864 return fold_convert (type, arg0);
8865 case TRUTH_OR_EXPR:
8866 /* If either arg is constant zero, drop it. */
8867 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8868 return non_lvalue (fold_convert (type, arg1));
8869 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8870 /* Preserve sequence points. */
8871 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8872 return non_lvalue (fold_convert (type, arg0));
8873 /* If second arg is constant true, result is true, but we must
8874 evaluate first arg. */
8875 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8876 return omit_one_operand (type, arg1, arg0);
8877 /* Likewise for first arg, but note this only occurs here for
8878 TRUTH_OR_EXPR. */
8879 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8880 return omit_one_operand (type, arg0, arg1);
8882 /* !X || X is always true. */
8883 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8884 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8885 return omit_one_operand (type, integer_one_node, arg1);
8886 /* X || !X is always true. */
8887 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8888 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8889 return omit_one_operand (type, integer_one_node, arg0);
8891 goto truth_andor;
8893 case TRUTH_XOR_EXPR:
8894 /* If the second arg is constant zero, drop it. */
8895 if (integer_zerop (arg1))
8896 return non_lvalue (fold_convert (type, arg0));
8897 /* If the second arg is constant true, this is a logical inversion. */
8898 if (integer_onep (arg1))
8900 /* Only call invert_truthvalue if operand is a truth value. */
8901 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8902 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8903 else
8904 tem = invert_truthvalue (arg0);
8905 return non_lvalue (fold_convert (type, tem));
8907 /* Identical arguments cancel to zero. */
8908 if (operand_equal_p (arg0, arg1, 0))
8909 return omit_one_operand (type, integer_zero_node, arg0);
8911 /* !X ^ X is always true. */
8912 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8913 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8914 return omit_one_operand (type, integer_one_node, arg1);
8916 /* X ^ !X is always true. */
8917 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8918 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8919 return omit_one_operand (type, integer_one_node, arg0);
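      /* Worked example (hypothetical; TRUTH_XOR_EXPR has no direct C
	 spelling, but with 0/1-valued ints the ^ operator computes the
	 same values as the rules above):  */
#if 0
int x0 (int a) { return a ^ 0; }    /* folds to: a  */
int x1 (int a) { return a ^ 1; }    /* folds to: !a (for a in {0,1}) */
int xx (int a) { return a ^ a; }    /* folds to: 0  */
int nx (int a) { return !a ^ a; }   /* folds to: 1 (for a in {0,1}) */
#endif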
8921 return NULL_TREE;
8923 case EQ_EXPR:
8924 case NE_EXPR:
8925 case LT_EXPR:
8926 case GT_EXPR:
8927 case LE_EXPR:
8928 case GE_EXPR:
8929 /* If one arg is a real or integer constant, put it last. */
8930 if (tree_swap_operands_p (arg0, arg1, true))
8931 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8933 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
8934 if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
8935 && (code == NE_EXPR || code == EQ_EXPR))
8936 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8937 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8938 arg1));
8940 /* bool_var != 0 becomes bool_var. */
8941 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8942 && code == NE_EXPR)
8943 return non_lvalue (fold_convert (type, arg0));
8945 /* bool_var == 1 becomes bool_var. */
8946 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8947 && code == EQ_EXPR)
8948 return non_lvalue (fold_convert (type, arg0));
8950 /* bool_var != 1 becomes !bool_var. */
8951 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8952 && code == NE_EXPR)
8953 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8955 /* bool_var == 0 becomes !bool_var. */
8956 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8957 && code == EQ_EXPR)
8958 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8960 /* If this is an equality comparison of the address of a non-weak
8961 object against zero, then we know the result. */
8962 if ((code == EQ_EXPR || code == NE_EXPR)
8963 && TREE_CODE (arg0) == ADDR_EXPR
8964 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8965 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8966 && integer_zerop (arg1))
8967 return constant_boolean_node (code != EQ_EXPR, type);
8969 /* If this is an equality comparison of the address of two non-weak,
8970 unaliased symbols neither of which are extern (since we do not
8971 have access to attributes for externs), then we know the result. */
8972 if ((code == EQ_EXPR || code == NE_EXPR)
8973 && TREE_CODE (arg0) == ADDR_EXPR
8974 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8975 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8976 && ! lookup_attribute ("alias",
8977 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8978 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8979 && TREE_CODE (arg1) == ADDR_EXPR
8980 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8981 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8982 && ! lookup_attribute ("alias",
8983 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8984 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8986 /* We know that we're looking at the address of two
8987 non-weak, unaliased, static _DECL nodes.
8989 It is both wasteful and incorrect to call operand_equal_p
8990 to compare the two ADDR_EXPR nodes. It is wasteful in that
8991 all we need to do is test pointer equality for the arguments
8992 to the two ADDR_EXPR nodes. It is incorrect to use
8993 operand_equal_p as that function is NOT equivalent to a
8994 C equality test. It can in fact return false for two
8995 objects which would test as equal using the C equality
8996 operator. */
8997 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8998 return constant_boolean_node (equal
8999 ? code == EQ_EXPR : code != EQ_EXPR,
9000 type);
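	  /* Illustration (hypothetical example): two distinct non-weak,
	     unaliased, non-extern statics can never share an address, so
	     both comparisons fold to constants.  */
#if 0
static int x, y;
int eq (void) { return &x == &y; }   /* folds to: 0 */
int ne (void) { return &x != &y; }   /* folds to: 1 */
#endif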
9003 /* If this is a comparison of two exprs that look like an
9004 ARRAY_REF of the same object, then we can fold this to a
9005 comparison of the two offsets. */
9006 if (TREE_CODE_CLASS (code) == tcc_comparison)
9008 tree base0, offset0, base1, offset1;
9010 if (extract_array_ref (arg0, &base0, &offset0)
9011 && extract_array_ref (arg1, &base1, &offset1)
9012 && operand_equal_p (base0, base1, 0))
9014 /* Handle no offsets on both sides specially. */
9015 if (offset0 == NULL_TREE
9016 && offset1 == NULL_TREE)
9017 return fold_build2 (code, type, integer_zero_node,
9018 integer_zero_node);
9020 if (!offset0 || !offset1
9021 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
9023 if (offset0 == NULL_TREE)
9024 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9025 if (offset1 == NULL_TREE)
9026 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9027 return fold_build2 (code, type, offset0, offset1);
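	  /* Illustration (hypothetical example): when both sides are
	     ARRAY_REFs of the same base object, the comparison reduces to
	     a comparison of the offsets.  */
#if 0
int a[16];
int f (int i, int j) { return &a[i] == &a[j]; }   /* folds to: i == j */
#endif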
9032 /* Transform comparisons of the form X +- C CMP X. */
9033 if ((code != EQ_EXPR && code != NE_EXPR)
9034 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9035 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9036 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9037 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9038 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9039 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9040 && !(flag_wrapv || flag_trapv))))
9042 tree arg01 = TREE_OPERAND (arg0, 1);
9043 enum tree_code code0 = TREE_CODE (arg0);
9044 int is_positive;
9046 if (TREE_CODE (arg01) == REAL_CST)
9047 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9048 else
9049 is_positive = tree_int_cst_sgn (arg01);
9051 /* (X - c) > X becomes false. */
9052 if (code == GT_EXPR
9053 && ((code0 == MINUS_EXPR && is_positive >= 0)
9054 || (code0 == PLUS_EXPR && is_positive <= 0)))
9055 return constant_boolean_node (0, type);
9057 /* Likewise (X + c) < X becomes false. */
9058 if (code == LT_EXPR
9059 && ((code0 == PLUS_EXPR && is_positive >= 0)
9060 || (code0 == MINUS_EXPR && is_positive <= 0)))
9061 return constant_boolean_node (0, type);
9063 /* Convert (X - c) <= X to true. */
9064 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9065 && code == LE_EXPR
9066 && ((code0 == MINUS_EXPR && is_positive >= 0)
9067 || (code0 == PLUS_EXPR && is_positive <= 0)))
9068 return constant_boolean_node (1, type);
9070 /* Convert (X + c) >= X to true. */
9071 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9072 && code == GE_EXPR
9073 && ((code0 == PLUS_EXPR && is_positive >= 0)
9074 || (code0 == MINUS_EXPR && is_positive <= 0)))
9075 return constant_boolean_node (1, type);
9077 if (TREE_CODE (arg01) == INTEGER_CST)
9079 /* Convert X + c > X and X - c < X to true for integers. */
9080 if (code == GT_EXPR
9081 && ((code0 == PLUS_EXPR && is_positive > 0)
9082 || (code0 == MINUS_EXPR && is_positive < 0)))
9083 return constant_boolean_node (1, type);
9085 if (code == LT_EXPR
9086 && ((code0 == MINUS_EXPR && is_positive > 0)
9087 || (code0 == PLUS_EXPR && is_positive < 0)))
9088 return constant_boolean_node (1, type);
9090 /* Convert X + c <= X and X - c >= X to false for integers. */
9091 if (code == LE_EXPR
9092 && ((code0 == PLUS_EXPR && is_positive > 0)
9093 || (code0 == MINUS_EXPR && is_positive < 0)))
9094 return constant_boolean_node (0, type);
9096 if (code == GE_EXPR
9097 && ((code0 == MINUS_EXPR && is_positive > 0)
9098 || (code0 == PLUS_EXPR && is_positive < 0)))
9099 return constant_boolean_node (0, type);
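	  /* Worked example (hypothetical, assuming signed int without
	     -fwrapv or -ftrapv, so overflow is undefined and may be
	     assumed not to happen):  */
#if 0
int f (int x) { return x - 1 > x; }   /* folds to: 0 (false) */
int g (int x) { return x + 1 > x; }   /* folds to: 1 (true)  */
#endif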
9103 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9104 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9105 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9106 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9107 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9108 && !(flag_wrapv || flag_trapv))
9109 && (TREE_CODE (arg1) == INTEGER_CST
9110 && !TREE_OVERFLOW (arg1)))
9112 tree const1 = TREE_OPERAND (arg0, 1);
9113 tree const2 = arg1;
9114 tree variable = TREE_OPERAND (arg0, 0);
9115 tree lhs;
9116 int lhs_add;
9117 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9119 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9120 TREE_TYPE (arg1), const2, const1);
9121 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9122 && (TREE_CODE (lhs) != INTEGER_CST
9123 || !TREE_OVERFLOW (lhs)))
9124 return fold_build2 (code, type, variable, lhs);
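	/* Worked example (hypothetical, under the same signedness and
	   overflow assumptions as above): the constant is moved across
	   the comparison.  */
#if 0
int f (int x) { return x + 2 < 5; }    /* folds to: x < 3   */
int g (int x) { return x - 4 == 6; }   /* folds to: x == 10 */
#endif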
9127 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9129 tree targ0 = strip_float_extensions (arg0);
9130 tree targ1 = strip_float_extensions (arg1);
9131 tree newtype = TREE_TYPE (targ0);
9133 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9134 newtype = TREE_TYPE (targ1);
9136 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9137 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9138 return fold_build2 (code, type, fold_convert (newtype, targ0),
9139 fold_convert (newtype, targ1));
9141 /* (-a) CMP (-b) -> b CMP a */
9142 if (TREE_CODE (arg0) == NEGATE_EXPR
9143 && TREE_CODE (arg1) == NEGATE_EXPR)
9144 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9145 TREE_OPERAND (arg0, 0));
9147 if (TREE_CODE (arg1) == REAL_CST)
9149 REAL_VALUE_TYPE cst;
9150 cst = TREE_REAL_CST (arg1);
9152 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9153 if (TREE_CODE (arg0) == NEGATE_EXPR)
9154 return
9155 fold_build2 (swap_tree_comparison (code), type,
9156 TREE_OPERAND (arg0, 0),
9157 build_real (TREE_TYPE (arg1),
9158 REAL_VALUE_NEGATE (cst)));
9160 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9161 /* a CMP (-0) -> a CMP 0 */
9162 if (REAL_VALUE_MINUS_ZERO (cst))
9163 return fold_build2 (code, type, arg0,
9164 build_real (TREE_TYPE (arg1), dconst0));
9166 /* x != NaN is always true, other ops are always false. */
9167 if (REAL_VALUE_ISNAN (cst)
9168 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9170 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9171 return omit_one_operand (type, tem, arg0);
9174 /* Fold comparisons against infinity. */
9175 if (REAL_VALUE_ISINF (cst))
9177 tem = fold_inf_compare (code, type, arg0, arg1);
9178 if (tem != NULL_TREE)
9179 return tem;
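	    /* Illustration (hypothetical example, assuming the default
	       -fno-signaling-nans): comparisons against -0.0 and quiet
	       NaN constants fold as described above; __builtin_nan
	       yields a quiet NaN.  */
#if 0
int f (double a) { return a < -0.0; }                  /* folds to: a < 0.0 */
int g (double a) { return a != __builtin_nan (""); }   /* folds to: 1 */
#endif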
9183 /* If this is a comparison of a real constant with a PLUS_EXPR
9184 or a MINUS_EXPR of a real constant, we can convert it into a
9185 comparison with a revised real constant as long as no overflow
9186 occurs when unsafe_math_optimizations are enabled. */
9187 if (flag_unsafe_math_optimizations
9188 && TREE_CODE (arg1) == REAL_CST
9189 && (TREE_CODE (arg0) == PLUS_EXPR
9190 || TREE_CODE (arg0) == MINUS_EXPR)
9191 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9192 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9193 ? MINUS_EXPR : PLUS_EXPR,
9194 arg1, TREE_OPERAND (arg0, 1), 0))
9195 && ! TREE_CONSTANT_OVERFLOW (tem))
9196 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9198 /* Likewise, we can simplify a comparison of a real constant with
9199 a MINUS_EXPR whose first operand is also a real constant, i.e.
9200 (c1 - x) < c2 becomes x > c1-c2. */
9201 if (flag_unsafe_math_optimizations
9202 && TREE_CODE (arg1) == REAL_CST
9203 && TREE_CODE (arg0) == MINUS_EXPR
9204 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9205 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9206 arg1, 0))
9207 && ! TREE_CONSTANT_OVERFLOW (tem))
9208 return fold_build2 (swap_tree_comparison (code), type,
9209 TREE_OPERAND (arg0, 1), tem);
9211 /* Fold comparisons against built-in math functions. */
9212 if (TREE_CODE (arg1) == REAL_CST
9213 && flag_unsafe_math_optimizations
9214 && ! flag_errno_math)
9216 enum built_in_function fcode = builtin_mathfn_code (arg0);
9218 if (fcode != END_BUILTINS)
9220 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9221 if (tem != NULL_TREE)
9222 return tem;
9227 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9228 if (TREE_CONSTANT (arg1)
9229 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9230 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9231 /* This optimization is invalid for ordered comparisons
9232 if CONST+INCR overflows or if foo+incr might overflow.
9233 This optimization is invalid for floating point due to rounding.
9234 For pointer types we assume overflow doesn't happen. */
9235 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9236 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9237 && (code == EQ_EXPR || code == NE_EXPR))))
9239 tree varop, newconst;
9241 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9243 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9244 arg1, TREE_OPERAND (arg0, 1));
9245 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9246 TREE_OPERAND (arg0, 0),
9247 TREE_OPERAND (arg0, 1));
9249 else
9251 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9252 arg1, TREE_OPERAND (arg0, 1));
9253 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9254 TREE_OPERAND (arg0, 0),
9255 TREE_OPERAND (arg0, 1));
9259 /* If VAROP is a reference to a bitfield, we must mask
9260 the constant by the width of the field. */
9261 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9262 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9263 && host_integerp (DECL_SIZE (TREE_OPERAND
9264 (TREE_OPERAND (varop, 0), 1)), 1))
9266 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9267 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9268 tree folded_compare, shift;
9270 /* First check whether the comparison would come out
9271 	     always the same.  If we skipped this check, the masking
9272 	     below could change the meaning of the comparison.  */
9273 folded_compare = fold_build2 (code, type,
9274 TREE_OPERAND (varop, 0), arg1);
9275 if (integer_zerop (folded_compare)
9276 || integer_onep (folded_compare))
9277 return omit_one_operand (type, folded_compare, varop);
9279 shift = build_int_cst (NULL_TREE,
9280 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9281 shift = fold_convert (TREE_TYPE (varop), shift);
9282 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9283 newconst, shift);
9284 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9285 newconst, shift);
9288 return fold_build2 (code, type, varop, newconst);
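	  /* Illustration (hypothetical example): for integral EQ/NE the
	     post-increment is rewritten as a pre-increment with an
	     adjusted constant, avoiding the temporary that holds the old
	     value.  */
#if 0
int i;
int f (void) { return i++ == 5; }   /* folds to: ++i == 6 */
#endif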
9291 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9292 This transformation affects the cases which are handled in later
9293 optimizations involving comparisons with non-negative constants. */
9294 if (TREE_CODE (arg1) == INTEGER_CST
9295 && TREE_CODE (arg0) != INTEGER_CST
9296 && tree_int_cst_sgn (arg1) > 0)
9298 switch (code)
9300 case GE_EXPR:
9301 arg1 = const_binop (MINUS_EXPR, arg1,
9302 build_int_cst (TREE_TYPE (arg1), 1), 0);
9303 return fold_build2 (GT_EXPR, type, arg0,
9304 fold_convert (TREE_TYPE (arg0), arg1));
9306 case LT_EXPR:
9307 arg1 = const_binop (MINUS_EXPR, arg1,
9308 build_int_cst (TREE_TYPE (arg1), 1), 0);
9309 return fold_build2 (LE_EXPR, type, arg0,
9310 fold_convert (TREE_TYPE (arg0), arg1));
9312 default:
9313 break;
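      /* Worked example (hypothetical): with C > 0 the strict and
	 non-strict forms are interchanged so later passes see a
	 canonical shape.  */
#if 0
int f (int x) { return x >= 1; }   /* folds to: x > 0  */
int g (int x) { return x < 1; }    /* folds to: x <= 0 */
#endif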
9317 /* Comparisons with the highest or lowest possible integer of
9318 the specified size will have known values. */
9320 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9322 if (TREE_CODE (arg1) == INTEGER_CST
9323 && ! TREE_CONSTANT_OVERFLOW (arg1)
9324 && width <= 2 * HOST_BITS_PER_WIDE_INT
9325 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9326 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9328 HOST_WIDE_INT signed_max_hi;
9329 unsigned HOST_WIDE_INT signed_max_lo;
9330 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9332 if (width <= HOST_BITS_PER_WIDE_INT)
9334 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9335 - 1;
9336 signed_max_hi = 0;
9337 max_hi = 0;
9339 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9341 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9342 min_lo = 0;
9343 min_hi = 0;
9345 else
9347 max_lo = signed_max_lo;
9348 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9349 min_hi = -1;
9352 else
9354 width -= HOST_BITS_PER_WIDE_INT;
9355 signed_max_lo = -1;
9356 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9357 - 1;
9358 max_lo = -1;
9359 min_lo = 0;
9361 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9363 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9364 min_hi = 0;
9366 else
9368 max_hi = signed_max_hi;
9369 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9373 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9374 && TREE_INT_CST_LOW (arg1) == max_lo)
9375 switch (code)
9377 case GT_EXPR:
9378 return omit_one_operand (type, integer_zero_node, arg0);
9380 case GE_EXPR:
9381 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9383 case LE_EXPR:
9384 return omit_one_operand (type, integer_one_node, arg0);
9386 case LT_EXPR:
9387 return fold_build2 (NE_EXPR, type, arg0, arg1);
9389 /* The GE_EXPR and LT_EXPR cases above are not normally
9390 reached because of previous transformations. */
9392 default:
9393 break;
9395 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9396 == max_hi
9397 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9398 switch (code)
9400 case GT_EXPR:
9401 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9402 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9403 case LE_EXPR:
9404 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9405 return fold_build2 (NE_EXPR, type, arg0, arg1);
9406 default:
9407 break;
9409 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9410 == min_hi
9411 && TREE_INT_CST_LOW (arg1) == min_lo)
9412 switch (code)
9414 case LT_EXPR:
9415 return omit_one_operand (type, integer_zero_node, arg0);
9417 case LE_EXPR:
9418 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9420 case GE_EXPR:
9421 return omit_one_operand (type, integer_one_node, arg0);
9423 case GT_EXPR:
9424 return fold_build2 (NE_EXPR, type, op0, op1);
9426 default:
9427 break;
9429 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9430 == min_hi
9431 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9432 switch (code)
9434 case GE_EXPR:
9435 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9436 return fold_build2 (NE_EXPR, type, arg0, arg1);
9437 case LT_EXPR:
9438 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9439 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9440 default:
9441 break;
9444 else if (!in_gimple_form
9445 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9446 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9447 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9448 /* signed_type does not work on pointer types. */
9449 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9451 /* The following case also applies to X < signed_max+1
9452 		 and X >= signed_max+1 because of previous transformations.  */
9453 if (code == LE_EXPR || code == GT_EXPR)
9455 tree st0, st1;
9456 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9457 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9458 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9459 type, fold_convert (st0, arg0),
9460 build_int_cst (st1, 0));
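	  /* Worked example (hypothetical, assuming a 32-bit unsigned int
	     whose maximum value is 0xffffffffu):  */
#if 0
int f (unsigned x) { return x <= 0xffffffffu; }  /* folds to: 1 (true)  */
int g (unsigned x) { return x >  0xffffffffu; }  /* folds to: 0 (false) */
int h (unsigned x) { return x >= 0xffffffffu; }  /* folds to: x == 0xffffffffu */
#endif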
9466 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9467 a MINUS_EXPR of a constant, we can convert it into a comparison with
9468 a revised constant as long as no overflow occurs. */
9469 if ((code == EQ_EXPR || code == NE_EXPR)
9470 && TREE_CODE (arg1) == INTEGER_CST
9471 && (TREE_CODE (arg0) == PLUS_EXPR
9472 || TREE_CODE (arg0) == MINUS_EXPR)
9473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9474 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9475 ? MINUS_EXPR : PLUS_EXPR,
9476 arg1, TREE_OPERAND (arg0, 1), 0))
9477 && ! TREE_CONSTANT_OVERFLOW (tem))
9478 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9480 /* Similarly for a NEGATE_EXPR. */
9481 else if ((code == EQ_EXPR || code == NE_EXPR)
9482 && TREE_CODE (arg0) == NEGATE_EXPR
9483 && TREE_CODE (arg1) == INTEGER_CST
9484 && 0 != (tem = negate_expr (arg1))
9485 && TREE_CODE (tem) == INTEGER_CST
9486 && ! TREE_CONSTANT_OVERFLOW (tem))
9487 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9489 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9490 for !=. Don't do this for ordered comparisons due to overflow. */
9491 else if ((code == NE_EXPR || code == EQ_EXPR)
9492 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9493 return fold_build2 (code, type,
9494 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9496 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9497 && (TREE_CODE (arg0) == NOP_EXPR
9498 || TREE_CODE (arg0) == CONVERT_EXPR))
9500 /* If we are widening one operand of an integer comparison,
9501 see if the other operand is similarly being widened. Perhaps we
9502 can do the comparison in the narrower type. */
9503 tem = fold_widened_comparison (code, type, arg0, arg1);
9504 if (tem)
9505 return tem;
9507 /* Or if we are changing signedness. */
9508 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9509 if (tem)
9510 return tem;
9513 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9514 constant, we can simplify it. */
9515 else if (TREE_CODE (arg1) == INTEGER_CST
9516 && (TREE_CODE (arg0) == MIN_EXPR
9517 || TREE_CODE (arg0) == MAX_EXPR)
9518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9520 tem = optimize_minmax_comparison (code, type, op0, op1);
9521 if (tem)
9522 return tem;
9524 return NULL_TREE;
9527 /* If we are comparing an ABS_EXPR with a constant, we can
9528 convert all the cases into explicit comparisons, but they may
9529 well not be faster than doing the ABS and one comparison.
9530 But ABS (X) <= C is a range comparison, which becomes a subtraction
9531 and a comparison, and is probably faster. */
9532 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9533 && TREE_CODE (arg0) == ABS_EXPR
9534 && ! TREE_SIDE_EFFECTS (arg0)
9535 && (0 != (tem = negate_expr (arg1)))
9536 && TREE_CODE (tem) == INTEGER_CST
9537 && ! TREE_CONSTANT_OVERFLOW (tem))
9538 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9539 build2 (GE_EXPR, type,
9540 TREE_OPERAND (arg0, 0), tem),
9541 build2 (LE_EXPR, type,
9542 TREE_OPERAND (arg0, 0), arg1));
9544 /* Convert ABS_EXPR<x> >= 0 to true. */
9545 else if (code == GE_EXPR
9546 && tree_expr_nonnegative_p (arg0)
9547 && (integer_zerop (arg1)
9548 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9549 && real_zerop (arg1))))
9550 return omit_one_operand (type, integer_one_node, arg0);
9552 /* Convert ABS_EXPR<x> < 0 to false. */
9553 else if (code == LT_EXPR
9554 && tree_expr_nonnegative_p (arg0)
9555 && (integer_zerop (arg1) || real_zerop (arg1)))
9556 return omit_one_operand (type, integer_zero_node, arg0);
9558 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9559 else if ((code == EQ_EXPR || code == NE_EXPR)
9560 && TREE_CODE (arg0) == ABS_EXPR
9561 && (integer_zerop (arg1) || real_zerop (arg1)))
9562 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9564 /* If this is an EQ or NE comparison with zero and ARG0 is
9565 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9566 two operations, but the latter can be done in one less insn
9567 on machines that have only two-operand insns or on which a
9568 constant cannot be the first operand. */
9569 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9570 && TREE_CODE (arg0) == BIT_AND_EXPR)
9572 tree arg00 = TREE_OPERAND (arg0, 0);
9573 tree arg01 = TREE_OPERAND (arg0, 1);
9574 if (TREE_CODE (arg00) == LSHIFT_EXPR
9575 && integer_onep (TREE_OPERAND (arg00, 0)))
9576 return
9577 fold_build2 (code, type,
9578 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9579 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9580 arg01, TREE_OPERAND (arg00, 1)),
9581 fold_convert (TREE_TYPE (arg0),
9582 integer_one_node)),
9583 arg1);
9584 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9585 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9586 return
9587 fold_build2 (code, type,
9588 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9589 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9590 arg00, TREE_OPERAND (arg01, 1)),
9591 fold_convert (TREE_TYPE (arg0),
9592 integer_one_node)),
9593 arg1);
9596 /* If this is an NE or EQ comparison of zero against the result of a
9597 signed MOD operation whose second operand is a power of 2, make
9598 the MOD operation unsigned since it is simpler and equivalent. */
9599 if ((code == NE_EXPR || code == EQ_EXPR)
9600 && integer_zerop (arg1)
9601 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9602 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9603 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9604 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9605 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9606 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9608 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9609 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9610 fold_convert (newtype,
9611 TREE_OPERAND (arg0, 0)),
9612 fold_convert (newtype,
9613 TREE_OPERAND (arg0, 1)));
9615 return fold_build2 (code, type, newmod,
9616 fold_convert (newtype, arg1));
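      /* Illustration (hypothetical example): x % 4 == 0 depends only on
	 the low bits, so the signed modulus can be computed unsigned.  */
#if 0
int f (int x) { return x % 4 == 0; }
/* folds as if written: (unsigned) x % 4u == 0u  */
#endif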
9619 /* If this is an NE comparison of zero with an AND of one, remove the
9620 comparison since the AND will give the correct value. */
9621 if (code == NE_EXPR && integer_zerop (arg1)
9622 && TREE_CODE (arg0) == BIT_AND_EXPR
9623 && integer_onep (TREE_OPERAND (arg0, 1)))
9624 return fold_convert (type, arg0);
9626 /* If we have (A & C) == C where C is a power of 2, convert this into
9627 (A & C) != 0. Similarly for NE_EXPR. */
9628 if ((code == EQ_EXPR || code == NE_EXPR)
9629 && TREE_CODE (arg0) == BIT_AND_EXPR
9630 && integer_pow2p (TREE_OPERAND (arg0, 1))
9631 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9632 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9633 arg0, fold_convert (TREE_TYPE (arg0),
9634 integer_zero_node));
9636 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9637 bit, then fold the expression into A < 0 or A >= 0. */
9638 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9639 if (tem)
9640 return tem;
9642 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9643 Similarly for NE_EXPR. */
9644 if ((code == EQ_EXPR || code == NE_EXPR)
9645 && TREE_CODE (arg0) == BIT_AND_EXPR
9646 && TREE_CODE (arg1) == INTEGER_CST
9647 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9649 tree notc = fold_build1 (BIT_NOT_EXPR,
9650 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9651 TREE_OPERAND (arg0, 1));
9652 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9653 arg1, notc);
9654 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9655 if (integer_nonzerop (dandnotc))
9656 return omit_one_operand (type, rslt, arg0);
9659 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9660 Similarly for NE_EXPR. */
9661 if ((code == EQ_EXPR || code == NE_EXPR)
9662 && TREE_CODE (arg0) == BIT_IOR_EXPR
9663 && TREE_CODE (arg1) == INTEGER_CST
9664 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9666 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9667 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9668 TREE_OPERAND (arg0, 1), notd);
9669 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9670 if (integer_nonzerop (candnotd))
9671 return omit_one_operand (type, rslt, arg0);
9674 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9675 and similarly for >= into !=. */
9676 if ((code == LT_EXPR || code == GE_EXPR)
9677 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9678 && TREE_CODE (arg1) == LSHIFT_EXPR
9679 && integer_onep (TREE_OPERAND (arg1, 0)))
9680 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9681 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9682 TREE_OPERAND (arg1, 1)),
9683 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9685 else if ((code == LT_EXPR || code == GE_EXPR)
9686 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9687 && (TREE_CODE (arg1) == NOP_EXPR
9688 || TREE_CODE (arg1) == CONVERT_EXPR)
9689 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9690 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9691 return
9692 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9693 fold_convert (TREE_TYPE (arg0),
9694 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9695 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9696 1))),
9697 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9699 /* Simplify comparison of something with itself. (For IEEE
9700 floating-point, we can only do some of these simplifications.) */
9701 if (operand_equal_p (arg0, arg1, 0))
9703 switch (code)
9705 case EQ_EXPR:
9706 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9707 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9708 return constant_boolean_node (1, type);
9709 break;
9711 case GE_EXPR:
9712 case LE_EXPR:
9713 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9714 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9715 return constant_boolean_node (1, type);
9716 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9718 case NE_EXPR:
9719 	    /* For NE, we can only do this simplification if the operands
9720 	       are integral or we don't honor IEEE floating-point NaNs.  */
9721 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9722 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9723 break;
9724 /* ... fall through ... */
9725 case GT_EXPR:
9726 case LT_EXPR:
9727 return constant_boolean_node (0, type);
9728 default:
9729 gcc_unreachable ();
9733 /* If we are comparing an expression that just has comparisons
9734 of two integer values, arithmetic expressions of those comparisons,
9735 and constants, we can simplify it. There are only three cases
9736 to check: the two values can either be equal, the first can be
9737 greater, or the second can be greater. Fold the expression for
9738 those three values. Since each value must be 0 or 1, we have
9739 eight possibilities, each of which corresponds to the constant 0
9740 or 1 or one of the six possible comparisons.
9742 This handles common cases like (a > b) == 0 but also handles
9743 expressions like ((x > y) - (y > x)) > 0, which supposedly
9744 occur in macroized code. */
9746 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9748 tree cval1 = 0, cval2 = 0;
9749 int save_p = 0;
9751 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9752 /* Don't handle degenerate cases here; they should already
9753 have been handled anyway. */
9754 && cval1 != 0 && cval2 != 0
9755 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9756 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9757 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9758 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9759 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9760 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9761 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9763 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9764 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9766 /* We can't just pass T to eval_subst in case cval1 or cval2
9767 was the same as ARG1. */
9769 tree high_result
9770 = fold_build2 (code, type,
9771 eval_subst (arg0, cval1, maxval,
9772 cval2, minval),
9773 arg1);
9774 tree equal_result
9775 = fold_build2 (code, type,
9776 eval_subst (arg0, cval1, maxval,
9777 cval2, maxval),
9778 arg1);
9779 tree low_result
9780 = fold_build2 (code, type,
9781 eval_subst (arg0, cval1, minval,
9782 cval2, maxval),
9783 arg1);
9785 /* All three of these results should be 0 or 1. Confirm they
9786 are. Then use those values to select the proper code
9787 to use. */
9789 if ((integer_zerop (high_result)
9790 || integer_onep (high_result))
9791 && (integer_zerop (equal_result)
9792 || integer_onep (equal_result))
9793 && (integer_zerop (low_result)
9794 || integer_onep (low_result)))
9796 /* Make a 3-bit mask with the high-order bit being the
9797 		 value for `>', the next for `=', and the low for `<'.  */
9798 switch ((integer_onep (high_result) * 4)
9799 + (integer_onep (equal_result) * 2)
9800 + integer_onep (low_result))
9802 case 0:
9803 /* Always false. */
9804 return omit_one_operand (type, integer_zero_node, arg0);
9805 case 1:
9806 code = LT_EXPR;
9807 break;
9808 case 2:
9809 code = EQ_EXPR;
9810 break;
9811 case 3:
9812 code = LE_EXPR;
9813 break;
9814 case 4:
9815 code = GT_EXPR;
9816 break;
9817 case 5:
9818 code = NE_EXPR;
9819 break;
9820 case 6:
9821 code = GE_EXPR;
9822 break;
9823 case 7:
9824 /* Always true. */
9825 return omit_one_operand (type, integer_one_node, arg0);
9828 if (save_p)
9829 return save_expr (build2 (code, type, cval1, cval2));
9830 else
9831 return fold_build2 (code, type, cval1, cval2);
9836 /* If this is a comparison of a field, we may be able to simplify it. */
9837 if (((TREE_CODE (arg0) == COMPONENT_REF
9838 && lang_hooks.can_use_bit_fields_p ())
9839 || TREE_CODE (arg0) == BIT_FIELD_REF)
9840 && (code == EQ_EXPR || code == NE_EXPR)
9841 /* Handle the constant case even without -O
9842 to make sure the warnings are given. */
9843 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9845 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9846 if (t1)
9847 return t1;
9850 /* Fold a comparison of the address of COMPONENT_REFs with the same
9851 type and component to a comparison of the address of the base
9852 object. In short, &x->a OP &y->a to x OP y and
9853 &x->a OP &y.a to x OP &y */
9854 if (TREE_CODE (arg0) == ADDR_EXPR
9855 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9856 && TREE_CODE (arg1) == ADDR_EXPR
9857 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9859 tree cref0 = TREE_OPERAND (arg0, 0);
9860 tree cref1 = TREE_OPERAND (arg1, 0);
9861 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9863 tree op0 = TREE_OPERAND (cref0, 0);
9864 tree op1 = TREE_OPERAND (cref1, 0);
9865 return fold_build2 (code, type,
9866 build_fold_addr_expr (op0),
9867 build_fold_addr_expr (op1));
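	  /* Illustration (hypothetical example): comparing the addresses
	     of the same member of two objects reduces to comparing the
	     objects' addresses.  */
#if 0
struct S { int a; };
int f (struct S *x, struct S *y) { return &x->a == &y->a; }
/* folds to: x == y  */
#endif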
9871 /* Optimize comparisons of strlen vs zero to a compare of the
9872 first character of the string vs zero. To wit,
9873 strlen(ptr) == 0 => *ptr == 0
9874 strlen(ptr) != 0 => *ptr != 0
9875 Other cases should reduce to one of these two (or a constant)
9876 due to the return value of strlen being unsigned. */
9877 if ((code == EQ_EXPR || code == NE_EXPR)
9878 && integer_zerop (arg1)
9879 && TREE_CODE (arg0) == CALL_EXPR)
9881 tree fndecl = get_callee_fndecl (arg0);
9882 tree arglist;
9884 if (fndecl
9885 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9886 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9887 && (arglist = TREE_OPERAND (arg0, 1))
9888 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9889 && ! TREE_CHAIN (arglist))
9891 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9892 return fold_build2 (code, type, iref,
9893 build_int_cst (TREE_TYPE (iref), 0));
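	  /* Illustration (hypothetical example): only the first character
	     of the string matters for a zero-length test.  */
#if 0
#include <string.h>
int f (const char *p) { return strlen (p) == 0; }   /* folds to: *p == 0 */
#endif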
9897 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9898 into a single range test. */
9899 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9900 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9901 && TREE_CODE (arg1) == INTEGER_CST
9902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9903 && !integer_zerop (TREE_OPERAND (arg0, 1))
9904 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9905 && !TREE_OVERFLOW (arg1))
9907 t1 = fold_div_compare (code, type, arg0, arg1);
9908 if (t1 != NULL_TREE)
9909 return t1;
9912 if ((code == EQ_EXPR || code == NE_EXPR)
9913 && integer_zerop (arg1)
9914 && tree_expr_nonzero_p (arg0))
9916 tree res = constant_boolean_node (code==NE_EXPR, type);
9917 return omit_one_operand (type, res, arg0);
9920 t1 = fold_relational_const (code, type, arg0, arg1);
9921 return t1 == NULL_TREE ? NULL_TREE : t1;
9923 case UNORDERED_EXPR:
9924 case ORDERED_EXPR:
9925 case UNLT_EXPR:
9926 case UNLE_EXPR:
9927 case UNGT_EXPR:
9928 case UNGE_EXPR:
9929 case UNEQ_EXPR:
9930 case LTGT_EXPR:
9931 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9933 t1 = fold_relational_const (code, type, arg0, arg1);
9934 if (t1 != NULL_TREE)
9935 return t1;
9938 /* If the first operand is NaN, the result is constant. */
9939 if (TREE_CODE (arg0) == REAL_CST
9940 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9941 && (code != LTGT_EXPR || ! flag_trapping_math))
9943 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9944 ? integer_zero_node
9945 : integer_one_node;
9946 return omit_one_operand (type, t1, arg1);
9949 /* If the second operand is NaN, the result is constant. */
9950 if (TREE_CODE (arg1) == REAL_CST
9951 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9952 && (code != LTGT_EXPR || ! flag_trapping_math))
9954 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9955 ? integer_zero_node
9956 : integer_one_node;
9957 return omit_one_operand (type, t1, arg0);
9960 /* Simplify unordered comparison of something with itself. */
9961 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9962 && operand_equal_p (arg0, arg1, 0))
9963 return constant_boolean_node (1, type);
9965 if (code == LTGT_EXPR
9966 && !flag_trapping_math
9967 && operand_equal_p (arg0, arg1, 0))
9968 return constant_boolean_node (0, type);
9970 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9972 tree targ0 = strip_float_extensions (arg0);
9973 tree targ1 = strip_float_extensions (arg1);
9974 tree newtype = TREE_TYPE (targ0);
9976 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9977 newtype = TREE_TYPE (targ1);
9979 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9980 return fold_build2 (code, type, fold_convert (newtype, targ0),
9981 fold_convert (newtype, targ1));
9984 return NULL_TREE;
9986 case COMPOUND_EXPR:
9987 /* When pedantic, a compound expression can be neither an lvalue
9988 nor an integer constant expression. */
9989 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9990 return NULL_TREE;
9991       /* Don't let (0, 0) be a null pointer constant.  */
9992 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9993 : fold_convert (type, arg1);
9994 return pedantic_non_lvalue (tem);
9996 case COMPLEX_EXPR:
9997 if (wins)
9998 return build_complex (type, arg0, arg1);
9999 return NULL_TREE;
10001 case ASSERT_EXPR:
10002 /* An ASSERT_EXPR should never be passed to fold_binary. */
10003 gcc_unreachable ();
10005 default:
10006 return NULL_TREE;
10007 } /* switch (code) */
10010 /* Callback for walk_tree, looking for a LABEL_EXPR.
10011    Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
10012    Does not descend into the sub-tree of a GOTO_EXPR.  */
10014 static tree
10015 contains_label_1 (tree *tp,
10016 int *walk_subtrees,
10017 void *data ATTRIBUTE_UNUSED)
10019 switch (TREE_CODE (*tp))
10021 case LABEL_EXPR:
10022 return *tp;
10023 case GOTO_EXPR:
10024 *walk_subtrees = 0;
10025       /* ... fall through ...  */
10026 default:
10027 return NULL_TREE;
10031 /* Checks whether the sub-tree ST contains a LABEL_EXPR that is
10032    accessible from outside the sub-tree.  Returns false if no
10033    such label is found.  */
10035 static bool
10036 contains_label_p (tree st)
10038 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10041 /* Fold a ternary expression of code CODE and type TYPE with operands
10042 OP0, OP1, and OP2. Return the folded expression if folding is
10043 successful. Otherwise, return NULL_TREE. */
10045 tree
10046 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10048 tree tem;
10049 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10050 enum tree_code_class kind = TREE_CODE_CLASS (code);
10052 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10053 && TREE_CODE_LENGTH (code) == 3);
10055 /* Strip any conversions that don't change the mode. This is safe
10056 for every expression, except for a comparison expression because
10057 its signedness is derived from its operands. So, in the latter
10058 case, only strip conversions that don't change the signedness.
10060 Note that this is done as an internal manipulation within the
10061 constant folder, in order to find the simplest representation of
10062      the arguments so that their form can be studied.  In any case,
10063 the appropriate type conversions should be put back in the tree
10064 that will get out of the constant folder. */
10065 if (op0)
10067 arg0 = op0;
10068 STRIP_NOPS (arg0);
10071 if (op1)
10073 arg1 = op1;
10074 STRIP_NOPS (arg1);
10077 switch (code)
10079 case COMPONENT_REF:
10080 if (TREE_CODE (arg0) == CONSTRUCTOR
10081 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10083 unsigned HOST_WIDE_INT idx;
10084 tree field, value;
10085 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10086 if (field == arg1)
10087 return value;
10089 return NULL_TREE;
10091 case COND_EXPR:
10092 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10093 so all simple results must be passed through pedantic_non_lvalue. */
10094 if (TREE_CODE (arg0) == INTEGER_CST)
10096 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10097 tem = integer_zerop (arg0) ? op2 : op1;
10098 /* Only optimize constant conditions when the selected branch
10099 has the same type as the COND_EXPR. This avoids optimizing
10100 away "c ? x : throw", where the throw has a void type.
10101 	 Avoid throwing away the operand which contains a label.  */
10102 if ((!TREE_SIDE_EFFECTS (unused_op)
10103 || !contains_label_p (unused_op))
10104 && (! VOID_TYPE_P (TREE_TYPE (tem))
10105 || VOID_TYPE_P (type)))
10106 return pedantic_non_lvalue (tem);
10107 return NULL_TREE;
10109 if (operand_equal_p (arg1, op2, 0))
10110 return pedantic_omit_one_operand (type, arg1, arg0);
10112 /* If we have A op B ? A : C, we may be able to convert this to a
10113 simpler expression, depending on the operation and the values
10114 of B and C. Signed zeros prevent all of these transformations,
10115 for reasons given above each one.
10117 Also try swapping the arguments and inverting the conditional. */
10118 if (COMPARISON_CLASS_P (arg0)
10119 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10120 arg1, TREE_OPERAND (arg0, 1))
10121 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10123 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10124 if (tem)
10125 return tem;
10128 if (COMPARISON_CLASS_P (arg0)
10129 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10130 op2,
10131 TREE_OPERAND (arg0, 1))
10132 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10134 tem = invert_truthvalue (arg0);
10135 if (COMPARISON_CLASS_P (tem))
10137 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10138 if (tem)
10139 return tem;
10143 /* If the second operand is simpler than the third, swap them
10144 since that produces better jump optimization results. */
10145 if (truth_value_p (TREE_CODE (arg0))
10146 && tree_swap_operands_p (op1, op2, false))
10148 /* See if this can be inverted. If it can't, possibly because
10149 it was a floating-point inequality comparison, don't do
10150 anything. */
10151 tem = invert_truthvalue (arg0);
10153 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10154 return fold_build3 (code, type, tem, op2, op1);
10157 /* Convert A ? 1 : 0 to simply A. */
10158 if (integer_onep (op1)
10159 && integer_zerop (op2)
10160 /* If we try to convert OP0 to our type, the
10161 call to fold will try to move the conversion inside
10162 a COND, which will recurse. In that case, the COND_EXPR
10163 is probably the best choice, so leave it alone. */
10164 && type == TREE_TYPE (arg0))
10165 return pedantic_non_lvalue (arg0);
10167 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10168 over COND_EXPR in cases such as floating point comparisons. */
10169 if (integer_zerop (op1)
10170 && integer_onep (op2)
10171 && truth_value_p (TREE_CODE (arg0)))
10172 return pedantic_non_lvalue (fold_convert (type,
10173 invert_truthvalue (arg0)));
10175 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10176 if (TREE_CODE (arg0) == LT_EXPR
10177 && integer_zerop (TREE_OPERAND (arg0, 1))
10178 && integer_zerop (op2)
10179 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10180 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10181 TREE_TYPE (tem), tem, arg1));
10183 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10184 already handled above. */
10185 if (TREE_CODE (arg0) == BIT_AND_EXPR
10186 && integer_onep (TREE_OPERAND (arg0, 1))
10187 && integer_zerop (op2)
10188 && integer_pow2p (arg1))
10190 tree tem = TREE_OPERAND (arg0, 0);
10191 STRIP_NOPS (tem);
10192 if (TREE_CODE (tem) == RSHIFT_EXPR
10193 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10194 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10195 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10196 return fold_build2 (BIT_AND_EXPR, type,
10197 TREE_OPERAND (tem, 0), arg1);
10200 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10201 is probably obsolete because the first operand should be a
10202 truth value (that's why we have the two cases above), but let's
10203 leave it in until we can confirm this for all front-ends. */
10204 if (integer_zerop (op2)
10205 && TREE_CODE (arg0) == NE_EXPR
10206 && integer_zerop (TREE_OPERAND (arg0, 1))
10207 && integer_pow2p (arg1)
10208 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10209 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10210 arg1, OEP_ONLY_CONST))
10211 return pedantic_non_lvalue (fold_convert (type,
10212 TREE_OPERAND (arg0, 0)));
10214 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10215 if (integer_zerop (op2)
10216 && truth_value_p (TREE_CODE (arg0))
10217 && truth_value_p (TREE_CODE (arg1)))
10218 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10220 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10221 if (integer_onep (op2)
10222 && truth_value_p (TREE_CODE (arg0))
10223 && truth_value_p (TREE_CODE (arg1)))
10225 /* Only perform transformation if ARG0 is easily inverted. */
10226 tem = invert_truthvalue (arg0);
10227 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10228 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10231 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10232 if (integer_zerop (arg1)
10233 && truth_value_p (TREE_CODE (arg0))
10234 && truth_value_p (TREE_CODE (op2)))
10236 /* Only perform transformation if ARG0 is easily inverted. */
10237 tem = invert_truthvalue (arg0);
10238 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10239 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10242 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10243 if (integer_onep (arg1)
10244 && truth_value_p (TREE_CODE (arg0))
10245 && truth_value_p (TREE_CODE (op2)))
10246 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
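      /* Worked example (hypothetical, assuming a and b are truth
	 values, i.e. 0 or 1):  */
#if 0
int f (int a, int b) { return a ? b : 0; }   /* folds to: a && b */
int g (int a, int b) { return a ? 1 : b; }   /* folds to: a || b */
#endif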
10248 return NULL_TREE;
10250 case CALL_EXPR:
10251 /* Check for a built-in function. */
10252 if (TREE_CODE (op0) == ADDR_EXPR
10253 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10254 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10255 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10256 return NULL_TREE;
10258 case BIT_FIELD_REF:
10259 if (TREE_CODE (arg0) == VECTOR_CST
10260 && type == TREE_TYPE (TREE_TYPE (arg0))
10261 && host_integerp (arg1, 1)
10262 && host_integerp (op2, 1))
10264 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10265 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10267 if (width != 0
10268 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10269 && (idx % width) == 0
10270 && (idx = idx / width)
10271 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10273 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10274 while (idx-- > 0 && elements)
10275 elements = TREE_CHAIN (elements);
10276 if (elements)
10277 return TREE_VALUE (elements);
10278 else
10279 return fold_convert (type, integer_zero_node);
10282 return NULL_TREE;
10284 default:
10285 return NULL_TREE;
10286 } /* switch (code) */
10289 /* Perform constant folding and related simplification of EXPR.
10290 The related simplifications include x*1 => x, x*0 => 0, etc.,
10291 and application of the associative law.
10292 NOP_EXPR conversions may be removed freely (as long as we
10293 are careful not to change the type of the overall expression).
10294 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10295 but we can constant-fold them if they have constant operands. */
10297 #ifdef ENABLE_FOLD_CHECKING
10298 # define fold(x) fold_1 (x)
10299 static tree fold_1 (tree);
10300 static
10301 #endif
10302 tree
10303 fold (tree expr)
10305 const tree t = expr;
10306 enum tree_code code = TREE_CODE (t);
10307 enum tree_code_class kind = TREE_CODE_CLASS (code);
10308 tree tem;
10310 /* Return right away if a constant. */
10311 if (kind == tcc_constant)
10312 return t;
10314 if (IS_EXPR_CODE_CLASS (kind))
10316 tree type = TREE_TYPE (t);
10317 tree op0, op1, op2;
10319 switch (TREE_CODE_LENGTH (code))
10321 case 1:
10322 op0 = TREE_OPERAND (t, 0);
10323 tem = fold_unary (code, type, op0);
10324 return tem ? tem : expr;
10325 case 2:
10326 op0 = TREE_OPERAND (t, 0);
10327 op1 = TREE_OPERAND (t, 1);
10328 tem = fold_binary (code, type, op0, op1);
10329 return tem ? tem : expr;
10330 case 3:
10331 op0 = TREE_OPERAND (t, 0);
10332 op1 = TREE_OPERAND (t, 1);
10333 op2 = TREE_OPERAND (t, 2);
10334 tem = fold_ternary (code, type, op0, op1, op2);
10335 return tem ? tem : expr;
10336 default:
10337 break;
10341 switch (code)
10343 case CONST_DECL:
10344 return fold (DECL_INITIAL (t));
10346 default:
10347 return t;
10348 } /* switch (code) */
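/* Usage sketch (hypothetical caller, not part of this file): build a
   node and fold it in one step.  TYPE and X stand for any integral
   tree type and operand the caller already has.  */
#if 0
tree folded = fold_build2 (PLUS_EXPR, type,
			   x, build_int_cst (type, 0));
/* folds x + 0 back to x */
#endif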
10351 #ifdef ENABLE_FOLD_CHECKING
10352 #undef fold
10354 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10355 static void fold_check_failed (tree, tree);
10356 void print_fold_checksum (tree);
10358 /* When --enable-checking=fold is in effect, compute a digest of EXPR
10359    before and after the actual fold call to verify that fold did not
10360    accidentally change the original expr.  */
10362 tree
10363 fold (tree expr)
10365 tree ret;
10366 struct md5_ctx ctx;
10367 unsigned char checksum_before[16], checksum_after[16];
10368 htab_t ht;
10370 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10371 md5_init_ctx (&ctx);
10372 fold_checksum_tree (expr, &ctx, ht);
10373 md5_finish_ctx (&ctx, checksum_before);
10374 htab_empty (ht);
10376 ret = fold_1 (expr);
10378 md5_init_ctx (&ctx);
10379 fold_checksum_tree (expr, &ctx, ht);
10380 md5_finish_ctx (&ctx, checksum_after);
10381 htab_delete (ht);
10383 if (memcmp (checksum_before, checksum_after, 16))
10384 fold_check_failed (expr, ret);
10386 return ret;
10389 void
10390 print_fold_checksum (tree expr)
10392 struct md5_ctx ctx;
10393 unsigned char checksum[16], cnt;
10394 htab_t ht;
10396 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10397 md5_init_ctx (&ctx);
10398 fold_checksum_tree (expr, &ctx, ht);
10399 md5_finish_ctx (&ctx, checksum);
10400 htab_delete (ht);
10401 for (cnt = 0; cnt < 16; ++cnt)
10402 fprintf (stderr, "%02x", checksum[cnt]);
10403 putc ('\n', stderr);
10406 static void
10407 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10409 internal_error ("fold check: original tree changed by fold");
10412 static void
10413 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10415 void **slot;
10416 enum tree_code code;
10417 char buf[sizeof (struct tree_function_decl)];
10418 int i, len;
10420 recursive_label:
10422 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10423 <= sizeof (struct tree_function_decl))
10424 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10425 if (expr == NULL)
10426 return;
10427 slot = htab_find_slot (ht, expr, INSERT);
10428 if (*slot != NULL)
10429 return;
10430 *slot = expr;
10431 code = TREE_CODE (expr);
10432 if (TREE_CODE_CLASS (code) == tcc_declaration
10433 && DECL_ASSEMBLER_NAME_SET_P (expr))
10435 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10436 memcpy (buf, expr, tree_size (expr));
10437 expr = (tree) buf;
10438 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10440 else if (TREE_CODE_CLASS (code) == tcc_type
10441 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10442 || TYPE_CACHED_VALUES_P (expr)
10443 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10445 /* Allow these fields to be modified. */
10446 memcpy (buf, expr, tree_size (expr));
10447 expr = (tree) buf;
10448 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10449 TYPE_POINTER_TO (expr) = NULL;
10450 TYPE_REFERENCE_TO (expr) = NULL;
10451 if (TYPE_CACHED_VALUES_P (expr))
10453 TYPE_CACHED_VALUES_P (expr) = 0;
10454 TYPE_CACHED_VALUES (expr) = NULL;
10457 md5_process_bytes (expr, tree_size (expr), ctx);
10458 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10459 if (TREE_CODE_CLASS (code) != tcc_type
10460 && TREE_CODE_CLASS (code) != tcc_declaration
10461 && code != TREE_LIST)
10462 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10463 switch (TREE_CODE_CLASS (code))
10465 case tcc_constant:
10466 switch (code)
10468 case STRING_CST:
10469 md5_process_bytes (TREE_STRING_POINTER (expr),
10470 TREE_STRING_LENGTH (expr), ctx);
10471 break;
10472 case COMPLEX_CST:
10473 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10474 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10475 break;
10476 case VECTOR_CST:
10477 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10478 break;
10479 default:
10480 break;
10482 break;
10483 case tcc_exceptional:
10484 switch (code)
10486 case TREE_LIST:
10487 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10488 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10489 expr = TREE_CHAIN (expr);
10490 goto recursive_label;
10491 break;
10492 case TREE_VEC:
10493 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10494 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10495 break;
10496 default:
10497 break;
10499 break;
10500 case tcc_expression:
10501 case tcc_reference:
10502 case tcc_comparison:
10503 case tcc_unary:
10504 case tcc_binary:
10505 case tcc_statement:
10506 len = TREE_CODE_LENGTH (code);
10507 for (i = 0; i < len; ++i)
10508 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10509 break;
10510 case tcc_declaration:
10511 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10512 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10513 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10514 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10515 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10516 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10517 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10518 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10519 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10521 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10523 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10524 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10525 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10527 break;
10528 case tcc_type:
10529 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10530 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10531 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10532 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10533 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10534 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10535 if (INTEGRAL_TYPE_P (expr)
10536 || SCALAR_FLOAT_TYPE_P (expr))
10538 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10539 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10541 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10542 if (TREE_CODE (expr) == RECORD_TYPE
10543 || TREE_CODE (expr) == UNION_TYPE
10544 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10545 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10546 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10547 break;
10548 default:
10549 break;
10553 #endif
10555 /* Fold a unary tree expression with code CODE of type TYPE with an
10556 operand OP0. Return a folded expression if successful. Otherwise,
10557 return a tree expression with code CODE of type TYPE with an
10558 operand OP0. */
10560 tree
10561 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10563 tree tem;
10564 #ifdef ENABLE_FOLD_CHECKING
10565 unsigned char checksum_before[16], checksum_after[16];
10566 struct md5_ctx ctx;
10567 htab_t ht;
10569 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10570 md5_init_ctx (&ctx);
10571 fold_checksum_tree (op0, &ctx, ht);
10572 md5_finish_ctx (&ctx, checksum_before);
10573 htab_empty (ht);
10574 #endif
10576 tem = fold_unary (code, type, op0);
10577 if (!tem)
10578 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10580 #ifdef ENABLE_FOLD_CHECKING
10581 md5_init_ctx (&ctx);
10582 fold_checksum_tree (op0, &ctx, ht);
10583 md5_finish_ctx (&ctx, checksum_after);
10584 htab_delete (ht);
10586 if (memcmp (checksum_before, checksum_after, 16))
10587 fold_check_failed (op0, tem);
10588 #endif
10589 return tem;
10592 /* Fold a binary tree expression with code CODE of type TYPE with
10593 operands OP0 and OP1. Return a folded expression if successful.
10594 Otherwise, return a tree expression with code CODE of type TYPE
10595 with operands OP0 and OP1. */
10597 tree
10598 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10599 MEM_STAT_DECL)
10601 tree tem;
10602 #ifdef ENABLE_FOLD_CHECKING
10603 unsigned char checksum_before_op0[16],
10604 checksum_before_op1[16],
10605 checksum_after_op0[16],
10606 checksum_after_op1[16];
10607 struct md5_ctx ctx;
10608 htab_t ht;
10610 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10611 md5_init_ctx (&ctx);
10612 fold_checksum_tree (op0, &ctx, ht);
10613 md5_finish_ctx (&ctx, checksum_before_op0);
10614 htab_empty (ht);
10616 md5_init_ctx (&ctx);
10617 fold_checksum_tree (op1, &ctx, ht);
10618 md5_finish_ctx (&ctx, checksum_before_op1);
10619 htab_empty (ht);
10620 #endif
10622 tem = fold_binary (code, type, op0, op1);
10623 if (!tem)
10624 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10626 #ifdef ENABLE_FOLD_CHECKING
10627 md5_init_ctx (&ctx);
10628 fold_checksum_tree (op0, &ctx, ht);
10629 md5_finish_ctx (&ctx, checksum_after_op0);
10630 htab_empty (ht);
10632 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10633 fold_check_failed (op0, tem);
10635 md5_init_ctx (&ctx);
10636 fold_checksum_tree (op1, &ctx, ht);
10637 md5_finish_ctx (&ctx, checksum_after_op1);
10638 htab_delete (ht);
10640 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10641 fold_check_failed (op1, tem);
10642 #endif
10643 return tem;
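/* A minimal illustrative sketch (not from the original file): fold_build2
   on two constants never allocates the binary node.  */
#if 0
tree two = build_int_cst (integer_type_node, 2);
tree three = build_int_cst (integer_type_node, 3);
tree sum = fold_build2 (PLUS_EXPR, integer_type_node, two, three);
/* SUM is the INTEGER_CST 5.  Under ENABLE_FOLD_CHECKING the md5
   checksums above additionally verify that folding left TWO and THREE
   untouched.  */
#endif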
10646 /* Fold a ternary tree expression with code CODE of type TYPE with
10647 operands OP0, OP1, and OP2. Return a folded expression if
10648 successful. Otherwise, return a tree expression with code CODE of
10649 type TYPE with operands OP0, OP1, and OP2. */
10651 tree
10652 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10653 MEM_STAT_DECL)
10655 tree tem;
10656 #ifdef ENABLE_FOLD_CHECKING
10657 unsigned char checksum_before_op0[16],
10658 checksum_before_op1[16],
10659 checksum_before_op2[16],
10660 checksum_after_op0[16],
10661 checksum_after_op1[16],
10662 checksum_after_op2[16];
10663 struct md5_ctx ctx;
10664 htab_t ht;
10666 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10667 md5_init_ctx (&ctx);
10668 fold_checksum_tree (op0, &ctx, ht);
10669 md5_finish_ctx (&ctx, checksum_before_op0);
10670 htab_empty (ht);
10672 md5_init_ctx (&ctx);
10673 fold_checksum_tree (op1, &ctx, ht);
10674 md5_finish_ctx (&ctx, checksum_before_op1);
10675 htab_empty (ht);
10677 md5_init_ctx (&ctx);
10678 fold_checksum_tree (op2, &ctx, ht);
10679 md5_finish_ctx (&ctx, checksum_before_op2);
10680 htab_empty (ht);
10681 #endif
10683 tem = fold_ternary (code, type, op0, op1, op2);
10684 if (!tem)
10685 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10687 #ifdef ENABLE_FOLD_CHECKING
10688 md5_init_ctx (&ctx);
10689 fold_checksum_tree (op0, &ctx, ht);
10690 md5_finish_ctx (&ctx, checksum_after_op0);
10691 htab_empty (ht);
10693 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10694 fold_check_failed (op0, tem);
10696 md5_init_ctx (&ctx);
10697 fold_checksum_tree (op1, &ctx, ht);
10698 md5_finish_ctx (&ctx, checksum_after_op1);
10699 htab_empty (ht);
10701 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10702 fold_check_failed (op1, tem);
10704 md5_init_ctx (&ctx);
10705 fold_checksum_tree (op2, &ctx, ht);
10706 md5_finish_ctx (&ctx, checksum_after_op2);
10707 htab_delete (ht);
10709 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10710 fold_check_failed (op2, tem);
10711 #endif
10712 return tem;
10715 /* Perform constant folding and related simplification of initializer
10716 expression EXPR. These behave identically to "fold_buildN" but ignore
10717 potential run-time traps and exceptions that fold must preserve. */
10719 #define START_FOLD_INIT \
10720 int saved_signaling_nans = flag_signaling_nans;\
10721 int saved_trapping_math = flag_trapping_math;\
10722 int saved_rounding_math = flag_rounding_math;\
10723 int saved_trapv = flag_trapv;\
10724 flag_signaling_nans = 0;\
10725 flag_trapping_math = 0;\
10726 flag_rounding_math = 0;\
10727 flag_trapv = 0
10729 #define END_FOLD_INIT \
10730 flag_signaling_nans = saved_signaling_nans;\
10731 flag_trapping_math = saved_trapping_math;\
10732 flag_rounding_math = saved_rounding_math;\
10733 flag_trapv = saved_trapv
10735 tree
10736 fold_build1_initializer (enum tree_code code, tree type, tree op)
10738 tree result;
10739 START_FOLD_INIT;
10741 result = fold_build1 (code, type, op);
10743 END_FOLD_INIT;
10744 return result;
10747 tree
10748 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10750 tree result;
10751 START_FOLD_INIT;
10753 result = fold_build2 (code, type, op0, op1);
10755 END_FOLD_INIT;
10756 return result;
10759 tree
10760 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10761 tree op2)
10763 tree result;
10764 START_FOLD_INIT;
10766 result = fold_build3 (code, type, op0, op1, op2);
10768 END_FOLD_INIT;
10769 return result;
10772 #undef START_FOLD_INIT
10773 #undef END_FOLD_INIT
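/* An illustrative sketch (not from the original file) of why the
   initializer variants exist.  With -ftrapping-math, fold must keep
   1.0/0.0 as a run-time division so the trap is preserved; a static
   initializer cannot trap, so the _initializer entry point clears the
   flag first and can fold the division to +Inf on targets with
   infinities.  */
#if 0
tree one = build_real (double_type_node, dconst1);
tree zero = build_real (double_type_node, dconst0);
tree init = fold_build2_initializer (RDIV_EXPR, double_type_node, one, zero);
#endif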
10775 /* Determine if first argument is a multiple of second argument. Return 0 if
10776 it is not, or we cannot easily determine it to be.
10778 An example of the sort of thing we care about (at this point; this routine
10779 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10780 fold cases do now) is discovering that
10782 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10784 is a multiple of
10786 SAVE_EXPR (J * 8)
10788 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10790 This code also handles discovering that
10792 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10794 is a multiple of 8 so we don't have to worry about dealing with a
10795 possible remainder.
10797 Note that we *look* inside a SAVE_EXPR only to determine how it was
10798 calculated; it is not safe for fold to do much of anything else with the
10799 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10800 at run time. For example, the latter example above *cannot* be implemented
10801 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10802 evaluation time of the original SAVE_EXPR is not necessarily the same at
10803 the time the new expression is evaluated. The only optimization of this
10804 sort that would be valid is changing
10806 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10808 divided by 8 to
10810 SAVE_EXPR (I) * SAVE_EXPR (J)
10812 (where the same SAVE_EXPR (J) is used in the original and the
10813 transformed version). */
10815 static int
10816 multiple_of_p (tree type, tree top, tree bottom)
10818 if (operand_equal_p (top, bottom, 0))
10819 return 1;
10821 if (TREE_CODE (type) != INTEGER_TYPE)
10822 return 0;
10824 switch (TREE_CODE (top))
10826 case BIT_AND_EXPR:
10827 /* Bitwise and provides a power of two multiple. If the mask is
10828 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10829 if (!integer_pow2p (bottom))
10830 return 0;
10831 /* FALLTHRU */
10833 case MULT_EXPR:
10834 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10835 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10837 case PLUS_EXPR:
10838 case MINUS_EXPR:
10839 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10840 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10842 case LSHIFT_EXPR:
10843 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10845 tree op1, t1;
10847 op1 = TREE_OPERAND (top, 1);
10848 /* const_binop may not detect overflow correctly,
10849 so check for it explicitly here. */
10850 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10851 > TREE_INT_CST_LOW (op1)
10852 && TREE_INT_CST_HIGH (op1) == 0
10853 && 0 != (t1 = fold_convert (type,
10854 const_binop (LSHIFT_EXPR,
10855 size_one_node,
10856 op1, 0)))
10857 && ! TREE_OVERFLOW (t1))
10858 return multiple_of_p (type, t1, bottom);
10860 return 0;
10862 case NOP_EXPR:
10863 /* Can't handle conversions from non-integral or wider integral type. */
10864 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10865 || (TYPE_PRECISION (type)
10866 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10867 return 0;
10869 /* ... fall through ... */
10871 case SAVE_EXPR:
10872 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10874 case INTEGER_CST:
10875 if (TREE_CODE (bottom) != INTEGER_CST
10876 || (TYPE_UNSIGNED (type)
10877 && (tree_int_cst_sgn (top) < 0
10878 || tree_int_cst_sgn (bottom) < 0)))
10879 return 0;
10880 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10881 top, bottom, 0));
10883 default:
10884 return 0;
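/* A worked sketch of the analysis above (not from the original file);
   I and J here are hypothetical sizetype expressions.  */
#if 0
tree eight = build_int_cst (sizetype, 8);
tree j8 = save_expr (fold_build2 (MULT_EXPR, sizetype, j, eight));
tree prod = fold_build2 (MULT_EXPR, sizetype, save_expr (i), j8);
/* multiple_of_p (sizetype, prod, eight) returns 1: the MULT_EXPR case
   tries each factor, the SAVE_EXPR case looks inside J8, and the
   INTEGER_CST case finds 8 % 8 == 0.  */
#endif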
10888 /* Return true if `t' is known to be non-negative. */
10891 tree_expr_nonnegative_p (tree t)
10893 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10894 return 1;
10896 switch (TREE_CODE (t))
10898 case ABS_EXPR:
10899 /* We can't return 1 if flag_wrapv is set because
10900 ABS_EXPR<INT_MIN> = INT_MIN. */
10901 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10902 return 1;
10903 break;
10905 case INTEGER_CST:
10906 return tree_int_cst_sgn (t) >= 0;
10908 case REAL_CST:
10909 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10911 case PLUS_EXPR:
10912 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10913 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10914 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10916 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10917 both unsigned and at least 2 bits shorter than the result. */
10918 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10919 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10920 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10922 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10923 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10924 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10925 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10927 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10928 TYPE_PRECISION (inner2)) + 1;
10929 return prec < TYPE_PRECISION (TREE_TYPE (t));
10932 break;
10934 case MULT_EXPR:
10935 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10937 /* x * x for floating point x is always non-negative. */
10938 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10939 return 1;
10940 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10941 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10944 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10945 both unsigned and the sum of their precisions is less than that of the result. */
10946 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10947 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10948 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10950 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10951 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10952 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10953 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10954 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10955 < TYPE_PRECISION (TREE_TYPE (t));
10957 return 0;
10959 case BIT_AND_EXPR:
10960 case MAX_EXPR:
10961 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10962 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10964 case BIT_IOR_EXPR:
10965 case BIT_XOR_EXPR:
10966 case MIN_EXPR:
10967 case RDIV_EXPR:
10968 case TRUNC_DIV_EXPR:
10969 case CEIL_DIV_EXPR:
10970 case FLOOR_DIV_EXPR:
10971 case ROUND_DIV_EXPR:
10972 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10973 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10975 case TRUNC_MOD_EXPR:
10976 case CEIL_MOD_EXPR:
10977 case FLOOR_MOD_EXPR:
10978 case ROUND_MOD_EXPR:
10979 case SAVE_EXPR:
10980 case NON_LVALUE_EXPR:
10981 case FLOAT_EXPR:
10982 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10984 case COMPOUND_EXPR:
10985 case MODIFY_EXPR:
10986 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10988 case BIND_EXPR:
10989 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10991 case COND_EXPR:
10992 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10993 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10995 case NOP_EXPR:
10997 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10998 tree outer_type = TREE_TYPE (t);
11000 if (TREE_CODE (outer_type) == REAL_TYPE)
11002 if (TREE_CODE (inner_type) == REAL_TYPE)
11003 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11004 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11006 if (TYPE_UNSIGNED (inner_type))
11007 return 1;
11008 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11011 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11013 if (TREE_CODE (inner_type) == REAL_TYPE)
11014 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11015 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11016 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11017 && TYPE_UNSIGNED (inner_type);
11020 break;
11022 case TARGET_EXPR:
11024 tree temp = TARGET_EXPR_SLOT (t);
11025 t = TARGET_EXPR_INITIAL (t);
11027 /* If the initializer is non-void, then it's a normal expression
11028 that will be assigned to the slot. */
11029 if (!VOID_TYPE_P (t))
11030 return tree_expr_nonnegative_p (t);
11032 /* Otherwise, the initializer sets the slot in some way. One common
11033 way is an assignment statement at the end of the initializer. */
11034 while (1)
11036 if (TREE_CODE (t) == BIND_EXPR)
11037 t = expr_last (BIND_EXPR_BODY (t));
11038 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11039 || TREE_CODE (t) == TRY_CATCH_EXPR)
11040 t = expr_last (TREE_OPERAND (t, 0));
11041 else if (TREE_CODE (t) == STATEMENT_LIST)
11042 t = expr_last (t);
11043 else
11044 break;
11046 if (TREE_CODE (t) == MODIFY_EXPR
11047 && TREE_OPERAND (t, 0) == temp)
11048 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11050 return 0;
11053 case CALL_EXPR:
11055 tree fndecl = get_callee_fndecl (t);
11056 tree arglist = TREE_OPERAND (t, 1);
11057 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11058 switch (DECL_FUNCTION_CODE (fndecl))
11060 CASE_FLT_FN (BUILT_IN_ACOS):
11061 CASE_FLT_FN (BUILT_IN_ACOSH):
11062 CASE_FLT_FN (BUILT_IN_CABS):
11063 CASE_FLT_FN (BUILT_IN_COSH):
11064 CASE_FLT_FN (BUILT_IN_ERFC):
11065 CASE_FLT_FN (BUILT_IN_EXP):
11066 CASE_FLT_FN (BUILT_IN_EXP10):
11067 CASE_FLT_FN (BUILT_IN_EXP2):
11068 CASE_FLT_FN (BUILT_IN_FABS):
11069 CASE_FLT_FN (BUILT_IN_FDIM):
11070 CASE_FLT_FN (BUILT_IN_HYPOT):
11071 CASE_FLT_FN (BUILT_IN_POW10):
11072 CASE_INT_FN (BUILT_IN_FFS):
11073 CASE_INT_FN (BUILT_IN_PARITY):
11074 CASE_INT_FN (BUILT_IN_POPCOUNT):
11075 /* Always true. */
11076 return 1;
11078 CASE_FLT_FN (BUILT_IN_SQRT):
11079 /* sqrt(-0.0) is -0.0. */
11080 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11081 return 1;
11082 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11084 CASE_FLT_FN (BUILT_IN_ASINH):
11085 CASE_FLT_FN (BUILT_IN_ATAN):
11086 CASE_FLT_FN (BUILT_IN_ATANH):
11087 CASE_FLT_FN (BUILT_IN_CBRT):
11088 CASE_FLT_FN (BUILT_IN_CEIL):
11089 CASE_FLT_FN (BUILT_IN_ERF):
11090 CASE_FLT_FN (BUILT_IN_EXPM1):
11091 CASE_FLT_FN (BUILT_IN_FLOOR):
11092 CASE_FLT_FN (BUILT_IN_FMOD):
11093 CASE_FLT_FN (BUILT_IN_FREXP):
11094 CASE_FLT_FN (BUILT_IN_LCEIL):
11095 CASE_FLT_FN (BUILT_IN_LDEXP):
11096 CASE_FLT_FN (BUILT_IN_LFLOOR):
11097 CASE_FLT_FN (BUILT_IN_LLCEIL):
11098 CASE_FLT_FN (BUILT_IN_LLFLOOR):
11099 CASE_FLT_FN (BUILT_IN_LLRINT):
11100 CASE_FLT_FN (BUILT_IN_LLROUND):
11101 CASE_FLT_FN (BUILT_IN_LRINT):
11102 CASE_FLT_FN (BUILT_IN_LROUND):
11103 CASE_FLT_FN (BUILT_IN_MODF):
11104 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11105 CASE_FLT_FN (BUILT_IN_POW):
11106 CASE_FLT_FN (BUILT_IN_RINT):
11107 CASE_FLT_FN (BUILT_IN_ROUND):
11108 CASE_FLT_FN (BUILT_IN_SIGNBIT):
11109 CASE_FLT_FN (BUILT_IN_SINH):
11110 CASE_FLT_FN (BUILT_IN_TANH):
11111 CASE_FLT_FN (BUILT_IN_TRUNC):
11112 /* True if the 1st argument is nonnegative. */
11113 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11115 CASE_FLT_FN (BUILT_IN_FMAX):
11116 /* True if the 1st OR 2nd arguments are nonnegative. */
11117 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11118 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11120 CASE_FLT_FN (BUILT_IN_FMIN):
11121 /* True if the 1st AND 2nd arguments are nonnegative. */
11122 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11123 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11125 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11126 /* True if the 2nd argument is nonnegative. */
11127 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11129 default:
11130 break;
11134 /* ... fall through ... */
11136 default:
11137 if (truth_value_p (TREE_CODE (t)))
11138 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11139 return 1;
11142 /* We don't know the sign of `t', so be conservative and return false. */
11143 return 0;
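/* An illustrative sketch (not from the original file) of the
   zero-extension rule for PLUS_EXPR above.  X and Y are hypothetical
   unsigned char expressions; build1/build2 are used so the NOP_EXPR
   pattern survives unfolded.  */
#if 0
tree xi = build1 (NOP_EXPR, integer_type_node, x);
tree yi = build1 (NOP_EXPR, integer_type_node, y);
tree sum = build2 (PLUS_EXPR, integer_type_node, xi, yi);
/* tree_expr_nonnegative_p (sum) returns 1: both inner types are
   unsigned with precision 8, and MAX (8, 8) + 1 = 9 is less than the
   32-bit result precision, so the sum cannot wrap into the sign bit.  */
#endif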
11146 /* Return true when T is an address and is known to be nonzero.
11147 For floating point we further ensure that T is not denormal.
11148 Similar logic is present in nonzero_address in rtlanal.c. */
11150 bool
11151 tree_expr_nonzero_p (tree t)
11153 tree type = TREE_TYPE (t);
11155 /* Doing something useful for floating point would need more work. */
11156 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11157 return false;
11159 switch (TREE_CODE (t))
11161 case ABS_EXPR:
11162 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11164 case INTEGER_CST:
11165 /* We used to test for !integer_zerop here. This does not work correctly
11166 if TREE_CONSTANT_OVERFLOW (t). */
11167 return (TREE_INT_CST_LOW (t) != 0
11168 || TREE_INT_CST_HIGH (t) != 0);
11170 case PLUS_EXPR:
11171 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11173 /* In the presence of negative values it is hard
11174 to say anything definite. */
11175 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11176 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11177 return false;
11178 /* One of the operands must be positive and the other non-negative. */
11179 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11180 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11182 break;
11184 case MULT_EXPR:
11185 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11187 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11188 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11190 break;
11192 case NOP_EXPR:
11194 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11195 tree outer_type = TREE_TYPE (t);
11197 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11198 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11200 break;
11202 case ADDR_EXPR:
11204 tree base = get_base_address (TREE_OPERAND (t, 0));
11206 if (!base)
11207 return false;
11209 /* Weak declarations may link to NULL. */
11210 if (VAR_OR_FUNCTION_DECL_P (base))
11211 return !DECL_WEAK (base);
11213 /* Constants are never weak. */
11214 if (CONSTANT_CLASS_P (base))
11215 return true;
11217 return false;
11220 case COND_EXPR:
11221 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11222 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11224 case MIN_EXPR:
11225 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11226 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11228 case MAX_EXPR:
11229 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11231 /* When both operands are nonzero, then MAX must be too. */
11232 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11233 return true;
11235 /* MAX where operand 0 is positive is positive. */
11236 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11238 /* MAX where operand 1 is positive is positive. */
11239 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11240 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11241 return true;
11242 break;
11244 case COMPOUND_EXPR:
11245 case MODIFY_EXPR:
11246 case BIND_EXPR:
11247 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11249 case SAVE_EXPR:
11250 case NON_LVALUE_EXPR:
11251 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11253 case BIT_IOR_EXPR:
11254 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11255 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11257 case CALL_EXPR:
11258 return alloca_call_p (t);
11260 default:
11261 break;
11263 return false;
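/* An illustrative sketch (not from the original file) of the ADDR_EXPR
   case above; DECL is a hypothetical non-weak VAR_DECL.  */
#if 0
tree addr = build_fold_addr_expr (decl);
bool nz = tree_expr_nonzero_p (addr);
/* NZ is true: a non-weak object must have a nonzero address.  For a
   DECL_WEAK symbol the same query returns false, since an unsatisfied
   weak reference may resolve to address 0.  */
#endif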
11266 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11267 attempt to fold the expression to a constant without modifying TYPE,
11268 OP0 or OP1.
11270 If the expression could be simplified to a constant, then return
11271 the constant. If the expression would not be simplified to a
11272 constant, then return NULL_TREE. */
11274 tree
11275 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11277 tree tem = fold_binary (code, type, op0, op1);
11278 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11281 /* Given the components of a unary expression CODE, TYPE and OP0,
11282 attempt to fold the expression to a constant without modifying
11283 TYPE or OP0.
11285 If the expression could be simplified to a constant, then return
11286 the constant. If the expression would not be simplified to a
11287 constant, then return NULL_TREE. */
11289 tree
11290 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11292 tree tem = fold_unary (code, type, op0);
11293 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
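/* A minimal illustrative sketch (not from the original file): the
   _to_constant wrappers accept a fold result only when it is fully
   constant.  */
#if 0
tree four = build_int_cst (integer_type_node, 4);
tree c = fold_binary_to_constant (MULT_EXPR, integer_type_node, four, four);
/* C is the INTEGER_CST 16.  Had one operand been a variable, fold
   might still simplify (e.g. x * 1 => x), but the result would not be
   TREE_CONSTANT and NULL_TREE would be returned instead.  */
#endif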
11296 /* If EXP represents referencing an element in a constant string
11297 (either via pointer arithmetic or array indexing), return the
11298 tree representing the value accessed, otherwise return NULL. */
11300 tree
11301 fold_read_from_constant_string (tree exp)
11303 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11305 tree exp1 = TREE_OPERAND (exp, 0);
11306 tree index;
11307 tree string;
11309 if (TREE_CODE (exp) == INDIRECT_REF)
11310 string = string_constant (exp1, &index);
11311 else
11313 tree low_bound = array_ref_low_bound (exp);
11314 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11316 /* Optimize the special-case of a zero lower bound.
11318 We convert the low_bound to sizetype to avoid some problems
11319 with constant folding. (E.g. suppose the lower bound is 1,
11320 and its mode is QI. Without the conversion, (ARRAY
11321 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11322 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11323 if (! integer_zerop (low_bound))
11324 index = size_diffop (index, fold_convert (sizetype, low_bound));
11326 string = exp1;
11329 if (string
11330 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11331 && TREE_CODE (string) == STRING_CST
11332 && TREE_CODE (index) == INTEGER_CST
11333 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11334 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11335 == MODE_INT)
11336 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11337 return fold_convert (TREE_TYPE (exp),
11338 build_int_cst (NULL_TREE,
11339 (TREE_STRING_POINTER (string)
11340 [TREE_INT_CST_LOW (index)])));
11342 return NULL;
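/* An illustrative sketch (not from the original file); REF is a
   hypothetical ARRAY_REF denoting "abc"[1].  */
#if 0
tree elt = fold_read_from_constant_string (ref);
/* ELT is the character constant 'b' (98) converted to the element
   type: the index is a constant below TREE_STRING_LENGTH and the
   element mode is a one-byte integer mode, so every guard above is
   satisfied.  */
#endif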
11345 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11346 an integer constant or real constant.
11348 TYPE is the type of the result. */
11350 static tree
11351 fold_negate_const (tree arg0, tree type)
11353 tree t = NULL_TREE;
11355 switch (TREE_CODE (arg0))
11357 case INTEGER_CST:
11359 unsigned HOST_WIDE_INT low;
11360 HOST_WIDE_INT high;
11361 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11362 TREE_INT_CST_HIGH (arg0),
11363 &low, &high);
11364 t = build_int_cst_wide (type, low, high);
11365 t = force_fit_type (t, 1,
11366 (overflow | TREE_OVERFLOW (arg0))
11367 && !TYPE_UNSIGNED (type),
11368 TREE_CONSTANT_OVERFLOW (arg0));
11369 break;
11372 case REAL_CST:
11373 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11374 break;
11376 default:
11377 gcc_unreachable ();
11380 return t;
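/* An illustrative sketch (not from the original file) of the overflow
   path above.  On a target with 32-bit int, negating INT_MIN produces
   +2^31, which does not fit the signed type.  */
#if 0
tree m = build_int_cst (integer_type_node, INT_MIN);
tree n = fold_negate_const (m, integer_type_node);
/* force_fit_type detects that the negated value does not fit a signed
   32-bit int and sets TREE_OVERFLOW (n); the bit pattern wraps back to
   INT_MIN.  */
#endif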
11383 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11384 an integer constant or real constant.
11386 TYPE is the type of the result. */
11388 tree
11389 fold_abs_const (tree arg0, tree type)
11391 tree t = NULL_TREE;
11393 switch (TREE_CODE (arg0))
11395 case INTEGER_CST:
11396 /* If the value is unsigned, then the absolute value is
11397 the same as the ordinary value. */
11398 if (TYPE_UNSIGNED (type))
11399 t = arg0;
11400 /* Similarly, if the value is non-negative. */
11401 else if (INT_CST_LT (integer_minus_one_node, arg0))
11402 t = arg0;
11403 /* If the value is negative, then the absolute value is
11404 its negation. */
11405 else
11407 unsigned HOST_WIDE_INT low;
11408 HOST_WIDE_INT high;
11409 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11410 TREE_INT_CST_HIGH (arg0),
11411 &low, &high);
11412 t = build_int_cst_wide (type, low, high);
11413 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11414 TREE_CONSTANT_OVERFLOW (arg0));
11416 break;
11418 case REAL_CST:
11419 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11420 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11421 else
11422 t = arg0;
11423 break;
11425 default:
11426 gcc_unreachable ();
11429 return t;
11432 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11433 constant. TYPE is the type of the result. */
11435 static tree
11436 fold_not_const (tree arg0, tree type)
11438 tree t = NULL_TREE;
11440 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11442 t = build_int_cst_wide (type,
11443 ~ TREE_INT_CST_LOW (arg0),
11444 ~ TREE_INT_CST_HIGH (arg0));
11445 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11446 TREE_CONSTANT_OVERFLOW (arg0));
11448 return t;
11451 /* Given CODE, a relational operator, the target type, TYPE and two
11452 constant operands OP0 and OP1, return the result of the
11453 relational operation. If the result is not a compile time
11454 constant, then return NULL_TREE. */
11456 static tree
11457 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11459 int result, invert;
11461 /* From here on, the only cases we handle are when the result is
11462 known to be a constant. */
11464 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11466 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11467 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11469 /* Handle the cases where either operand is a NaN. */
11470 if (real_isnan (c0) || real_isnan (c1))
11472 switch (code)
11474 case EQ_EXPR:
11475 case ORDERED_EXPR:
11476 result = 0;
11477 break;
11479 case NE_EXPR:
11480 case UNORDERED_EXPR:
11481 case UNLT_EXPR:
11482 case UNLE_EXPR:
11483 case UNGT_EXPR:
11484 case UNGE_EXPR:
11485 case UNEQ_EXPR:
11486 result = 1;
11487 break;
11489 case LT_EXPR:
11490 case LE_EXPR:
11491 case GT_EXPR:
11492 case GE_EXPR:
11493 case LTGT_EXPR:
11494 if (flag_trapping_math)
11495 return NULL_TREE;
11496 result = 0;
11497 break;
11499 default:
11500 gcc_unreachable ();
11503 return constant_boolean_node (result, type);
11506 return constant_boolean_node (real_compare (code, c0, c1), type);
11509 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11511 To compute GT, swap the arguments and do LT.
11512 To compute GE, do LT and invert the result.
11513 To compute LE, swap the arguments, do LT and invert the result.
11514 To compute NE, do EQ and invert the result.
11516 Therefore, the code below must handle only EQ and LT. */
11518 if (code == LE_EXPR || code == GT_EXPR)
11520 tree tem = op0;
11521 op0 = op1;
11522 op1 = tem;
11523 code = swap_tree_comparison (code);
11526 /* Note that it is safe to invert for real values here because we
11527 have already handled the one case where it matters. */
11529 invert = 0;
11530 if (code == NE_EXPR || code == GE_EXPR)
11532 invert = 1;
11533 code = invert_tree_comparison (code, false);
11536 /* Compute a result for LT or EQ if args permit;
11537 otherwise return NULL_TREE. */
11538 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11540 if (code == EQ_EXPR)
11541 result = tree_int_cst_equal (op0, op1);
11542 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11543 result = INT_CST_LT_UNSIGNED (op0, op1);
11544 else
11545 result = INT_CST_LT (op0, op1);
11547 else
11548 return NULL_TREE;
11550 if (invert)
11551 result ^= 1;
11552 return constant_boolean_node (result, type);
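/* An illustrative sketch (not from the original file) of the NaN
   handling above; C0 is a hypothetical REAL_CST NaN, C1 the REAL_CST
   1.0.  */
#if 0
tree eq = fold_relational_const (EQ_EXPR, boolean_type_node, c0, c1);
/* EQ is boolean_false_node: equality with a NaN is quietly false.
   LT_EXPR on the same operands folds to false only when
   !flag_trapping_math; otherwise NULL_TREE is returned, because
   folding would discard the invalid-operand exception.  */
#endif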
11555 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11556 Don't build a cleanup point expression for EXPR if it doesn't have side
11557 effects. */
11559 tree
11560 fold_build_cleanup_point_expr (tree type, tree expr)
11562 /* If the expression does not have side effects then we don't have to wrap
11563 it with a cleanup point expression. */
11564 if (!TREE_SIDE_EFFECTS (expr))
11565 return expr;
11567 /* If the expression is a RETURN_EXPR, check whether the expression inside
11568 it, or the right-hand side of the MODIFY_EXPR inside it, is free of side
11569 effects. If either has none, we don't need to wrap the expression in a
11570 cleanup point expression. Note we don't check the left-hand side of the
11571 MODIFY_EXPR because it should always be the return decl. */
11572 if (TREE_CODE (expr) == RETURN_EXPR)
11574 tree op = TREE_OPERAND (expr, 0);
11575 if (!op || !TREE_SIDE_EFFECTS (op))
11576 return expr;
11577 op = TREE_OPERAND (op, 1);
11578 if (!TREE_SIDE_EFFECTS (op))
11579 return expr;
11582 return build1 (CLEANUP_POINT_EXPR, type, expr);
11585 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11586 avoid confusing the gimplify process. */
11588 tree
11589 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11591 /* The size of the object is not relevant when talking about its address. */
11592 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11593 t = TREE_OPERAND (t, 0);
11595 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11596 if (TREE_CODE (t) == INDIRECT_REF
11597 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11599 t = TREE_OPERAND (t, 0);
11600 if (TREE_TYPE (t) != ptrtype)
11601 t = build1 (NOP_EXPR, ptrtype, t);
11603 else
11605 tree base = t;
11607 while (handled_component_p (base))
11608 base = TREE_OPERAND (base, 0);
11609 if (DECL_P (base))
11610 TREE_ADDRESSABLE (base) = 1;
11612 t = build1 (ADDR_EXPR, ptrtype, t);
11615 return t;
11618 tree
11619 build_fold_addr_expr (tree t)
11621 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11624 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11625 of an indirection through OP0, or NULL_TREE if no simplification is
11626 possible. */
11628 tree
11629 fold_indirect_ref_1 (tree type, tree op0)
11631 tree sub = op0;
11632 tree subtype;
11634 STRIP_NOPS (sub);
11635 subtype = TREE_TYPE (sub);
11636 if (!POINTER_TYPE_P (subtype))
11637 return NULL_TREE;
11639 if (TREE_CODE (sub) == ADDR_EXPR)
11641 tree op = TREE_OPERAND (sub, 0);
11642 tree optype = TREE_TYPE (op);
11643 /* *&p => p; make sure to handle *&"str"[cst] here. */
11644 if (type == optype)
11646 tree fop = fold_read_from_constant_string (op);
11647 if (fop)
11648 return fop;
11649 else
11650 return op;
11652 /* *(foo *)&fooarray => fooarray[0] */
11653 else if (TREE_CODE (optype) == ARRAY_TYPE
11654 && type == TREE_TYPE (optype))
11656 tree type_domain = TYPE_DOMAIN (optype);
11657 tree min_val = size_zero_node;
11658 if (type_domain && TYPE_MIN_VALUE (type_domain))
11659 min_val = TYPE_MIN_VALUE (type_domain);
11660 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11664 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11665 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11666 && type == TREE_TYPE (TREE_TYPE (subtype)))
11668 tree type_domain;
11669 tree min_val = size_zero_node;
11670 sub = build_fold_indirect_ref (sub);
11671 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11672 if (type_domain && TYPE_MIN_VALUE (type_domain))
11673 min_val = TYPE_MIN_VALUE (type_domain);
11674 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11677 return NULL_TREE;
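/* An illustrative sketch (not from the original file) of the
   "*(foo *)&fooarray => fooarray[0]" transformation above; ARR is a
   hypothetical VAR_DECL of type int[4].  */
#if 0
tree addr = build_fold_addr_expr (arr);
tree deref = fold_indirect_ref_1 (integer_type_node, addr);
/* DEREF is the ARRAY_REF arr[0]: the ADDR_EXPR arm sees an ARRAY_TYPE
   whose element type matches TYPE and indexes its first element.  */
#endif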
11680 /* Builds an expression for an indirection through T, simplifying some
11681 cases. */
11683 tree
11684 build_fold_indirect_ref (tree t)
11686 tree type = TREE_TYPE (TREE_TYPE (t));
11687 tree sub = fold_indirect_ref_1 (type, t);
11689 if (sub)
11690 return sub;
11691 else
11692 return build1 (INDIRECT_REF, type, t);
11695 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11697 tree
11698 fold_indirect_ref (tree t)
11700 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11702 if (sub)
11703 return sub;
11704 else
11705 return t;
11708 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11709 whose result is ignored. The type of the returned tree need not be
11710 the same as the original expression. */
11712 tree
11713 fold_ignored_result (tree t)
11715 if (!TREE_SIDE_EFFECTS (t))
11716 return integer_zero_node;
11718 for (;;)
11719 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11721 case tcc_unary:
11722 t = TREE_OPERAND (t, 0);
11723 break;
11725 case tcc_binary:
11726 case tcc_comparison:
11727 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11728 t = TREE_OPERAND (t, 0);
11729 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11730 t = TREE_OPERAND (t, 1);
11731 else
11732 return t;
11733 break;
11735 case tcc_expression:
11736 switch (TREE_CODE (t))
11738 case COMPOUND_EXPR:
11739 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11740 return t;
11741 t = TREE_OPERAND (t, 0);
11742 break;
11744 case COND_EXPR:
11745 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11746 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11747 return t;
11748 t = TREE_OPERAND (t, 0);
11749 break;
11751 default:
11752 return t;
11754 break;
11756 default:
11757 return t;
11761 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11762 This can only be applied to objects of a sizetype. */
11764 tree
11765 round_up (tree value, int divisor)
11767 tree div = NULL_TREE;
11769 gcc_assert (divisor > 0);
11770 if (divisor == 1)
11771 return value;
11773 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11774 have to do anything. Only do this when VALUE is not a constant,
11775 because for a constant the check is more expensive than simply
11776 doing the rounding. */
11777 if (TREE_CODE (value) != INTEGER_CST)
11779 div = build_int_cst (TREE_TYPE (value), divisor);
11781 if (multiple_of_p (TREE_TYPE (value), value, div))
11782 return value;
11785 /* If divisor is a power of two, simplify this to bit manipulation. */
11786 if (divisor == (divisor & -divisor))
11788 tree t;
11790 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11791 value = size_binop (PLUS_EXPR, value, t);
11792 t = build_int_cst (TREE_TYPE (value), -divisor);
11793 value = size_binop (BIT_AND_EXPR, value, t);
11795 else
11797 if (!div)
11798 div = build_int_cst (TREE_TYPE (value), divisor);
11799 value = size_binop (CEIL_DIV_EXPR, value, div);
11800 value = size_binop (MULT_EXPR, value, div);
11803 return value;
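/* A minimal illustrative sketch (not from the original file) of the
   power-of-two path above as arithmetic.  Rounding 37 up to a multiple
   of 8: (37 + 7) & -8 == 44 & ~7 == 40, since -8 is the mask ~7 in
   two's complement.  */
#if 0
tree v = build_int_cst (sizetype, 37);
tree r = round_up (v, 8);   /* INTEGER_CST 40 */
#endif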
11806 /* Likewise, but round down. */
11808 tree
11809 round_down (tree value, int divisor)
11811 tree div = NULL_TREE;
11813 gcc_assert (divisor > 0);
11814 if (divisor == 1)
11815 return value;
11817 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11818 have to do anything. Only do this when VALUE is not a constant,
11819 because for a constant the check is more expensive than simply
11820 doing the rounding. */
11821 if (TREE_CODE (value) != INTEGER_CST)
11823 div = build_int_cst (TREE_TYPE (value), divisor);
11825 if (multiple_of_p (TREE_TYPE (value), value, div))
11826 return value;
11829 /* If divisor is a power of two, simplify this to bit manipulation. */
11830 if (divisor == (divisor & -divisor))
11832 tree t;
11834 t = build_int_cst (TREE_TYPE (value), -divisor);
11835 value = size_binop (BIT_AND_EXPR, value, t);
11837 else
11839 if (!div)
11840 div = build_int_cst (TREE_TYPE (value), divisor);
11841 value = size_binop (FLOOR_DIV_EXPR, value, div);
11842 value = size_binop (MULT_EXPR, value, div);
11845 return value;
11848 /* Returns the pointer to the base of the object addressed by EXP and
11849 extracts the information about the offset of the access, storing it
11850 in PBITPOS and POFFSET. */
11852 static tree
11853 split_address_to_core_and_offset (tree exp,
11854 HOST_WIDE_INT *pbitpos, tree *poffset)
11856 tree core;
11857 enum machine_mode mode;
11858 int unsignedp, volatilep;
11859 HOST_WIDE_INT bitsize;
11861 if (TREE_CODE (exp) == ADDR_EXPR)
11863 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11864 poffset, &mode, &unsignedp, &volatilep,
11865 false);
11866 core = build_fold_addr_expr (core);
11868 else
11870 core = exp;
11871 *pbitpos = 0;
11872 *poffset = NULL_TREE;
11875 return core;
11878 /* Returns true if addresses of E1 and E2 differ by a constant, false
11879 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11881 bool
11882 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11884 tree core1, core2;
11885 HOST_WIDE_INT bitpos1, bitpos2;
11886 tree toffset1, toffset2, tdiff, type;
11888 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11889 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11891 if (bitpos1 % BITS_PER_UNIT != 0
11892 || bitpos2 % BITS_PER_UNIT != 0
11893 || !operand_equal_p (core1, core2, 0))
11894 return false;
11896 if (toffset1 && toffset2)
11898 type = TREE_TYPE (toffset1);
11899 if (type != TREE_TYPE (toffset2))
11900 toffset2 = fold_convert (type, toffset2);
11902 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11903 if (!cst_and_fits_in_hwi (tdiff))
11904 return false;
11906 *diff = int_cst_value (tdiff);
11908 else if (toffset1 || toffset2)
11910 /* If only one of the offsets is non-constant, the difference cannot
11911 be a constant. */
11912 return false;
11914 else
11915 *diff = 0;
11917 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11918 return true;
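/* An illustrative sketch (not from the original file); A is a
   hypothetical VAR_DECL of type int[10] on a target with 32-bit int.  */
#if 0
HOST_WIDE_INT diff;
tree e1 = build_fold_addr_expr (build4 (ARRAY_REF, integer_type_node, a,
                                        size_int (3), NULL_TREE, NULL_TREE));
tree e2 = build_fold_addr_expr (build4 (ARRAY_REF, integer_type_node, a,
                                        size_int (1), NULL_TREE, NULL_TREE));
/* Both addresses share the core &a and have constant bit positions 96
   and 32, so ptr_difference_const succeeds and stores (96 - 32) / 8
   == 8 bytes in DIFF.  */
bool ok = ptr_difference_const (e1, e2, &diff);
#endif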
11921 /* Simplify the floating point expression EXP when the sign of the
11922 result is not significant. Return NULL_TREE if no simplification
11923 is possible. */
11925 tree
11926 fold_strip_sign_ops (tree exp)
11928 tree arg0, arg1;
11930 switch (TREE_CODE (exp))
11932 case ABS_EXPR:
11933 case NEGATE_EXPR:
11934 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11935 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11937 case MULT_EXPR:
11938 case RDIV_EXPR:
11939 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11940 return NULL_TREE;
11941 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11942 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11943 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11944 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11945 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11946 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11947 break;
11949 default:
11950 break;
11952 return NULL_TREE;
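/* An illustrative sketch (not from the original file): stripping a
   sign operation whose effect the caller will discard, e.g. under
   fabs.  EXP is a hypothetical MULT_EXPR tree for -x * y.  */
#if 0
tree stripped = fold_strip_sign_ops (exp);
/* STRIPPED is x * y: the NEGATE_EXPR around the first factor is
   dropped and the multiplication rebuilt.  When sign-dependent
   rounding is honored, the function returns NULL_TREE instead.  */
#endif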