/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
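
/* Worked example (illustrative, using 8-bit two's complement for
   brevity): with a = 100 and b = 50, the sum bit pattern is 0x96,
   i.e. -106.  The signs of a and b agree, so ~(a ^ b) has its sign
   bit set; a and sum disagree, so a ^ sum has its sign bit set too;
   their AND is negative and the macro reports overflow.  With
   a = 100, b = -50, a ^ b already has the sign bit set, ~(a ^ b)
   does not, and the macro yields 0.  */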
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
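
/* Example (illustrative; assume HOST_BITS_PER_WIDE_INT == 32, so
   BASE == 0x10000): for x == 0x12345678, LOWPART (x) == 0x5678 and
   HIGHPART (x) == 0x1234, and indeed 0x5678 + 0x1234 * 0x10000 == x.  */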
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
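
/* Round-trip example (illustrative; 32-bit HOST_WIDE_INT):
     encode (w, 0x12345678, 0x0000abcd)
       yields w[] = { 0x5678, 0x1234, 0xabcd, 0x0000 };
     decode (w, &lo, &hi)
       recovers lo == 0x12345678, hi == 0x0000abcd.  */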
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOW if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
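
/* Example (illustrative): forcing the value 300 into an 8-bit unsigned
   type keeps only the low 8 bits, giving 44.  For a signed 8-bit type,
   200 (bit pattern 0xC8) sign-extends to -56, and with OVERFLOWABLE > 0
   the returned copy additionally has TREE_OVERFLOW set because the
   bits changed and the type is sign extended.  */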
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
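
/* The carry out of the low word is detected by `l < l1': unsigned
   addition wraps modulo 2^HOST_BITS_PER_WIDE_INT, so the sum is
   smaller than an operand exactly when a carry occurred.  E.g. with
   8-bit words, 0xF0 + 0x20 wraps to 0x10 < 0xF0, so carry == 1.  */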
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
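
/* Two's complement negation is -x == ~x + 1.  When the low word is
   nonzero, the +1 is absorbed entirely by the low word (-l1 == ~l1 + 1
   with no carry out), so the high word is just ~h1 and no overflow is
   possible.  When the low word is zero the carry propagates, giving
   -h1 in the high word; overflow then happens only for the most
   negative value, where -h1 == h1 < 0, which the return value tests.  */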
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
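
/* Overflow check, informally: after the signed correction, the top
   doubleword must just sign-extend the low half; all zero bits when
   the low result is nonnegative (toplow | tophigh == 0) and all one
   bits when it is negative (toplow & tophigh == ~0).  Anything else
   means the true product did not fit in a doubleword.  */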
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
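
/* The double shift `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1'
   computes l1 >> (HOST_BITS_PER_WIDE_INT - count) without ever
   shifting by the full word width, which C leaves undefined when
   count == 0.  */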
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
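
/* Both rotations use the identity rotl (x, n) == (x << n) | (x >> (prec - n))
   (and its mirror image for rotate right), with logical shifts (ARITH
   == 0) so no sign bits leak into the reassembled value.  */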
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
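
/* Rounding example (illustrative): for -7 / 3 the trial quotient is -2
   with remainder -1.  TRUNC_DIV_EXPR and EXACT_DIV_EXPR keep -2,
   FLOOR_DIV_EXPR adjusts to -3, CEIL_DIV_EXPR keeps -2 (the ratio is
   negative), and ROUND_DIV_EXPR keeps -2 since 2 * |rem| < |den|.
   The matching *_MOD_EXPR codes return the remainder recomputed from
   the adjusted quotient: -1, 2, -1 and -1 respectively.  */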
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
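
/* In two's complement the only signed value whose negation overflows
   is the minimum, whose bit pattern is exactly 1 << (prec - 1); e.g.
   for a 32-bit int, -(-2147483648) is not representable, so the test
   above simply compares against that one pattern.  */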
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
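
/* The RSHIFT_EXPR case relies on (int) x >> 31 evaluating to 0 or -1
   for 32-bit x, while (unsigned) x >> 31 evaluates to 0 or 1; the two
   are exact negations of each other, so flipping the sign costs
   nothing beyond a change of signedness.  */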
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
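
/* Example (illustrative): splitting `x - 5' with CODE == PLUS_EXPR
   returns the variable part `x' with *LITP == 0 and *MINUS_LITP == 5,
   i.e. the tree is viewed as x + (-5) without actually negating the
   constant; splitting `x + 5' instead yields *LITP == 5.  */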
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
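
/* MIN/MAX trick, informally: LOW is first used as a boolean holding
   "arg1 < arg2".  MIN_EXPR wants arg1 exactly when that is true and
   MAX_EXPR exactly when it is false, which is what the comparison
   `low == (code == MIN_EXPR)' selects before LOW is reused to hold
   the result's low word.  */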
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
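
/* The complex division above folds (r1 + i1*i) / (r2 + i2*i) by
   multiplying numerator and denominator by the conjugate (r2 - i2*i):

     real = (r1*r2 + i1*i2) / (r2*r2 + i2*i2)
     imag = (i1*r2 - r1*i2) / (r2*r2 + i2*i2)

   which is exactly what T1, T2 and MAGSQUARED compute piecewise.  */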
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1905 /* Convert expression ARG to type TYPE. Used by the middle-end for
1906 simple conversions in preference to calling the front-end's convert. */
1908 tree
1909 fold_convert (tree type, tree arg)
1911 tree orig = TREE_TYPE (arg);
1912 tree tem;
1914 if (type == orig)
1915 return arg;
1917 if (TREE_CODE (arg) == ERROR_MARK
1918 || TREE_CODE (type) == ERROR_MARK
1919 || TREE_CODE (orig) == ERROR_MARK)
1920 return error_mark_node;
1922 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1923 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1924 TYPE_MAIN_VARIANT (orig)))
1925 return fold_build1 (NOP_EXPR, type, arg);
1927 switch (TREE_CODE (type))
1929 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1930 case POINTER_TYPE: case REFERENCE_TYPE:
1931 case OFFSET_TYPE:
1932 if (TREE_CODE (arg) == INTEGER_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 return tem;
1938 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1939 || TREE_CODE (orig) == OFFSET_TYPE)
1940 return fold_build1 (NOP_EXPR, type, arg);
1941 if (TREE_CODE (orig) == COMPLEX_TYPE)
1943 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert (type, tem);
1946 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1947 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1948 return fold_build1 (NOP_EXPR, type, arg);
1950 case REAL_TYPE:
1951 if (TREE_CODE (arg) == INTEGER_CST)
1953 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1954 if (tem != NULL_TREE)
1955 return tem;
1957 else if (TREE_CODE (arg) == REAL_CST)
1959 tem = fold_convert_const (NOP_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1961 return tem;
1964 switch (TREE_CODE (orig))
1966 case INTEGER_TYPE: case CHAR_TYPE:
1967 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1968 case POINTER_TYPE: case REFERENCE_TYPE:
1969 return fold_build1 (FLOAT_EXPR, type, arg);
1971 case REAL_TYPE:
1972 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1973 type, arg);
1975 case COMPLEX_TYPE:
1976 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1977 return fold_convert (type, tem);
1979 default:
1980 gcc_unreachable ();
1983 case COMPLEX_TYPE:
1984 switch (TREE_CODE (orig))
1986 case INTEGER_TYPE: case CHAR_TYPE:
1987 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1988 case POINTER_TYPE: case REFERENCE_TYPE:
1989 case REAL_TYPE:
1990 return build2 (COMPLEX_EXPR, type,
1991 fold_convert (TREE_TYPE (type), arg),
1992 fold_convert (TREE_TYPE (type), integer_zero_node));
1993 case COMPLEX_TYPE:
1995 tree rpart, ipart;
1997 if (TREE_CODE (arg) == COMPLEX_EXPR)
1999 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2000 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2001 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2004 arg = save_expr (arg);
2005 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2006 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2007 rpart = fold_convert (TREE_TYPE (type), rpart);
2008 ipart = fold_convert (TREE_TYPE (type), ipart);
2009 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2012 default:
2013 gcc_unreachable ();
2016 case VECTOR_TYPE:
2017 if (integer_zerop (arg))
2018 return build_zero_vector (type);
2019 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2020 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == VECTOR_TYPE);
2022 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2024 case VOID_TYPE:
2025 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2027 default:
2028 gcc_unreachable ();
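/* Usage sketch (hypothetical helper): the middle end widens an integer
   expression to 'long' through fold_convert rather than the front end's
   convert; constants fold immediately, everything else becomes the
   NOP_EXPR chosen by the INTEGER_TYPE case above.  */

static tree
example_widen_to_long (tree expr)
{
  return fold_convert (long_integer_type_node, expr);
}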
2032 /* Return false if expr can be assumed not to be an lvalue, true
2033 otherwise. */
2035 static bool
2036 maybe_lvalue_p (tree x)
2038 /* We only need to wrap lvalue tree codes. */
2039 switch (TREE_CODE (x))
2041 case VAR_DECL:
2042 case PARM_DECL:
2043 case RESULT_DECL:
2044 case LABEL_DECL:
2045 case FUNCTION_DECL:
2046 case SSA_NAME:
2048 case COMPONENT_REF:
2049 case INDIRECT_REF:
2050 case ALIGN_INDIRECT_REF:
2051 case MISALIGNED_INDIRECT_REF:
2052 case ARRAY_REF:
2053 case ARRAY_RANGE_REF:
2054 case BIT_FIELD_REF:
2055 case OBJ_TYPE_REF:
2057 case REALPART_EXPR:
2058 case IMAGPART_EXPR:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2061 case SAVE_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2064 case COMPOUND_EXPR:
2065 case MODIFY_EXPR:
2066 case TARGET_EXPR:
2067 case COND_EXPR:
2068 case BIND_EXPR:
2069 case MIN_EXPR:
2070 case MAX_EXPR:
2071 break;
2073 default:
2074 /* Assume the worst for front-end tree codes. */
2075 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2076 break;
2077 return false;
2080 return true;
2083 /* Return an expr equal to X but certainly not valid as an lvalue. */
2085 tree
2086 non_lvalue (tree x)
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2089 us. */
2090 if (in_gimple_form)
2091 return x;
2093 if (! maybe_lvalue_p (x))
2094 return x;
2095 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2101 int pedantic_lvalues;
2103 /* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2106 static tree
2107 pedantic_non_lvalue (tree x)
2109 if (pedantic_lvalues)
2110 return non_lvalue (x);
2111 else
2112 return x;
2115 /* Given a tree comparison code, return the code that is the logical inverse
2116 of the given code. It is not safe to do this for floating-point
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS flag
2118 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2120 enum tree_code
2121 invert_tree_comparison (enum tree_code code, bool honor_nans)
2123 if (honor_nans && flag_trapping_math)
2124 return ERROR_MARK;
2126 switch (code)
2128 case EQ_EXPR:
2129 return NE_EXPR;
2130 case NE_EXPR:
2131 return EQ_EXPR;
2132 case GT_EXPR:
2133 return honor_nans ? UNLE_EXPR : LE_EXPR;
2134 case GE_EXPR:
2135 return honor_nans ? UNLT_EXPR : LT_EXPR;
2136 case LT_EXPR:
2137 return honor_nans ? UNGE_EXPR : GE_EXPR;
2138 case LE_EXPR:
2139 return honor_nans ? UNGT_EXPR : GT_EXPR;
2140 case LTGT_EXPR:
2141 return UNEQ_EXPR;
2142 case UNEQ_EXPR:
2143 return LTGT_EXPR;
2144 case UNGT_EXPR:
2145 return LE_EXPR;
2146 case UNGE_EXPR:
2147 return LT_EXPR;
2148 case UNLT_EXPR:
2149 return GE_EXPR;
2150 case UNLE_EXPR:
2151 return GT_EXPR;
2152 case ORDERED_EXPR:
2153 return UNORDERED_EXPR;
2154 case UNORDERED_EXPR:
2155 return ORDERED_EXPR;
2156 default:
2157 gcc_unreachable ();
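/* Worked instances of the inversion above (hypothetical helper).  With
   HONOR_NANS set, LT_EXPR inverts to UNGE_EXPR, since !(a < b) must stay
   true when either operand is a NaN; without NaNs a plain GE_EXPR
   suffices; and when NaNs are honored under -ftrapping-math the caller
   receives ERROR_MARK and must fall back to a TRUTH_NOT_EXPR.  */

static enum tree_code
example_invert_lt (bool nans)
{
  return invert_tree_comparison (LT_EXPR, nans);
}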
2161 /* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
2164 enum tree_code
2165 swap_tree_comparison (enum tree_code code)
2167 switch (code)
2169 case EQ_EXPR:
2170 case NE_EXPR:
2171 case ORDERED_EXPR:
2172 case UNORDERED_EXPR:
2173 case LTGT_EXPR:
2174 case UNEQ_EXPR:
2175 return code;
2176 case GT_EXPR:
2177 return LT_EXPR;
2178 case GE_EXPR:
2179 return LE_EXPR;
2180 case LT_EXPR:
2181 return GT_EXPR;
2182 case LE_EXPR:
2183 return GE_EXPR;
2184 case UNGT_EXPR:
2185 return UNLT_EXPR;
2186 case UNGE_EXPR:
2187 return UNLE_EXPR;
2188 case UNLT_EXPR:
2189 return UNGT_EXPR;
2190 case UNLE_EXPR:
2191 return UNGE_EXPR;
2192 default:
2193 gcc_unreachable ();
2198 /* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2202 static enum comparison_code
2203 comparison_to_compcode (enum tree_code code)
2205 switch (code)
2207 case LT_EXPR:
2208 return COMPCODE_LT;
2209 case EQ_EXPR:
2210 return COMPCODE_EQ;
2211 case LE_EXPR:
2212 return COMPCODE_LE;
2213 case GT_EXPR:
2214 return COMPCODE_GT;
2215 case NE_EXPR:
2216 return COMPCODE_NE;
2217 case GE_EXPR:
2218 return COMPCODE_GE;
2219 case ORDERED_EXPR:
2220 return COMPCODE_ORD;
2221 case UNORDERED_EXPR:
2222 return COMPCODE_UNORD;
2223 case UNLT_EXPR:
2224 return COMPCODE_UNLT;
2225 case UNEQ_EXPR:
2226 return COMPCODE_UNEQ;
2227 case UNLE_EXPR:
2228 return COMPCODE_UNLE;
2229 case UNGT_EXPR:
2230 return COMPCODE_UNGT;
2231 case LTGT_EXPR:
2232 return COMPCODE_LTGT;
2233 case UNGE_EXPR:
2234 return COMPCODE_UNGE;
2235 default:
2236 gcc_unreachable ();
2240 /* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2244 static enum tree_code
2245 compcode_to_comparison (enum comparison_code code)
2247 switch (code)
2249 case COMPCODE_LT:
2250 return LT_EXPR;
2251 case COMPCODE_EQ:
2252 return EQ_EXPR;
2253 case COMPCODE_LE:
2254 return LE_EXPR;
2255 case COMPCODE_GT:
2256 return GT_EXPR;
2257 case COMPCODE_NE:
2258 return NE_EXPR;
2259 case COMPCODE_GE:
2260 return GE_EXPR;
2261 case COMPCODE_ORD:
2262 return ORDERED_EXPR;
2263 case COMPCODE_UNORD:
2264 return UNORDERED_EXPR;
2265 case COMPCODE_UNLT:
2266 return UNLT_EXPR;
2267 case COMPCODE_UNEQ:
2268 return UNEQ_EXPR;
2269 case COMPCODE_UNLE:
2270 return UNLE_EXPR;
2271 case COMPCODE_UNGT:
2272 return UNGT_EXPR;
2273 case COMPCODE_LTGT:
2274 return LTGT_EXPR;
2275 case COMPCODE_UNGE:
2276 return UNGE_EXPR;
2277 default:
2278 gcc_unreachable ();
2282 /* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
2288 tree
2289 combine_comparisons (enum tree_code code, enum tree_code lcode,
2290 enum tree_code rcode, tree truth_type,
2291 tree ll_arg, tree lr_arg)
2293 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2294 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2295 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2296 enum comparison_code compcode;
2298 switch (code)
2300 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2301 compcode = lcompcode & rcompcode;
2302 break;
2304 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2305 compcode = lcompcode | rcompcode;
2306 break;
2308 default:
2309 return NULL_TREE;
2312 if (!honor_nans)
2314 /* Eliminate unordered comparisons, as well as LTGT and ORD
2315 which are not used unless the mode has NaNs. */
2316 compcode &= ~COMPCODE_UNORD;
2317 if (compcode == COMPCODE_LTGT)
2318 compcode = COMPCODE_NE;
2319 else if (compcode == COMPCODE_ORD)
2320 compcode = COMPCODE_TRUE;
2322 else if (flag_trapping_math)
2324 /* Check that the original operation and the optimized ones will trap
2325 under the same condition. */
2326 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2327 && (lcompcode != COMPCODE_EQ)
2328 && (lcompcode != COMPCODE_ORD);
2329 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2330 && (rcompcode != COMPCODE_EQ)
2331 && (rcompcode != COMPCODE_ORD);
2332 bool trap = (compcode & COMPCODE_UNORD) == 0
2333 && (compcode != COMPCODE_EQ)
2334 && (compcode != COMPCODE_ORD);
2336 /* In a short-circuited boolean expression the LHS might be
2337 such that the RHS, if evaluated, will never trap. For
2338 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2339 if neither x nor y is NaN. (This is a mixed blessing: for
2340 example, the expression above will never trap, hence
2341 optimizing it to x < y would be invalid). */
2342 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2343 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2344 rtrap = false;
2346 /* If the comparison was short-circuited, and only the RHS
2347 trapped, we may now generate a spurious trap. */
2348 if (rtrap && !ltrap
2349 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2350 return NULL_TREE;
2352 /* If we changed the conditions that cause a trap, we lose. */
2353 if ((ltrap || rtrap) != trap)
2354 return NULL_TREE;
2357 if (compcode == COMPCODE_TRUE)
2358 return constant_boolean_node (true, truth_type);
2359 else if (compcode == COMPCODE_FALSE)
2360 return constant_boolean_node (false, truth_type);
2361 else
2362 return fold_build2 (compcode_to_comparison (compcode),
2363 truth_type, ll_arg, lr_arg);
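/* Worked example of the compcode bit arithmetic used above (the helper
   is hypothetical): for a < b || a == b the encoding gives
   COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE, so the
   disjunction collapses to the single comparison a <= b.  */

static tree
example_combine_lt_or_eq (tree a, tree b)
{
  return combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                              boolean_type_node, a, b);
}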
2366 /* Return nonzero if CODE is a tree code that represents a truth value. */
2368 static int
2369 truth_value_p (enum tree_code code)
2371 return (TREE_CODE_CLASS (code) == tcc_comparison
2372 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2373 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2374 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2377 /* Return nonzero if two operands (typically of the same tree node)
2378 are necessarily equal. If either argument has side-effects this
2379 function returns zero. FLAGS modifies behavior as follows:
2381 If OEP_ONLY_CONST is set, only return nonzero for constants.
2382 This function tests whether the operands are indistinguishable;
2383 it does not test whether they are equal using C's == operation.
2384 The distinction is important for IEEE floating point, because
2385 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2386 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2388 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2389 even though it may hold multiple values during a function.
2390 This is because a GCC tree node guarantees that nothing else is
2391 executed between the evaluation of its "operands" (which may often
2392 be evaluated in arbitrary order). Hence if the operands themselves
2393 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2394 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2395 unset means assuming isochronic (or instantaneous) tree equivalence.
2396 Unless comparing arbitrary expression trees, such as from different
2397 statements, this flag can usually be left unset.
2399 If OEP_PURE_SAME is set, then pure functions with identical arguments
2400 are considered the same. It is used when the caller has other ways
2401 to ensure that global memory is unchanged in between. */
2403 int
2404 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2406 /* If either is ERROR_MARK, they aren't equal. */
2407 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2408 return 0;
2410 /* If the two types differ in signedness, we can't consider the
2411 operands equal. We must check this before the STRIP_NOPS calls
2412 because they may change the signedness of the arguments. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2414 return 0;
2416 STRIP_NOPS (arg0);
2417 STRIP_NOPS (arg1);
2419 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2420 /* This is needed for conversions and for COMPONENT_REF.
2421 Might as well play it safe and always test this. */
2422 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2423 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2424 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2425 return 0;
2427 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2428 We don't care about side effects in that case because the SAVE_EXPR
2429 takes care of that for us. In all other cases, two expressions are
2430 equal if they have no side effects. If we have two identical
2431 expressions with side effects that should be treated the same due
2432 to the only side effects being identical SAVE_EXPR's, that will
2433 be detected in the recursive calls below. */
2434 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2435 && (TREE_CODE (arg0) == SAVE_EXPR
2436 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2437 return 1;
2439 /* Next handle constant cases, those for which we can return 1 even
2440 if ONLY_CONST is set. */
2441 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2442 switch (TREE_CODE (arg0))
2444 case INTEGER_CST:
2445 return (! TREE_CONSTANT_OVERFLOW (arg0)
2446 && ! TREE_CONSTANT_OVERFLOW (arg1)
2447 && tree_int_cst_equal (arg0, arg1));
2449 case REAL_CST:
2450 return (! TREE_CONSTANT_OVERFLOW (arg0)
2451 && ! TREE_CONSTANT_OVERFLOW (arg1)
2452 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2453 TREE_REAL_CST (arg1)));
2455 case VECTOR_CST:
2457 tree v1, v2;
2459 if (TREE_CONSTANT_OVERFLOW (arg0)
2460 || TREE_CONSTANT_OVERFLOW (arg1))
2461 return 0;
2463 v1 = TREE_VECTOR_CST_ELTS (arg0);
2464 v2 = TREE_VECTOR_CST_ELTS (arg1);
2465 while (v1 && v2)
2467 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2468 flags))
2469 return 0;
2470 v1 = TREE_CHAIN (v1);
2471 v2 = TREE_CHAIN (v2);
2474 return v1 == v2;
2477 case COMPLEX_CST:
2478 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2479 flags)
2480 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2481 flags));
2483 case STRING_CST:
2484 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2485 && ! memcmp (TREE_STRING_POINTER (arg0),
2486 TREE_STRING_POINTER (arg1),
2487 TREE_STRING_LENGTH (arg0)));
2489 case ADDR_EXPR:
2490 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2491 1);
2492 default:
2493 break;
2496 if (flags & OEP_ONLY_CONST)
2497 return 0;
2499 /* Define macros to test an operand from arg0 and arg1 for equality and a
2500 variant that allows null and views null as being different from any
2501 non-null value. In the latter case, if either is null, then both
2502 must be; otherwise, do the normal comparison. */
2503 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2504 TREE_OPERAND (arg1, N), flags)
2506 #define OP_SAME_WITH_NULL(N) \
2507 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2508 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2510 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2512 case tcc_unary:
2513 /* Two conversions are equal only if signedness and modes match. */
2514 switch (TREE_CODE (arg0))
2516 case NOP_EXPR:
2517 case CONVERT_EXPR:
2518 case FIX_CEIL_EXPR:
2519 case FIX_TRUNC_EXPR:
2520 case FIX_FLOOR_EXPR:
2521 case FIX_ROUND_EXPR:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2524 return 0;
2525 break;
2526 default:
2527 break;
2530 return OP_SAME (0);
2533 case tcc_comparison:
2534 case tcc_binary:
2535 if (OP_SAME (0) && OP_SAME (1))
2536 return 1;
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0))
2540 && operand_equal_p (TREE_OPERAND (arg0, 0),
2541 TREE_OPERAND (arg1, 1), flags)
2542 && operand_equal_p (TREE_OPERAND (arg0, 1),
2543 TREE_OPERAND (arg1, 0), flags));
2545 case tcc_reference:
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1))
2550 return 0;
2552 switch (TREE_CODE (arg0))
2554 case INDIRECT_REF:
2555 case ALIGN_INDIRECT_REF:
2556 case MISALIGNED_INDIRECT_REF:
2557 case REALPART_EXPR:
2558 case IMAGPART_EXPR:
2559 return OP_SAME (0);
2561 case ARRAY_REF:
2562 case ARRAY_RANGE_REF:
2563 /* Operands 2 and 3 may be null. */
2564 return (OP_SAME (0)
2565 && OP_SAME (1)
2566 && OP_SAME_WITH_NULL (2)
2567 && OP_SAME_WITH_NULL (3));
2569 case COMPONENT_REF:
2570 /* Handle operand 2 the same as for ARRAY_REF. */
2571 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2573 case BIT_FIELD_REF:
2574 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2576 default:
2577 return 0;
2580 case tcc_expression:
2581 switch (TREE_CODE (arg0))
2583 case ADDR_EXPR:
2584 case TRUTH_NOT_EXPR:
2585 return OP_SAME (0);
2587 case TRUTH_ANDIF_EXPR:
2588 case TRUTH_ORIF_EXPR:
2589 return OP_SAME (0) && OP_SAME (1);
2591 case TRUTH_AND_EXPR:
2592 case TRUTH_OR_EXPR:
2593 case TRUTH_XOR_EXPR:
2594 if (OP_SAME (0) && OP_SAME (1))
2595 return 1;
2597 /* Otherwise take into account this is a commutative operation. */
2598 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2599 TREE_OPERAND (arg1, 1), flags)
2600 && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 TREE_OPERAND (arg1, 0), flags));
2603 case CALL_EXPR:
2604 /* If the CALL_EXPRs call different functions, then they
2605 clearly can not be equal. */
2606 if (!OP_SAME (0))
2607 return 0;
2610 unsigned int cef = call_expr_flags (arg0);
2611 if (flags & OEP_PURE_SAME)
2612 cef &= ECF_CONST | ECF_PURE;
2613 else
2614 cef &= ECF_CONST;
2615 if (!cef)
2616 return 0;
2619 /* Now see if all the arguments are the same. operand_equal_p
2620 does not handle TREE_LIST, so we walk the operands here
2621 feeding them to operand_equal_p. */
2622 arg0 = TREE_OPERAND (arg0, 1);
2623 arg1 = TREE_OPERAND (arg1, 1);
2624 while (arg0 && arg1)
2626 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2627 flags))
2628 return 0;
2630 arg0 = TREE_CHAIN (arg0);
2631 arg1 = TREE_CHAIN (arg1);
2634 /* If we get here and both argument lists are exhausted
2635 then the CALL_EXPRs are equal. */
2636 return ! (arg0 || arg1);
2638 default:
2639 return 0;
2642 case tcc_declaration:
2643 /* Consider __builtin_sqrt equal to sqrt. */
2644 return (TREE_CODE (arg0) == FUNCTION_DECL
2645 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2646 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2647 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2649 default:
2650 return 0;
2653 #undef OP_SAME
2654 #undef OP_SAME_WITH_NULL
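/* Minimal sketch (hypothetical helper): an expression without side
   effects is operand_equal_p to itself.  Passing OEP_ONLY_CONST instead
   of 0 restricts the positive answer to constants, which matters when
   -0.0 and 0.0 must remain distinguishable.  */

static int
example_operand_identity (tree x)
{
  return operand_equal_p (x, x, 0);
}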
2657 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2658 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2660 When in doubt, return 0. */
2662 static int
2663 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2665 int unsignedp1, unsignedpo;
2666 tree primarg0, primarg1, primother;
2667 unsigned int correct_width;
2669 if (operand_equal_p (arg0, arg1, 0))
2670 return 1;
2672 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2673 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2674 return 0;
2676 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2677 and see if the inner values are the same. This removes any
2678 signedness comparison, which doesn't matter here. */
2679 primarg0 = arg0, primarg1 = arg1;
2680 STRIP_NOPS (primarg0);
2681 STRIP_NOPS (primarg1);
2682 if (operand_equal_p (primarg0, primarg1, 0))
2683 return 1;
2685 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2686 actual comparison operand, ARG0.
2688 First throw away any conversions to wider types
2689 already present in the operands. */
2691 primarg1 = get_narrower (arg1, &unsignedp1);
2692 primother = get_narrower (other, &unsignedpo);
2694 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2695 if (unsignedp1 == unsignedpo
2696 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2697 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2699 tree type = TREE_TYPE (arg0);
2701 /* Make sure shorter operand is extended the right way
2702 to match the longer operand. */
2703 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2704 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2706 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2707 return 1;
2710 return 0;
2713 /* See if ARG is an expression that is either a comparison or is performing
2714 arithmetic on comparisons. The comparisons must only be comparing
2715 two different values, which will be stored in *CVAL1 and *CVAL2; if
2716 they are nonzero it means that some operands have already been found.
2717 No variables may be used anywhere else in the expression except in the
2718 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2719 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2721 If this is true, return 1. Otherwise, return zero. */
2723 static int
2724 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2726 enum tree_code code = TREE_CODE (arg);
2727 enum tree_code_class class = TREE_CODE_CLASS (code);
2729 /* We can handle some of the tcc_expression cases here. */
2730 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2731 class = tcc_unary;
2732 else if (class == tcc_expression
2733 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2734 || code == COMPOUND_EXPR))
2735 class = tcc_binary;
2737 else if (class == tcc_expression && code == SAVE_EXPR
2738 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2740 /* If we've already found a CVAL1 or CVAL2, this expression is
2741 too complex to handle. */
2742 if (*cval1 || *cval2)
2743 return 0;
2745 class = tcc_unary;
2746 *save_p = 1;
2749 switch (class)
2751 case tcc_unary:
2752 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2754 case tcc_binary:
2755 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2756 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2757 cval1, cval2, save_p));
2759 case tcc_constant:
2760 return 1;
2762 case tcc_expression:
2763 if (code == COND_EXPR)
2764 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2765 cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p)
2768 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2769 cval1, cval2, save_p));
2770 return 0;
2772 case tcc_comparison:
2773 /* First see if we can handle the first operand, then the second. For
2774 the second operand, we know *CVAL1 can't be zero. It must be that
2775 one side of the comparison is each of the values; test for the
2776 case where this isn't true by failing if the two operands
2777 are the same. */
2779 if (operand_equal_p (TREE_OPERAND (arg, 0),
2780 TREE_OPERAND (arg, 1), 0))
2781 return 0;
2783 if (*cval1 == 0)
2784 *cval1 = TREE_OPERAND (arg, 0);
2785 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2787 else if (*cval2 == 0)
2788 *cval2 = TREE_OPERAND (arg, 0);
2789 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2791 else
2792 return 0;
2794 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2796 else if (*cval2 == 0)
2797 *cval2 = TREE_OPERAND (arg, 1);
2798 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2800 else
2801 return 0;
2803 return 1;
2805 default:
2806 return 0;
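/* Illustrative use (hypothetical helper): for ARG of the form
   (x < y) | (x == y) the walk above records x in *CVAL1 and y in
   *CVAL2 and returns 1; a comparison involving any third variable
   makes it return 0.  */

static int
example_twoval (tree arg)
{
  tree cval1 = NULL_TREE, cval2 = NULL_TREE;
  int save_p = 0;

  return twoval_comparison_p (arg, &cval1, &cval2, &save_p);
}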
2810 /* ARG is a tree that is known to contain just arithmetic operations and
2811 comparisons. Evaluate the operations in the tree substituting NEW0 for
2812 any occurrence of OLD0 as an operand of a comparison and likewise for
2813 NEW1 and OLD1. */
2815 static tree
2816 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2818 tree type = TREE_TYPE (arg);
2819 enum tree_code code = TREE_CODE (arg);
2820 enum tree_code_class class = TREE_CODE_CLASS (code);
2822 /* We can handle some of the tcc_expression cases here. */
2823 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2824 class = tcc_unary;
2825 else if (class == tcc_expression
2826 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2827 class = tcc_binary;
2829 switch (class)
2831 case tcc_unary:
2832 return fold_build1 (code, type,
2833 eval_subst (TREE_OPERAND (arg, 0),
2834 old0, new0, old1, new1));
2836 case tcc_binary:
2837 return fold_build2 (code, type,
2838 eval_subst (TREE_OPERAND (arg, 0),
2839 old0, new0, old1, new1),
2840 eval_subst (TREE_OPERAND (arg, 1),
2841 old0, new0, old1, new1));
2843 case tcc_expression:
2844 switch (code)
2846 case SAVE_EXPR:
2847 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2849 case COMPOUND_EXPR:
2850 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2852 case COND_EXPR:
2853 return fold_build3 (code, type,
2854 eval_subst (TREE_OPERAND (arg, 0),
2855 old0, new0, old1, new1),
2856 eval_subst (TREE_OPERAND (arg, 1),
2857 old0, new0, old1, new1),
2858 eval_subst (TREE_OPERAND (arg, 2),
2859 old0, new0, old1, new1));
2860 default:
2861 break;
2863 /* Fall through - ??? */
2865 case tcc_comparison:
2867 tree arg0 = TREE_OPERAND (arg, 0);
2868 tree arg1 = TREE_OPERAND (arg, 1);
2870 /* We need to check both for exact equality and tree equality. The
2871 former will be true if the operand has a side-effect. In that
2872 case, we know the operand occurred exactly once. */
2874 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2875 arg0 = new0;
2876 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2877 arg0 = new1;
2879 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2880 arg1 = new0;
2881 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2882 arg1 = new1;
2884 return fold_build2 (code, type, arg0, arg1);
2887 default:
2888 return arg;
2892 /* Return a tree for the case when the result of an expression is RESULT
2893 converted to TYPE and OMITTED was previously an operand of the expression
2894 but is now not needed (e.g., we folded OMITTED * 0).
2896 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2897 the conversion of RESULT to TYPE. */
2899 tree
2900 omit_one_operand (tree type, tree result, tree omitted)
2902 tree t = fold_convert (type, result);
2904 if (TREE_SIDE_EFFECTS (omitted))
2905 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2907 return non_lvalue (t);
2910 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2912 static tree
2913 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2915 tree t = fold_convert (type, result);
2917 if (TREE_SIDE_EFFECTS (omitted))
2918 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2920 return pedantic_non_lvalue (t);
2923 /* Return a tree for the case when the result of an expression is RESULT
2924 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2925 of the expression but are now not needed.
2927 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2928 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2929 evaluated before OMITTED2. Otherwise, if neither has side effects,
2930 just do the conversion of RESULT to TYPE. */
2932 tree
2933 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2935 tree t = fold_convert (type, result);
2937 if (TREE_SIDE_EFFECTS (omitted2))
2938 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2939 if (TREE_SIDE_EFFECTS (omitted1))
2940 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2942 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
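/* Sketch (hypothetical helper): folding X * 0 to 0.  If X has side
   effects they are preserved in a COMPOUND_EXPR ahead of the constant
   result; otherwise the converted constant is returned directly.  */

static tree
example_fold_mult_by_zero (tree type, tree x)
{
  return omit_one_operand (type, build_int_cst (type, 0), x);
}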
2946 /* Return a simplified tree node for the truth-negation of ARG. This
2947 never alters ARG itself. We assume that ARG is an operation that
2948 returns a truth value (0 or 1).
2950 FIXME: one would think we would fold the result, but it causes
2951 problems with the dominator optimizer. */
2952 tree
2953 invert_truthvalue (tree arg)
2955 tree type = TREE_TYPE (arg);
2956 enum tree_code code = TREE_CODE (arg);
2958 if (code == ERROR_MARK)
2959 return arg;
2961 /* If this is a comparison, we can simply invert it, except for
2962 floating-point non-equality comparisons, in which case we just
2963 enclose a TRUTH_NOT_EXPR around what we have. */
2965 if (TREE_CODE_CLASS (code) == tcc_comparison)
2967 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2968 if (FLOAT_TYPE_P (op_type)
2969 && flag_trapping_math
2970 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2971 && code != NE_EXPR && code != EQ_EXPR)
2972 return build1 (TRUTH_NOT_EXPR, type, arg);
2973 else
2975 code = invert_tree_comparison (code,
2976 HONOR_NANS (TYPE_MODE (op_type)));
2977 if (code == ERROR_MARK)
2978 return build1 (TRUTH_NOT_EXPR, type, arg);
2979 else
2980 return build2 (code, type,
2981 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2985 switch (code)
2987 case INTEGER_CST:
2988 return constant_boolean_node (integer_zerop (arg), type);
2990 case TRUTH_AND_EXPR:
2991 return build2 (TRUTH_OR_EXPR, type,
2992 invert_truthvalue (TREE_OPERAND (arg, 0)),
2993 invert_truthvalue (TREE_OPERAND (arg, 1)));
2995 case TRUTH_OR_EXPR:
2996 return build2 (TRUTH_AND_EXPR, type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)),
2998 invert_truthvalue (TREE_OPERAND (arg, 1)));
3000 case TRUTH_XOR_EXPR:
3001 /* Here we can invert either operand. We invert the first operand
3002 unless the second operand is a TRUTH_NOT_EXPR in which case our
3003 result is the XOR of the first operand with the inside of the
3004 negation of the second operand. */
3006 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3007 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3008 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3009 else
3010 return build2 (TRUTH_XOR_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)),
3012 TREE_OPERAND (arg, 1));
3014 case TRUTH_ANDIF_EXPR:
3015 return build2 (TRUTH_ORIF_EXPR, type,
3016 invert_truthvalue (TREE_OPERAND (arg, 0)),
3017 invert_truthvalue (TREE_OPERAND (arg, 1)));
3019 case TRUTH_ORIF_EXPR:
3020 return build2 (TRUTH_ANDIF_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 invert_truthvalue (TREE_OPERAND (arg, 1)));
3024 case TRUTH_NOT_EXPR:
3025 return TREE_OPERAND (arg, 0);
3027 case COND_EXPR:
3029 tree arg1 = TREE_OPERAND (arg, 1);
3030 tree arg2 = TREE_OPERAND (arg, 2);
3031 /* A COND_EXPR may have a throw as one operand, which
3032 then has void type. Just leave void operands
3033 as they are. */
3034 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3035 VOID_TYPE_P (TREE_TYPE (arg1))
3036 ? arg1 : invert_truthvalue (arg1),
3037 VOID_TYPE_P (TREE_TYPE (arg2))
3038 ? arg2 : invert_truthvalue (arg2));
3041 case COMPOUND_EXPR:
3042 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3043 invert_truthvalue (TREE_OPERAND (arg, 1)));
3045 case NON_LVALUE_EXPR:
3046 return invert_truthvalue (TREE_OPERAND (arg, 0));
3048 case NOP_EXPR:
3049 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3050 break;
3052 case CONVERT_EXPR:
3053 case FLOAT_EXPR:
3054 return build1 (TREE_CODE (arg), type,
3055 invert_truthvalue (TREE_OPERAND (arg, 0)));
3057 case BIT_AND_EXPR:
3058 if (!integer_onep (TREE_OPERAND (arg, 1)))
3059 break;
3060 return build2 (EQ_EXPR, type, arg,
3061 fold_convert (type, integer_zero_node));
3063 case SAVE_EXPR:
3064 return build1 (TRUTH_NOT_EXPR, type, arg);
3066 case CLEANUP_POINT_EXPR:
3067 return build1 (CLEANUP_POINT_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)));
3070 default:
3071 break;
3073 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3074 return build1 (TRUTH_NOT_EXPR, type, arg);
3077 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3078 operands are another bit-wise operation with a common input. If so,
3079 distribute the bit operations to save an operation and possibly two if
3080 constants are involved. For example, convert
3081 (A | B) & (A | C) into A | (B & C)
3082 Further simplification will occur if B and C are constants.
3084 If this optimization cannot be done, 0 will be returned. */
3086 static tree
3087 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3089 tree common;
3090 tree left, right;
3092 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3093 || TREE_CODE (arg0) == code
3094 || (TREE_CODE (arg0) != BIT_AND_EXPR
3095 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3096 return 0;
3098 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3100 common = TREE_OPERAND (arg0, 0);
3101 left = TREE_OPERAND (arg0, 1);
3102 right = TREE_OPERAND (arg1, 1);
3104 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3106 common = TREE_OPERAND (arg0, 0);
3107 left = TREE_OPERAND (arg0, 1);
3108 right = TREE_OPERAND (arg1, 0);
3110 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3112 common = TREE_OPERAND (arg0, 1);
3113 left = TREE_OPERAND (arg0, 0);
3114 right = TREE_OPERAND (arg1, 1);
3116 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3118 common = TREE_OPERAND (arg0, 1);
3119 left = TREE_OPERAND (arg0, 0);
3120 right = TREE_OPERAND (arg1, 0);
3122 else
3123 return 0;
3125 return fold_build2 (TREE_CODE (arg0), type, common,
3126 fold_build2 (code, type, left, right));
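/* Hypothetical illustration of the distribution above: in
   (a | 4) & (a | 1) the common input is a, so the result is
   a | (4 & 1); the inner fold_build2 reduces 4 & 1 to 0, and further
   folding of a | 0 yields plain a.  */

static tree
example_distribute_ior_and (tree a)
{
  tree type = TREE_TYPE (a);
  tree lhs = build2 (BIT_IOR_EXPR, type, a, build_int_cst (type, 4));
  tree rhs = build2 (BIT_IOR_EXPR, type, a, build_int_cst (type, 1));

  /* Returns 0 when the operands share no common input.  */
  return distribute_bit_expr (BIT_AND_EXPR, type, lhs, rhs);
}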
3129 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3130 with code CODE. This optimization is unsafe. */
3131 static tree
3132 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3134 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3135 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3137 /* (A / C) +- (B / C) -> (A +- B) / C. */
3138 if (mul0 == mul1
3139 && operand_equal_p (TREE_OPERAND (arg0, 1),
3140 TREE_OPERAND (arg1, 1), 0))
3141 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3142 fold_build2 (code, type,
3143 TREE_OPERAND (arg0, 0),
3144 TREE_OPERAND (arg1, 0)),
3145 TREE_OPERAND (arg0, 1));
3147 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3148 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3149 TREE_OPERAND (arg1, 0), 0)
3150 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3151 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3153 REAL_VALUE_TYPE r0, r1;
3154 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3155 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3156 if (!mul0)
3157 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3158 if (!mul1)
3159 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3160 real_arithmetic (&r0, code, &r0, &r1);
3161 return fold_build2 (MULT_EXPR, type,
3162 TREE_OPERAND (arg0, 0),
3163 build_real (type, r0));
3166 return NULL_TREE;
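/* C-level analogue of the rewrite above (illustrative only; as the
   comment before the function says, the transformation is unsafe
   because it can change rounding, so it is not done unconditionally):

     a / c + b / c    =>  (a + b) / c
     a / c1 + a / c2  =>  a * (1/c1 + 1/c2), e.g. a / 2.0 + a / 4.0
                          becomes a * 0.75.  */

static double
example_distributed_division (double a, double b)
{
  /* Was a / 3.0 + b / 3.0: one division instead of two.  */
  return (a + b) / 3.0;
}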
3169 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3170 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3172 static tree
3173 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3174 int unsignedp)
3176 tree result;
3178 if (bitpos == 0)
3180 tree size = TYPE_SIZE (TREE_TYPE (inner));
3181 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3182 || POINTER_TYPE_P (TREE_TYPE (inner)))
3183 && host_integerp (size, 0)
3184 && tree_low_cst (size, 0) == bitsize)
3185 return fold_convert (type, inner);
3188 result = build3 (BIT_FIELD_REF, type, inner,
3189 size_int (bitsize), bitsize_int (bitpos));
3191 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3193 return result;
3196 /* Optimize a bit-field compare.
3198 There are two cases: First is a compare against a constant and the
3199 second is a comparison of two items where the fields are at the same
3200 bit position relative to the start of a chunk (byte, halfword, word)
3201 large enough to contain it. In these cases we can avoid the shift
3202 implicit in bitfield extractions.
3204 For constants, we emit a compare of the shifted constant with the
3205 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3206 compared. For two fields at the same position, we do the ANDs with the
3207 similar mask and compare the result of the ANDs.
3209 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3210 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3211 are the left and right operands of the comparison, respectively.
3213 If the optimization described above can be done, we return the resulting
3214 tree. Otherwise we return zero. */
3216 static tree
3217 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3218 tree lhs, tree rhs)
3220 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3221 tree type = TREE_TYPE (lhs);
3222 tree signed_type, unsigned_type;
3223 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3224 enum machine_mode lmode, rmode, nmode;
3225 int lunsignedp, runsignedp;
3226 int lvolatilep = 0, rvolatilep = 0;
3227 tree linner, rinner = NULL_TREE;
3228 tree mask;
3229 tree offset;
3231 /* Get all the information about the extractions being done. If the bit size
3232 is the same as the size of the underlying object, we aren't doing an
3233 extraction at all and so can do nothing. We also don't want to
3234 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3235 then will no longer be able to replace it. */
3236 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3237 &lunsignedp, &lvolatilep, false);
3238 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3239 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3240 return 0;
3242 if (!const_p)
3244 /* If this is not a constant, we can only do something if bit positions,
3245 sizes, and signedness are the same. */
3246 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3247 &runsignedp, &rvolatilep, false);
3249 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3250 || lunsignedp != runsignedp || offset != 0
3251 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3252 return 0;
3255 /* See if we can find a mode to refer to this field. We should be able to,
3256 but fail if we can't. */
3257 nmode = get_best_mode (lbitsize, lbitpos,
3258 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3259 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3260 TYPE_ALIGN (TREE_TYPE (rinner))),
3261 word_mode, lvolatilep || rvolatilep);
3262 if (nmode == VOIDmode)
3263 return 0;
3265 /* Set signed and unsigned types of the precision of this mode for the
3266 shifts below. */
3267 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3268 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3270 /* Compute the bit position and size for the new reference and our offset
3271 within it. If the new reference is the same size as the original, we
3272 won't optimize anything, so return zero. */
3273 nbitsize = GET_MODE_BITSIZE (nmode);
3274 nbitpos = lbitpos & ~ (nbitsize - 1);
3275 lbitpos -= nbitpos;
3276 if (nbitsize == lbitsize)
3277 return 0;
3279 if (BYTES_BIG_ENDIAN)
3280 lbitpos = nbitsize - lbitsize - lbitpos;
3282 /* Make the mask to be used against the extracted field. */
3283 mask = build_int_cst (unsigned_type, -1);
3284 mask = force_fit_type (mask, 0, false, false);
3285 mask = fold_convert (unsigned_type, mask);
3286 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3287 mask = const_binop (RSHIFT_EXPR, mask,
3288 size_int (nbitsize - lbitsize - lbitpos), 0);
3290 if (! const_p)
3291 /* If not comparing with constant, just rework the comparison
3292 and return. */
3293 return build2 (code, compare_type,
3294 build2 (BIT_AND_EXPR, unsigned_type,
3295 make_bit_field_ref (linner, unsigned_type,
3296 nbitsize, nbitpos, 1),
3297 mask),
3298 build2 (BIT_AND_EXPR, unsigned_type,
3299 make_bit_field_ref (rinner, unsigned_type,
3300 nbitsize, nbitpos, 1),
3301 mask));
3303 /* Otherwise, we are handling the constant case. See if the constant is too
3304 big for the field. Warn and return a tree for 0 (false) if so. We do
3305 this not only for its own sake, but to avoid having to test for this
3306 error case below. If we didn't, we might generate wrong code.
3308 For unsigned fields, the constant shifted right by the field length should
3309 be all zero. For signed fields, the high-order bits should agree with
3310 the sign bit. */
3312 if (lunsignedp)
3314 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3315 fold_convert (unsigned_type, rhs),
3316 size_int (lbitsize), 0)))
3318 warning (0, "comparison is always %d due to width of bit-field",
3319 code == NE_EXPR);
3320 return constant_boolean_node (code == NE_EXPR, compare_type);
3323 else
3325 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3326 size_int (lbitsize - 1), 0);
3327 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3329 warning (0, "comparison is always %d due to width of bit-field",
3330 code == NE_EXPR);
3331 return constant_boolean_node (code == NE_EXPR, compare_type);
3335 /* Single-bit compares should always be against zero. */
3336 if (lbitsize == 1 && ! integer_zerop (rhs))
3338 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3339 rhs = fold_convert (type, integer_zero_node);
3342 /* Make a new bitfield reference, shift the constant over the
3343 appropriate number of bits and mask it with the computed mask
3344 (in case this was a signed field). If we changed it, make a new one. */
3345 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3346 if (lvolatilep)
3348 TREE_SIDE_EFFECTS (lhs) = 1;
3349 TREE_THIS_VOLATILE (lhs) = 1;
3352 rhs = const_binop (BIT_AND_EXPR,
3353 const_binop (LSHIFT_EXPR,
3354 fold_convert (unsigned_type, rhs),
3355 size_int (lbitpos), 0),
3356 mask, 0);
3358 return build2 (code, compare_type,
3359 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3360 rhs);
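/* C-level analogue of the rewrite above (illustrative; the actual mask
   and shift depend on endianness and the field's position in its
   containing word).  For a 3-bit field at bit position 0, the bitfield
   test p->f == 5 loses its implicit extract-and-shift:  */

struct example_bits { unsigned int f : 3; };

static int
example_bit_field_compare_before (struct example_bits *p)
{
  return p->f == 5;
}

static int
example_bit_field_compare_after (unsigned int word)
{
  /* The mask selects the bits where F lives; the constant is masked
     and shifted into place once, at compile time.  */
  return (word & 7u) == 5u;
}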
3363 /* Subroutine for fold_truthop: decode a field reference.
3365 If EXP is a comparison reference, we return the innermost reference.
3367 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3368 set to the starting bit number.
3370 If the innermost field can be completely contained in a mode-sized
3371 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3373 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3374 otherwise it is not changed.
3376 *PUNSIGNEDP is set to the signedness of the field.
3378 *PMASK is set to the mask used. This is either contained in a
3379 BIT_AND_EXPR or derived from the width of the field.
3381 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3383 Return 0 if this is not a component reference or is one that we can't
3384 do anything with. */
3386 static tree
3387 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3388 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3389 int *punsignedp, int *pvolatilep,
3390 tree *pmask, tree *pand_mask)
3392 tree outer_type = 0;
3393 tree and_mask = 0;
3394 tree mask, inner, offset;
3395 tree unsigned_type;
3396 unsigned int precision;
3398 /* All the optimizations using this function assume integer fields.
3399 There are problems with FP fields since the type_for_size call
3400 below can fail for, e.g., XFmode. */
3401 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3402 return 0;
3404 /* We are interested in the bare arrangement of bits, so strip everything
3405 that doesn't affect the machine mode. However, record the type of the
3406 outermost expression if it may matter below. */
3407 if (TREE_CODE (exp) == NOP_EXPR
3408 || TREE_CODE (exp) == CONVERT_EXPR
3409 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3410 outer_type = TREE_TYPE (exp);
3411 STRIP_NOPS (exp);
3413 if (TREE_CODE (exp) == BIT_AND_EXPR)
3415 and_mask = TREE_OPERAND (exp, 1);
3416 exp = TREE_OPERAND (exp, 0);
3417 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3418 if (TREE_CODE (and_mask) != INTEGER_CST)
3419 return 0;
3422 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3423 punsignedp, pvolatilep, false);
3424 if ((inner == exp && and_mask == 0)
3425 || *pbitsize < 0 || offset != 0
3426 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3427 return 0;
3429 /* If the number of bits in the reference is the same as the bitsize of
3430 the outer type, then the outer type gives the signedness. Otherwise
3431 (in case of a small bitfield) the signedness is unchanged. */
3432 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3433 *punsignedp = TYPE_UNSIGNED (outer_type);
3435 /* Compute the mask to access the bitfield. */
3436 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3437 precision = TYPE_PRECISION (unsigned_type);
3439 mask = build_int_cst (unsigned_type, -1);
3440 mask = force_fit_type (mask, 0, false, false);
3442 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3443 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3445 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3446 if (and_mask != 0)
3447 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3448 fold_convert (unsigned_type, and_mask), mask);
3450 *pmask = mask;
3451 *pand_mask = and_mask;
3452 return inner;
3455 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3456 bit positions. */
3458 static int
3459 all_ones_mask_p (tree mask, int size)
3461 tree type = TREE_TYPE (mask);
3462 unsigned int precision = TYPE_PRECISION (type);
3463 tree tmask;
3465 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3466 tmask = force_fit_type (tmask, 0, false, false);
3468 return
3469 tree_int_cst_equal (mask,
3470 const_binop (RSHIFT_EXPR,
3471 const_binop (LSHIFT_EXPR, tmask,
3472 size_int (precision - size),
3473 0),
3474 size_int (precision - size), 0));
3477 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3478 represents the sign bit of EXP's type. If EXP represents a sign
3479 or zero extension, also test VAL against the unextended type.
3480 The return value is the (sub)expression whose sign bit is VAL,
3481 or NULL_TREE otherwise. */
3483 static tree
3484 sign_bit_p (tree exp, tree val)
3486 unsigned HOST_WIDE_INT mask_lo, lo;
3487 HOST_WIDE_INT mask_hi, hi;
3488 int width;
3489 tree t;
3491 /* Tree EXP must have an integral type. */
3492 t = TREE_TYPE (exp);
3493 if (! INTEGRAL_TYPE_P (t))
3494 return NULL_TREE;
3496 /* Tree VAL must be an integer constant. */
3497 if (TREE_CODE (val) != INTEGER_CST
3498 || TREE_CONSTANT_OVERFLOW (val))
3499 return NULL_TREE;
3501 width = TYPE_PRECISION (t);
3502 if (width > HOST_BITS_PER_WIDE_INT)
3504 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3505 lo = 0;
3507 mask_hi = ((unsigned HOST_WIDE_INT) -1
3508 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3509 mask_lo = -1;
3511 else
3513 hi = 0;
3514 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3516 mask_hi = 0;
3517 mask_lo = ((unsigned HOST_WIDE_INT) -1
3518 >> (HOST_BITS_PER_WIDE_INT - width));
3521 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3522 treat VAL as if it were unsigned. */
3523 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3524 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3525 return exp;
3527 /* Handle extension from a narrower type. */
3528 if (TREE_CODE (exp) == NOP_EXPR
3529 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3530 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3532 return NULL_TREE;
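/* Worked instance of the masking above (hypothetical helper, assuming
   EXPR has 'signed char' type): for width 8, lo == 0x80 and
   mask_lo == 0xff, so the constant -128 -- whose low eight bits are
   0x80 -- is recognized as the sign bit and EXPR itself is returned.  */

static tree
example_sign_bit_of_char (tree expr)
{
  return sign_bit_p (expr, build_int_cst (signed_char_type_node, -128));
}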
3535 /* Subroutine for fold_truthop: determine if an operand is simple enough
3536 to be evaluated unconditionally. */
3538 static int
3539 simple_operand_p (tree exp)
3541 /* Strip any conversions that don't change the machine mode. */
3542 STRIP_NOPS (exp);
3544 return (CONSTANT_CLASS_P (exp)
3545 || TREE_CODE (exp) == SSA_NAME
3546 || (DECL_P (exp)
3547 && ! TREE_ADDRESSABLE (exp)
3548 && ! TREE_THIS_VOLATILE (exp)
3549 && ! DECL_NONLOCAL (exp)
3550 /* Don't regard global variables as simple. They may be
3551 allocated in ways unknown to the compiler (shared memory,
3552 #pragma weak, etc). */
3553 && ! TREE_PUBLIC (exp)
3554 && ! DECL_EXTERNAL (exp)
3555 /* Loading a static variable is unduly expensive, but global
3556 registers aren't expensive. */
3557 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3560 /* The following functions are subroutines to fold_range_test and allow it to
3561 try to change a logical combination of comparisons into a range test.
3563 For example, both
3564 X == 2 || X == 3 || X == 4 || X == 5
3565 and
3566 X >= 2 && X <= 5
3567 are converted to
3568 (unsigned) (X - 2) <= 3
3570 We describe each set of comparisons as being either inside or outside
3571 a range, using a variable named like IN_P, and then describe the
3572 range with a lower and upper bound. If one of the bounds is omitted,
3573 it represents either the highest or lowest value of the type.
3575 In the comments below, we represent a range by two numbers in brackets
3576 preceded by a "+" to designate being inside that range, or a "-" to
3577 designate being outside that range, so the condition can be inverted by
3578 flipping the prefix. An omitted bound is represented by a "-". For
3579 example, "- [-, 10]" means being outside the range starting at the lowest
3580 possible value and ending at 10, in other words, being greater than 10.
3581 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3582 always false.
3584 We set up things so that the missing bounds are handled in a consistent
3585 manner so neither a missing bound nor "true" and "false" need to be
3586 handled using a special case. */
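/* C-level analogue of the canonical range test described above
   (illustrative):  */

static int
example_range_test_before (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
example_range_test_after (int x)
{
  /* Subtracting the low bound and comparing unsigned collapses the
     chain into one test: values below 2 wrap to huge unsigned numbers
     and fail the <= 3 check.  */
  return (unsigned) (x - 2) <= 3;
}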
3588 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3589 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3590 and UPPER1_P are nonzero if the respective argument is an upper bound
3591 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3592 must be specified for a comparison. ARG1 will be converted to ARG0's
3593 type if both are specified. */
3595 static tree
3596 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3597 tree arg1, int upper1_p)
3599 tree tem;
3600 int result;
3601 int sgn0, sgn1;
3603 /* If neither arg represents infinity, do the normal operation.
3604 Else, if not a comparison, return infinity. Else handle the special
3605 comparison rules. Note that most of the cases below won't occur, but
3606 are handled for consistency. */
3608 if (arg0 != 0 && arg1 != 0)
3610 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3611 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3612 STRIP_NOPS (tem);
3613 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3616 if (TREE_CODE_CLASS (code) != tcc_comparison)
3617 return 0;
3619 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3620 for neither. In real maths, we cannot assume open ended ranges are
3621 the same. But, this is computer arithmetic, where numbers are finite.
3622 We can therefore make the transformation of any unbounded range with
3623 the value Z, Z being greater than any representable number. This permits
3624 us to treat unbounded ranges as equal. */
3625 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3626 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3627 switch (code)
3629 case EQ_EXPR:
3630 result = sgn0 == sgn1;
3631 break;
3632 case NE_EXPR:
3633 result = sgn0 != sgn1;
3634 break;
3635 case LT_EXPR:
3636 result = sgn0 < sgn1;
3637 break;
3638 case LE_EXPR:
3639 result = sgn0 <= sgn1;
3640 break;
3641 case GT_EXPR:
3642 result = sgn0 > sgn1;
3643 break;
3644 case GE_EXPR:
3645 result = sgn0 >= sgn1;
3646 break;
3647 default:
3648 gcc_unreachable ();
3651 return constant_boolean_node (result, type);
3654 /* Given EXP, a logical expression, set the range it is testing into
3655 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3656 actually being tested. *PLOW and *PHIGH will be made of the same type
3657 as the returned expression. If EXP is not a comparison, we will most
3658 likely not be returning a useful value and range. */
3660 static tree
3661 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3663 enum tree_code code;
3664 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3665 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3666 int in_p, n_in_p;
3667 tree low, high, n_low, n_high;
3669 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3670 and see if we can refine the range. Some of the cases below may not
3671 happen, but it doesn't seem worth worrying about this. We "continue"
3672 the outer loop when we've changed something; otherwise we "break"
3673 the switch, which will "break" the while. */
3675 in_p = 0;
3676 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3678 while (1)
3680 code = TREE_CODE (exp);
3681 exp_type = TREE_TYPE (exp);
3683 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3685 if (TREE_CODE_LENGTH (code) > 0)
3686 arg0 = TREE_OPERAND (exp, 0);
3687 if (TREE_CODE_CLASS (code) == tcc_comparison
3688 || TREE_CODE_CLASS (code) == tcc_unary
3689 || TREE_CODE_CLASS (code) == tcc_binary)
3690 arg0_type = TREE_TYPE (arg0);
3691 if (TREE_CODE_CLASS (code) == tcc_binary
3692 || TREE_CODE_CLASS (code) == tcc_comparison
3693 || (TREE_CODE_CLASS (code) == tcc_expression
3694 && TREE_CODE_LENGTH (code) > 1))
3695 arg1 = TREE_OPERAND (exp, 1);
3698 switch (code)
3700 case TRUTH_NOT_EXPR:
3701 in_p = ! in_p, exp = arg0;
3702 continue;
3704 case EQ_EXPR: case NE_EXPR:
3705 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3706 /* We can only do something if the range is testing for zero
3707 and if the second operand is an integer constant. Note that
3708 saying something is "in" the range we make is done by
3709 complementing IN_P since it will set in the initial case of
3710 being not equal to zero; "out" is leaving it alone. */
3711 if (low == 0 || high == 0
3712 || ! integer_zerop (low) || ! integer_zerop (high)
3713 || TREE_CODE (arg1) != INTEGER_CST)
3714 break;
3716 switch (code)
3718 case NE_EXPR: /* - [c, c] */
3719 low = high = arg1;
3720 break;
3721 case EQ_EXPR: /* + [c, c] */
3722 in_p = ! in_p, low = high = arg1;
3723 break;
3724 case GT_EXPR: /* - [-, c] */
3725 low = 0, high = arg1;
3726 break;
3727 case GE_EXPR: /* + [c, -] */
3728 in_p = ! in_p, low = arg1, high = 0;
3729 break;
3730 case LT_EXPR: /* - [c, -] */
3731 low = arg1, high = 0;
3732 break;
3733 case LE_EXPR: /* + [-, c] */
3734 in_p = ! in_p, low = 0, high = arg1;
3735 break;
3736 default:
3737 gcc_unreachable ();
3740 /* If this is an unsigned comparison, we also know that EXP is
3741 greater than or equal to zero. We base the range tests we make
3742 on that fact, so we record it here so we can parse existing
3743 range tests. We test arg0_type since often the return type
3744 of, e.g. EQ_EXPR, is boolean. */
3745 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3747 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3748 in_p, low, high, 1,
3749 fold_convert (arg0_type, integer_zero_node),
3750 NULL_TREE))
3751 break;
3753 in_p = n_in_p, low = n_low, high = n_high;
3755 /* If the high bound is missing, but we have a nonzero low
3756 bound, reverse the range so it goes from zero to the low bound
3757 minus 1. */
3758 if (high == 0 && low && ! integer_zerop (low))
3760 in_p = ! in_p;
3761 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3762 integer_one_node, 0);
3763 low = fold_convert (arg0_type, integer_zero_node);
3767 exp = arg0;
3768 continue;
3770 case NEGATE_EXPR:
3771 /* (-x) IN [a,b] -> x in [-b, -a] */
3772 n_low = range_binop (MINUS_EXPR, exp_type,
3773 fold_convert (exp_type, integer_zero_node),
3774 0, high, 1);
3775 n_high = range_binop (MINUS_EXPR, exp_type,
3776 fold_convert (exp_type, integer_zero_node),
3777 0, low, 0);
3778 low = n_low, high = n_high;
3779 exp = arg0;
3780 continue;
3782 case BIT_NOT_EXPR:
3783 /* ~ X -> -X - 1 */
3784 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3785 fold_convert (exp_type, integer_one_node));
3786 continue;
3788 case PLUS_EXPR: case MINUS_EXPR:
3789 if (TREE_CODE (arg1) != INTEGER_CST)
3790 break;
3792 /* If EXP is signed, any overflow in the computation is undefined,
3793 so we don't worry about it so long as our computations on
3794 the bounds don't overflow. For unsigned, overflow is defined
3795 and this is exactly the right thing. */
3796 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3797 arg0_type, low, 0, arg1, 0);
3798 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3799 arg0_type, high, 1, arg1, 0);
3800 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3801 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3802 break;
3804 /* Check for an unsigned range which has wrapped around the maximum
3805 value thus making n_high < n_low, and normalize it. */
3806 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3808 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3809 integer_one_node, 0);
3810 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3811 integer_one_node, 0);
3813 /* If the range is of the form +/- [ x+1, x ], we won't
3814 be able to normalize it. But then, it represents the
3815 whole range or the empty set, so make it
3816 +/- [ -, - ]. */
3817 if (tree_int_cst_equal (n_low, low)
3818 && tree_int_cst_equal (n_high, high))
3819 low = high = 0;
3820 else
3821 in_p = ! in_p;
3823 else
3824 low = n_low, high = n_high;
3826 exp = arg0;
3827 continue;
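/* Worked example (illustrative): let X be unsigned char and suppose we
are parsing "X + 10 in [5, 250]". The code above computes
n_low = 5 - 10 = 251 (wrapping) and n_high = 250 - 10 = 240, so
n_high < n_low. Normalizing gives low = 241, high = 250 with IN_P
inverted: the test becomes "X not in [241, 250]", and indeed exactly
those ten values make X + 10 wrap outside [5, 250]. */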
3829 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3830 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3831 break;
3833 if (! INTEGRAL_TYPE_P (arg0_type)
3834 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3835 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3836 break;
3838 n_low = low, n_high = high;
3840 if (n_low != 0)
3841 n_low = fold_convert (arg0_type, n_low);
3843 if (n_high != 0)
3844 n_high = fold_convert (arg0_type, n_high);
3847 /* If we're converting arg0 from an unsigned type to exp,
3848 a signed type, we will be doing the comparison as unsigned.
3849 The tests above have already verified that LOW and HIGH
3850 are both positive.
3852 So we have to ensure that we will handle large unsigned
3853 values the same way that the current signed bounds treat
3854 negative values. */
3856 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3858 tree high_positive;
3859 tree equiv_type = lang_hooks.types.type_for_mode
3860 (TYPE_MODE (arg0_type), 1);
3862 /* A range without an upper bound is, naturally, unbounded.
3863 Since convert would have cropped a very large value, use
3864 the max value for the destination type. */
3865 high_positive
3866 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3867 : TYPE_MAX_VALUE (arg0_type);
3869 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3870 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3871 fold_convert (arg0_type,
3872 high_positive),
3873 fold_convert (arg0_type,
3874 integer_one_node));
3876 /* If the low bound is specified, "and" the range with the
3877 range for which the original unsigned value will be
3878 positive. */
3879 if (low != 0)
3881 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3882 1, n_low, n_high, 1,
3883 fold_convert (arg0_type,
3884 integer_zero_node),
3885 high_positive))
3886 break;
3888 in_p = (n_in_p == in_p);
3890 else
3892 /* Otherwise, "or" the range with the range of the input
3893 that will be interpreted as negative. */
3894 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3895 0, n_low, n_high, 1,
3896 fold_convert (arg0_type,
3897 integer_zero_node),
3898 high_positive))
3899 break;
3901 in_p = (in_p != n_in_p);
3905 exp = arg0;
3906 low = n_low, high = n_high;
3907 continue;
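/* Worked example (illustrative): parsing "(int) U < 0" for unsigned int
U. The comparison above leaves "(int) U not in [0, -]". Stepping
through this conversion, HIGH_POSITIVE is 0x7fffffff, and "and"ing
[0, -] with [0, 0x7fffffff] yields "U not in [0, 0x7fffffff]", i.e.
U > INT_MAX, precisely the unsigned values whose sign bit is set. */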
3909 default:
3910 break;
3913 break;
3916 /* If EXP is a constant, we can evaluate whether this is true or false. */
3917 if (TREE_CODE (exp) == INTEGER_CST)
3919 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3920 exp, 0, low, 0))
3921 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3922 exp, 1, high, 1)));
3923 low = high = 0;
3924 exp = 0;
3927 *pin_p = in_p, *plow = low, *phigh = high;
3928 return exp;
3931 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3932 type, TYPE, return an expression to test if EXP is in (or out of, depending
3933 on IN_P) the range. Return 0 if the test couldn't be created. */
3935 static tree
3936 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3938 tree etype = TREE_TYPE (exp);
3939 tree value;
3941 #ifdef HAVE_canonicalize_funcptr_for_compare
3942 /* Disable this optimization for function pointer expressions
3943 on targets that require function pointer canonicalization. */
3944 if (HAVE_canonicalize_funcptr_for_compare
3945 && TREE_CODE (etype) == POINTER_TYPE
3946 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3947 return NULL_TREE;
3948 #endif
3950 if (! in_p)
3952 value = build_range_check (type, exp, 1, low, high);
3953 if (value != 0)
3954 return invert_truthvalue (value);
3956 return 0;
3959 if (low == 0 && high == 0)
3960 return fold_convert (type, integer_one_node);
3962 if (low == 0)
3963 return fold_build2 (LE_EXPR, type, exp,
3964 fold_convert (etype, high));
3966 if (high == 0)
3967 return fold_build2 (GE_EXPR, type, exp,
3968 fold_convert (etype, low));
3970 if (operand_equal_p (low, high, 0))
3971 return fold_build2 (EQ_EXPR, type, exp,
3972 fold_convert (etype, low));
3974 if (integer_zerop (low))
3976 if (! TYPE_UNSIGNED (etype))
3978 etype = lang_hooks.types.unsigned_type (etype);
3979 high = fold_convert (etype, high);
3980 exp = fold_convert (etype, exp);
3982 return build_range_check (type, exp, 1, 0, high);
3985 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3986 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3988 unsigned HOST_WIDE_INT lo;
3989 HOST_WIDE_INT hi;
3990 int prec;
3992 prec = TYPE_PRECISION (etype);
3993 if (prec <= HOST_BITS_PER_WIDE_INT)
3995 hi = 0;
3996 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3998 else
4000 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4001 lo = (unsigned HOST_WIDE_INT) -1;
4004 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4006 if (TYPE_UNSIGNED (etype))
4008 etype = lang_hooks.types.signed_type (etype);
4009 exp = fold_convert (etype, exp);
4011 return fold_build2 (GT_EXPR, type, exp,
4012 fold_convert (etype, integer_zero_node));
4016 value = const_binop (MINUS_EXPR, high, low, 0);
4017 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4018 && ! TYPE_UNSIGNED (etype))
4020 tree utype, minv, maxv;
4022 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4023 for the type in question, as we rely on this here. */
4024 switch (TREE_CODE (etype))
4026 case INTEGER_TYPE:
4027 case ENUMERAL_TYPE:
4028 case CHAR_TYPE:
4029 /* There is no requirement that LOW be within the range of ETYPE
4030 if the latter is a subtype. It must, however, be within the base
4031 type of ETYPE. So be sure we do the subtraction in that type. */
4032 if (TREE_TYPE (etype))
4033 etype = TREE_TYPE (etype);
4034 utype = lang_hooks.types.unsigned_type (etype);
4035 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4036 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4037 integer_one_node, 1);
4038 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4039 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4040 minv, 1, maxv, 1)))
4042 etype = utype;
4043 high = fold_convert (etype, high);
4044 low = fold_convert (etype, low);
4045 exp = fold_convert (etype, exp);
4046 value = const_binop (MINUS_EXPR, high, low, 0);
4048 break;
4049 default:
4050 break;
4054 if (value != 0 && ! TREE_OVERFLOW (value))
4055 return build_range_check (type,
4056 fold_build2 (MINUS_EXPR, etype, exp, low),
4057 1, fold_convert (etype, integer_zero_node),
4058 value);
4060 return 0;
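/* Illustrative sketch of the subtraction trick used just above,
assuming the usual modulo wraparound of unsigned arithmetic: a
two-ended range test collapses into one unsigned comparison,

low <= x && x <= high
becomes
(unsigned) (x - low) <= (unsigned) (high - low)

since subtracting LOW shifts the range to start at zero, and any X
below LOW wraps around to a value larger than HIGH - LOW. */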
4063 /* Given two ranges, see if we can merge them into one. Return 1 if we
4064 can, 0 if we can't. Set the output range into the specified parameters. */
4066 static int
4067 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4068 tree high0, int in1_p, tree low1, tree high1)
4070 int no_overlap;
4071 int subset;
4072 int temp;
4073 tree tem;
4074 int in_p;
4075 tree low, high;
4076 int lowequal = ((low0 == 0 && low1 == 0)
4077 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4078 low0, 0, low1, 0)));
4079 int highequal = ((high0 == 0 && high1 == 0)
4080 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4081 high0, 1, high1, 1)));
4083 /* Make range 0 be the range that starts first, or ends last if they
4084 start at the same value. Swap them if that isn't the case. */
4085 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4086 low0, 0, low1, 0))
4087 || (lowequal
4088 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4089 high1, 1, high0, 1))))
4091 temp = in0_p, in0_p = in1_p, in1_p = temp;
4092 tem = low0, low0 = low1, low1 = tem;
4093 tem = high0, high0 = high1, high1 = tem;
4096 /* Now flag two cases, whether the ranges are disjoint or whether the
4097 second range is totally subsumed in the first. Note that the tests
4098 below are simplified by the ones above. */
4099 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4100 high0, 1, low1, 0));
4101 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4102 high1, 1, high0, 1));
4104 /* We now have four cases, depending on whether we are including or
4105 excluding the two ranges. */
4106 if (in0_p && in1_p)
4108 /* If they don't overlap, the result is false. If the second range
4109 is a subset it is the result. Otherwise, the range is from the start
4110 of the second to the end of the first. */
4111 if (no_overlap)
4112 in_p = 0, low = high = 0;
4113 else if (subset)
4114 in_p = 1, low = low1, high = high1;
4115 else
4116 in_p = 1, low = low1, high = high0;
4119 else if (in0_p && ! in1_p)
4121 /* If they don't overlap, the result is the first range. If they are
4122 equal, the result is false. If the second range is a subset of the
4123 first, and the ranges begin at the same place, we go from just after
4124 the end of the first range to the end of the second. If the second
4125 range is not a subset of the first, or if it is a subset and both
4126 ranges end at the same place, the range starts at the start of the
4127 first range and ends just before the second range.
4128 Otherwise, we can't describe this as a single range. */
4129 if (no_overlap)
4130 in_p = 1, low = low0, high = high0;
4131 else if (lowequal && highequal)
4132 in_p = 0, low = high = 0;
4133 else if (subset && lowequal)
4135 in_p = 1, high = high0;
4136 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4137 integer_one_node, 0);
4139 else if (! subset || highequal)
4141 in_p = 1, low = low0;
4142 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4143 integer_one_node, 0);
4145 else
4146 return 0;
4149 else if (! in0_p && in1_p)
4151 /* If they don't overlap, the result is the second range. If the second
4152 is a subset of the first, the result is false. Otherwise,
4153 the range starts just after the first range and ends at the
4154 end of the second. */
4155 if (no_overlap)
4156 in_p = 1, low = low1, high = high1;
4157 else if (subset || highequal)
4158 in_p = 0, low = high = 0;
4159 else
4161 in_p = 1, high = high1;
4162 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4163 integer_one_node, 0);
4167 else
4169 /* The case where we are excluding both ranges. Here the complex case
4170 is if they don't overlap. In that case, the only time we have a
4171 range is if they are adjacent. If the second is a subset of the
4172 first, the result is the first. Otherwise, the range to exclude
4173 starts at the beginning of the first range and ends at the end of the
4174 second. */
4175 if (no_overlap)
4177 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4178 range_binop (PLUS_EXPR, NULL_TREE,
4179 high0, 1,
4180 integer_one_node, 1),
4181 1, low1, 0)))
4182 in_p = 0, low = low0, high = high1;
4183 else
4185 /* Canonicalize - [min, x] into - [-, x]. */
4186 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4187 switch (TREE_CODE (TREE_TYPE (low0)))
4189 case ENUMERAL_TYPE:
4190 if (TYPE_PRECISION (TREE_TYPE (low0))
4191 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4192 break;
4193 /* FALLTHROUGH */
4194 case INTEGER_TYPE:
4195 case CHAR_TYPE:
4196 if (tree_int_cst_equal (low0,
4197 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4198 low0 = 0;
4199 break;
4200 case POINTER_TYPE:
4201 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4202 && integer_zerop (low0))
4203 low0 = 0;
4204 break;
4205 default:
4206 break;
4209 /* Canonicalize - [x, max] into - [x, -]. */
4210 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4211 switch (TREE_CODE (TREE_TYPE (high1)))
4213 case ENUMERAL_TYPE:
4214 if (TYPE_PRECISION (TREE_TYPE (high1))
4215 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4216 break;
4217 /* FALLTHROUGH */
4218 case INTEGER_TYPE:
4219 case CHAR_TYPE:
4220 if (tree_int_cst_equal (high1,
4221 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4222 high1 = 0;
4223 break;
4224 case POINTER_TYPE:
4225 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4226 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4227 high1, 1,
4228 integer_one_node, 1)))
4229 high1 = 0;
4230 break;
4231 default:
4232 break;
4235 /* The ranges might be also adjacent between the maximum and
4236 minimum values of the given type. For
4237 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4238 return + [x + 1, y - 1]. */
4239 if (low0 == 0 && high1 == 0)
4241 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4242 integer_one_node, 1);
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4244 integer_one_node, 0);
4245 if (low == 0 || high == 0)
4246 return 0;
4248 in_p = 1;
4250 else
4251 return 0;
4254 else if (subset)
4255 in_p = 0, low = low0, high = high0;
4256 else
4257 in_p = 0, low = low0, high = high1;
4260 *pin_p = in_p, *plow = low, *phigh = high;
4261 return 1;
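/* Small worked example (illustrative): merging "+ [0, 9]" with
"+ [5, 20]", as for the AND of two range tests. Range 0 starts
first, the ranges overlap, and neither is a subset of the other, so
the in0_p && in1_p case above produces "+ [5, 9]": the intersection,
running from the start of the second range to the end of the
first. */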
4265 /* Subroutine of fold, looking inside expressions of the form
4266 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4267 of the COND_EXPR. This function is being used also to optimize
4268 A op B ? C : A, by reversing the comparison first.
4270 Return a folded expression whose code is not a COND_EXPR
4271 anymore, or NULL_TREE if no folding opportunity is found. */
4273 static tree
4274 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4276 enum tree_code comp_code = TREE_CODE (arg0);
4277 tree arg00 = TREE_OPERAND (arg0, 0);
4278 tree arg01 = TREE_OPERAND (arg0, 1);
4279 tree arg1_type = TREE_TYPE (arg1);
4280 tree tem;
4282 STRIP_NOPS (arg1);
4283 STRIP_NOPS (arg2);
4285 /* If we have A op 0 ? A : -A, consider applying the following
4286 transformations:
4288 A == 0? A : -A same as -A
4289 A != 0? A : -A same as A
4290 A >= 0? A : -A same as abs (A)
4291 A > 0? A : -A same as abs (A)
4292 A <= 0? A : -A same as -abs (A)
4293 A < 0? A : -A same as -abs (A)
4295 None of these transformations work for modes with signed
4296 zeros. If A is +/-0, the first two transformations will
4297 change the sign of the result (from +0 to -0, or vice
4298 versa). The last four will fix the sign of the result,
4299 even though the original expressions could be positive or
4300 negative, depending on the sign of A.
4302 Note that all these transformations are correct if A is
4303 NaN, since the two alternatives (A and -A) are also NaNs. */
4304 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4305 ? real_zerop (arg01)
4306 : integer_zerop (arg01))
4307 && ((TREE_CODE (arg2) == NEGATE_EXPR
4308 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4309 /* In the case that A is of the form X-Y, '-A' (arg2) may
4310 have already been folded to Y-X, check for that. */
4311 || (TREE_CODE (arg1) == MINUS_EXPR
4312 && TREE_CODE (arg2) == MINUS_EXPR
4313 && operand_equal_p (TREE_OPERAND (arg1, 0),
4314 TREE_OPERAND (arg2, 1), 0)
4315 && operand_equal_p (TREE_OPERAND (arg1, 1),
4316 TREE_OPERAND (arg2, 0), 0))))
4317 switch (comp_code)
4319 case EQ_EXPR:
4320 case UNEQ_EXPR:
4321 tem = fold_convert (arg1_type, arg1);
4322 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4323 case NE_EXPR:
4324 case LTGT_EXPR:
4325 return pedantic_non_lvalue (fold_convert (type, arg1));
4326 case UNGE_EXPR:
4327 case UNGT_EXPR:
4328 if (flag_trapping_math)
4329 break;
4330 /* Fall through. */
4331 case GE_EXPR:
4332 case GT_EXPR:
4333 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4334 arg1 = fold_convert (lang_hooks.types.signed_type
4335 (TREE_TYPE (arg1)), arg1);
4336 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4337 return pedantic_non_lvalue (fold_convert (type, tem));
4338 case UNLE_EXPR:
4339 case UNLT_EXPR:
4340 if (flag_trapping_math)
4341 break;
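/* Fall through. */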
4342 case LE_EXPR:
4343 case LT_EXPR:
4344 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4345 arg1 = fold_convert (lang_hooks.types.signed_type
4346 (TREE_TYPE (arg1)), arg1);
4347 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4348 return negate_expr (fold_convert (type, tem));
4349 default:
4350 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4351 break;
4354 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4355 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4356 both transformations are correct when A is NaN: A != 0
4357 is then true, and A == 0 is false. */
4359 if (integer_zerop (arg01) && integer_zerop (arg2))
4361 if (comp_code == NE_EXPR)
4362 return pedantic_non_lvalue (fold_convert (type, arg1));
4363 else if (comp_code == EQ_EXPR)
4364 return fold_convert (type, integer_zero_node);
4367 /* Try some transformations of A op B ? A : B.
4369 A == B? A : B same as B
4370 A != B? A : B same as A
4371 A >= B? A : B same as max (A, B)
4372 A > B? A : B same as max (B, A)
4373 A <= B? A : B same as min (A, B)
4374 A < B? A : B same as min (B, A)
4376 As above, these transformations don't work in the presence
4377 of signed zeros. For example, if A and B are zeros of
4378 opposite sign, the first two transformations will change
4379 the sign of the result. In the last four, the original
4380 expressions give different results for (A=+0, B=-0) and
4381 (A=-0, B=+0), but the transformed expressions do not.
4383 The first two transformations are correct if either A or B
4384 is a NaN. In the first transformation, the condition will
4385 be false, and B will indeed be chosen. In the case of the
4386 second transformation, the condition A != B will be true,
4387 and A will be chosen.
4389 The conversions to max() and min() are not correct if B is
4390 a number and A is not. The conditions in the original
4391 expressions will be false, so all four give B. The min()
4392 and max() versions would give a NaN instead. */
4393 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4394 /* Avoid these transformations if the COND_EXPR may be used
4395 as an lvalue in the C++ front-end. PR c++/19199. */
4396 && (in_gimple_form
4397 || strcmp (lang_hooks.name, "GNU C++") != 0
4398 || ! maybe_lvalue_p (arg1)
4399 || ! maybe_lvalue_p (arg2)))
4401 tree comp_op0 = arg00;
4402 tree comp_op1 = arg01;
4403 tree comp_type = TREE_TYPE (comp_op0);
4405 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4406 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4408 comp_type = type;
4409 comp_op0 = arg1;
4410 comp_op1 = arg2;
4413 switch (comp_code)
4415 case EQ_EXPR:
4416 return pedantic_non_lvalue (fold_convert (type, arg2));
4417 case NE_EXPR:
4418 return pedantic_non_lvalue (fold_convert (type, arg1));
4419 case LE_EXPR:
4420 case LT_EXPR:
4421 case UNLE_EXPR:
4422 case UNLT_EXPR:
4423 /* In C++ a ?: expression can be an lvalue, so put the
4424 operand which will be used if they are equal first
4425 so that we can convert this back to the
4426 corresponding COND_EXPR. */
4427 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4429 comp_op0 = fold_convert (comp_type, comp_op0);
4430 comp_op1 = fold_convert (comp_type, comp_op1);
4431 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4432 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4433 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4434 return pedantic_non_lvalue (fold_convert (type, tem));
4436 break;
4437 case GE_EXPR:
4438 case GT_EXPR:
4439 case UNGE_EXPR:
4440 case UNGT_EXPR:
4441 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4443 comp_op0 = fold_convert (comp_type, comp_op0);
4444 comp_op1 = fold_convert (comp_type, comp_op1);
4445 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4446 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4447 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4448 return pedantic_non_lvalue (fold_convert (type, tem));
4450 break;
4451 case UNEQ_EXPR:
4452 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4453 return pedantic_non_lvalue (fold_convert (type, arg2));
4454 break;
4455 case LTGT_EXPR:
4456 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4457 return pedantic_non_lvalue (fold_convert (type, arg1));
4458 break;
4459 default:
4460 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4461 break;
4465 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4466 we might still be able to simplify this. For example,
4467 if C1 is one less or one more than C2, this might have started
4468 out as a MIN or MAX and been transformed by this function.
4469 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
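/* For instance (illustrative): "x < 5 ? x : 4" has C1 == C2 + 1, so
the LT_EXPR arm below rebuilds it as MIN (x, 4); likewise
"x > 4 ? x : 5" has C1 == C2 - 1 and becomes MAX (x, 5). */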
4471 if (INTEGRAL_TYPE_P (type)
4472 && TREE_CODE (arg01) == INTEGER_CST
4473 && TREE_CODE (arg2) == INTEGER_CST)
4474 switch (comp_code)
4476 case EQ_EXPR:
4477 /* We can replace A with C1 in this case. */
4478 arg1 = fold_convert (type, arg01);
4479 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4481 case LT_EXPR:
4482 /* If C1 is C2 + 1, this is min(A, C2). */
4483 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4484 OEP_ONLY_CONST)
4485 && operand_equal_p (arg01,
4486 const_binop (PLUS_EXPR, arg2,
4487 integer_one_node, 0),
4488 OEP_ONLY_CONST))
4489 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4490 type, arg1, arg2));
4491 break;
4493 case LE_EXPR:
4494 /* If C1 is C2 - 1, this is min(A, C2). */
4495 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4496 OEP_ONLY_CONST)
4497 && operand_equal_p (arg01,
4498 const_binop (MINUS_EXPR, arg2,
4499 integer_one_node, 0),
4500 OEP_ONLY_CONST))
4501 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4502 type, arg1, arg2));
4503 break;
4505 case GT_EXPR:
4506 /* If C1 is C2 - 1, this is max(A, C2). */
4507 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4508 OEP_ONLY_CONST)
4509 && operand_equal_p (arg01,
4510 const_binop (MINUS_EXPR, arg2,
4511 integer_one_node, 0),
4512 OEP_ONLY_CONST))
4513 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4514 type, arg1, arg2));
4515 break;
4517 case GE_EXPR:
4518 /* If C1 is C2 + 1, this is max(A, C2). */
4519 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4520 OEP_ONLY_CONST)
4521 && operand_equal_p (arg01,
4522 const_binop (PLUS_EXPR, arg2,
4523 integer_one_node, 0),
4524 OEP_ONLY_CONST))
4525 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4526 type, arg1, arg2));
4527 break;
4528 case NE_EXPR:
4529 break;
4530 default:
4531 gcc_unreachable ();
4534 return NULL_TREE;
4539 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4540 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4541 #endif
4543 /* EXP is some logical combination of boolean tests. See if we can
4544 merge it into some range test. Return the new tree if so. */
4546 static tree
4547 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4549 int or_op = (code == TRUTH_ORIF_EXPR
4550 || code == TRUTH_OR_EXPR);
4551 int in0_p, in1_p, in_p;
4552 tree low0, low1, low, high0, high1, high;
4553 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4554 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4555 tree tem;
4557 /* If this is an OR operation, invert both sides; we will invert
4558 again at the end. */
4559 if (or_op)
4560 in0_p = ! in0_p, in1_p = ! in1_p;
4562 /* If both expressions are the same, if we can merge the ranges, and we
4563 can build the range test, return it or it inverted. If one of the
4564 ranges is always true or always false, consider it to be the same
4565 expression as the other. */
4566 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4567 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4568 in1_p, low1, high1)
4569 && 0 != (tem = (build_range_check (type,
4570 lhs != 0 ? lhs
4571 : rhs != 0 ? rhs : integer_zero_node,
4572 in_p, low, high))))
4573 return or_op ? invert_truthvalue (tem) : tem;
4575 /* On machines where the branch cost is expensive, if this is a
4576 short-circuited branch and the underlying object on both sides
4577 is the same, make a non-short-circuit operation. */
4578 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4579 && lhs != 0 && rhs != 0
4580 && (code == TRUTH_ANDIF_EXPR
4581 || code == TRUTH_ORIF_EXPR)
4582 && operand_equal_p (lhs, rhs, 0))
4584 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4585 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4586 which cases we can't do this. */
4587 if (simple_operand_p (lhs))
4588 return build2 (code == TRUTH_ANDIF_EXPR
4589 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4590 type, op0, op1);
4592 else if (lang_hooks.decls.global_bindings_p () == 0
4593 && ! CONTAINS_PLACEHOLDER_P (lhs))
4595 tree common = save_expr (lhs);
4597 if (0 != (lhs = build_range_check (type, common,
4598 or_op ? ! in0_p : in0_p,
4599 low0, high0))
4600 && (0 != (rhs = build_range_check (type, common,
4601 or_op ? ! in1_p : in1_p,
4602 low1, high1))))
4603 return build2 (code == TRUTH_ANDIF_EXPR
4604 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4605 type, lhs, rhs);
4609 return 0;
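/* End-to-end example (illustrative): for "ch >= '0' && ch <= '9'",
make_range produces "+ [48, -]" and "+ [-, 57]", merge_ranges
combines them into "+ [48, 57]", and build_range_check then emits
the single comparison "(unsigned char) (ch - 48) <= 9". */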
4612 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4613 bit value. Arrange things so the extra bits will be set to zero if and
4614 only if C is sign-extended to its full width. If MASK is nonzero,
4615 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4617 static tree
4618 unextend (tree c, int p, int unsignedp, tree mask)
4620 tree type = TREE_TYPE (c);
4621 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4622 tree temp;
4624 if (p == modesize || unsignedp)
4625 return c;
4627 /* We work by getting just the sign bit into the low-order bit, then
4628 into the high-order bit, then sign-extend. We then XOR that value
4629 with C. */
4630 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4631 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4633 /* We must use a signed type in order to get an arithmetic right shift.
4634 However, we must also avoid introducing accidental overflows, so that
4635 a subsequent call to integer_zerop will work. Hence we must
4636 do the type conversion here. At this point, the constant is either
4637 zero or one, and the conversion to a signed type can never overflow.
4638 We could get an overflow if this conversion is done anywhere else. */
4639 if (TYPE_UNSIGNED (type))
4640 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4642 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4643 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4644 if (mask != 0)
4645 temp = const_binop (BIT_AND_EXPR, temp,
4646 fold_convert (TREE_TYPE (c), mask), 0);
4647 /* If necessary, convert the type back to match the type of C. */
4648 if (TYPE_UNSIGNED (type))
4649 temp = fold_convert (type, temp);
4651 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
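/* Numeric example (illustrative): with a 32-bit TYPE, P == 8 and
C == 0xff, the steps above compute temp == 0xffffff00 (the sign bit
of the 8-bit field smeared leftward by the two shifts), so the
result is 0xff ^ 0xffffff00 == 0xffffffff: the extra bits are all
ones because 0xff is a negative 8-bit value that was not
sign-extended. Starting from C == 0xffffffff instead yields
0x000000ff, with the extra bits zero. */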
4654 /* Find ways of folding logical expressions of LHS and RHS:
4655 Try to merge two comparisons to the same innermost item.
4656 Look for range tests like "ch >= '0' && ch <= '9'".
4657 Look for combinations of simple terms on machines with expensive branches
4658 and evaluate the RHS unconditionally.
4660 For example, if we have p->a == 2 && p->b == 4 and we can make an
4661 object large enough to span both A and B, we can do this with a comparison
4662 against the object ANDed with a mask.
4664 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4665 operations to do this with one comparison.
4667 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4668 function and the one above.
4670 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4671 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4673 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4674 two operands.
4676 We return the simplified tree or 0 if no optimization is possible. */
4678 static tree
4679 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4681 /* If this is the "or" of two comparisons, we can do something if
4682 the comparisons are NE_EXPR. If this is the "and", we can do something
4683 if the comparisons are EQ_EXPR. I.e.,
4684 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4686 WANTED_CODE is this operation code. For single bit fields, we can
4687 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4688 comparison for one-bit fields. */
4690 enum tree_code wanted_code;
4691 enum tree_code lcode, rcode;
4692 tree ll_arg, lr_arg, rl_arg, rr_arg;
4693 tree ll_inner, lr_inner, rl_inner, rr_inner;
4694 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4695 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4696 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4697 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4698 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4699 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4700 enum machine_mode lnmode, rnmode;
4701 tree ll_mask, lr_mask, rl_mask, rr_mask;
4702 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4703 tree l_const, r_const;
4704 tree lntype, rntype, result;
4705 int first_bit, end_bit;
4706 int volatilep;
4708 /* Start by getting the comparison codes. Fail if anything is volatile.
4709 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4710 it were surrounded with a NE_EXPR. */
4712 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4713 return 0;
4715 lcode = TREE_CODE (lhs);
4716 rcode = TREE_CODE (rhs);
4718 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4720 lhs = build2 (NE_EXPR, truth_type, lhs,
4721 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4722 lcode = NE_EXPR;
4725 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4727 rhs = build2 (NE_EXPR, truth_type, rhs,
4728 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4729 rcode = NE_EXPR;
4732 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4733 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4734 return 0;
4736 ll_arg = TREE_OPERAND (lhs, 0);
4737 lr_arg = TREE_OPERAND (lhs, 1);
4738 rl_arg = TREE_OPERAND (rhs, 0);
4739 rr_arg = TREE_OPERAND (rhs, 1);
4741 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4742 if (simple_operand_p (ll_arg)
4743 && simple_operand_p (lr_arg))
4745 tree result;
4746 if (operand_equal_p (ll_arg, rl_arg, 0)
4747 && operand_equal_p (lr_arg, rr_arg, 0))
4749 result = combine_comparisons (code, lcode, rcode,
4750 truth_type, ll_arg, lr_arg);
4751 if (result)
4752 return result;
4754 else if (operand_equal_p (ll_arg, rr_arg, 0)
4755 && operand_equal_p (lr_arg, rl_arg, 0))
4757 result = combine_comparisons (code, lcode,
4758 swap_tree_comparison (rcode),
4759 truth_type, ll_arg, lr_arg);
4760 if (result)
4761 return result;
4765 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4766 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4768 /* If the RHS can be evaluated unconditionally and its operands are
4769 simple, it wins to evaluate the RHS unconditionally on machines
4770 with expensive branches. In this case, this isn't a comparison
4771 that can be merged. Avoid doing this if the RHS is a floating-point
4772 comparison since those can trap. */
4774 if (BRANCH_COST >= 2
4775 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4776 && simple_operand_p (rl_arg)
4777 && simple_operand_p (rr_arg))
4779 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4780 if (code == TRUTH_OR_EXPR
4781 && lcode == NE_EXPR && integer_zerop (lr_arg)
4782 && rcode == NE_EXPR && integer_zerop (rr_arg)
4783 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4784 return build2 (NE_EXPR, truth_type,
4785 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4786 ll_arg, rl_arg),
4787 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4789 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4790 if (code == TRUTH_AND_EXPR
4791 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4792 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4793 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4794 return build2 (EQ_EXPR, truth_type,
4795 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4796 ll_arg, rl_arg),
4797 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4799 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4800 return build2 (code, truth_type, lhs, rhs);
4803 /* See if the comparisons can be merged. Then get all the parameters for
4804 each side. */
4806 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4807 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4808 return 0;
4810 volatilep = 0;
4811 ll_inner = decode_field_reference (ll_arg,
4812 &ll_bitsize, &ll_bitpos, &ll_mode,
4813 &ll_unsignedp, &volatilep, &ll_mask,
4814 &ll_and_mask);
4815 lr_inner = decode_field_reference (lr_arg,
4816 &lr_bitsize, &lr_bitpos, &lr_mode,
4817 &lr_unsignedp, &volatilep, &lr_mask,
4818 &lr_and_mask);
4819 rl_inner = decode_field_reference (rl_arg,
4820 &rl_bitsize, &rl_bitpos, &rl_mode,
4821 &rl_unsignedp, &volatilep, &rl_mask,
4822 &rl_and_mask);
4823 rr_inner = decode_field_reference (rr_arg,
4824 &rr_bitsize, &rr_bitpos, &rr_mode,
4825 &rr_unsignedp, &volatilep, &rr_mask,
4826 &rr_and_mask);
4828 /* The inner operation on the lhs of each comparison must be the
4829 same if we are to be able to do anything.
4830 Then see if we have constants. If not, the same must be true for
4831 the rhs's. */
4832 if (volatilep || ll_inner == 0 || rl_inner == 0
4833 || ! operand_equal_p (ll_inner, rl_inner, 0))
4834 return 0;
4836 if (TREE_CODE (lr_arg) == INTEGER_CST
4837 && TREE_CODE (rr_arg) == INTEGER_CST)
4838 l_const = lr_arg, r_const = rr_arg;
4839 else if (lr_inner == 0 || rr_inner == 0
4840 || ! operand_equal_p (lr_inner, rr_inner, 0))
4841 return 0;
4842 else
4843 l_const = r_const = 0;
4845 /* If either comparison code is not correct for our logical operation,
4846 fail. However, we can convert a one-bit comparison against zero into
4847 the opposite comparison against that bit being set in the field. */
4849 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4850 if (lcode != wanted_code)
4852 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4854 /* Make the left operand unsigned, since we are only interested
4855 in the value of one bit. Otherwise we are doing the wrong
4856 thing below. */
4857 ll_unsignedp = 1;
4858 l_const = ll_mask;
4860 else
4861 return 0;
4864 /* This is analogous to the code for l_const above. */
4865 if (rcode != wanted_code)
4867 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4869 rl_unsignedp = 1;
4870 r_const = rl_mask;
4872 else
4873 return 0;
4876 /* After this point all optimizations will generate bit-field
4877 references, which we might not want. */
4878 if (! lang_hooks.can_use_bit_fields_p ())
4879 return 0;
4881 /* See if we can find a mode that contains both fields being compared on
4882 the left. If we can't, fail. Otherwise, update all constants and masks
4883 to be relative to a field of that size. */
4884 first_bit = MIN (ll_bitpos, rl_bitpos);
4885 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4886 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4887 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4888 volatilep);
4889 if (lnmode == VOIDmode)
4890 return 0;
4892 lnbitsize = GET_MODE_BITSIZE (lnmode);
4893 lnbitpos = first_bit & ~ (lnbitsize - 1);
4894 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4895 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4897 if (BYTES_BIG_ENDIAN)
4899 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4900 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4903 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4904 size_int (xll_bitpos), 0);
4905 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4906 size_int (xrl_bitpos), 0);
4908 if (l_const)
4910 l_const = fold_convert (lntype, l_const);
4911 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4912 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4913 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4914 fold_build1 (BIT_NOT_EXPR,
4915 lntype, ll_mask),
4916 0)))
4918 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4920 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4923 if (r_const)
4925 r_const = fold_convert (lntype, r_const);
4926 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4927 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4928 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4929 fold_build1 (BIT_NOT_EXPR,
4930 lntype, rl_mask),
4931 0)))
4933 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4935 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4939 /* If the right sides are not constant, do the same for it. Also,
4940 disallow this optimization if a size or signedness mismatch occurs
4941 between the left and right sides. */
4942 if (l_const == 0)
4944 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4945 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4946 /* Make sure the two fields on the right
4947 correspond to the left without being swapped. */
4948 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4949 return 0;
4951 first_bit = MIN (lr_bitpos, rr_bitpos);
4952 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4953 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4954 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4955 volatilep);
4956 if (rnmode == VOIDmode)
4957 return 0;
4959 rnbitsize = GET_MODE_BITSIZE (rnmode);
4960 rnbitpos = first_bit & ~ (rnbitsize - 1);
4961 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4962 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4964 if (BYTES_BIG_ENDIAN)
4966 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4967 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4970 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4971 size_int (xlr_bitpos), 0);
4972 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4973 size_int (xrr_bitpos), 0);
4975 /* Make a mask that corresponds to both fields being compared.
4976 Do this for both items being compared. If the operands are the
4977 same size and the bits being compared are in the same position
4978 then we can do this by masking both and comparing the masked
4979 results. */
4980 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4981 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4982 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4984 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4985 ll_unsignedp || rl_unsignedp);
4986 if (! all_ones_mask_p (ll_mask, lnbitsize))
4987 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4989 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4990 lr_unsignedp || rr_unsignedp);
4991 if (! all_ones_mask_p (lr_mask, rnbitsize))
4992 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4994 return build2 (wanted_code, truth_type, lhs, rhs);
4997 /* There is still another way we can do something: If both pairs of
4998 fields being compared are adjacent, we may be able to make a wider
4999 field containing them both.
5001 Note that we still must mask the lhs/rhs expressions. Furthermore,
5002 the mask must be shifted to account for the shift done by
5003 make_bit_field_ref. */
5004 if ((ll_bitsize + ll_bitpos == rl_bitpos
5005 && lr_bitsize + lr_bitpos == rr_bitpos)
5006 || (ll_bitpos == rl_bitpos + rl_bitsize
5007 && lr_bitpos == rr_bitpos + rr_bitsize))
5009 tree type;
5011 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5012 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5013 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5014 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5016 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5017 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5018 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5019 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5021 /* Convert to the smaller type before masking out unwanted bits. */
5022 type = lntype;
5023 if (lntype != rntype)
5025 if (lnbitsize > rnbitsize)
5027 lhs = fold_convert (rntype, lhs);
5028 ll_mask = fold_convert (rntype, ll_mask);
5029 type = rntype;
5031 else if (lnbitsize < rnbitsize)
5033 rhs = fold_convert (lntype, rhs);
5034 lr_mask = fold_convert (lntype, lr_mask);
5035 type = lntype;
5039 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5040 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5042 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5043 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5045 return build2 (wanted_code, truth_type, lhs, rhs);
5048 return 0;
5051 /* Handle the case of comparisons with constants. If there is something in
5052 common between the masks, those bits of the constants must be the same.
5053 If not, the condition is always false. Test for this to avoid generating
5054 incorrect code below. */
5055 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5056 if (! integer_zerop (result)
5057 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5058 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5060 if (wanted_code == NE_EXPR)
5062 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5063 return constant_boolean_node (true, truth_type);
5065 else
5067 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5068 return constant_boolean_node (false, truth_type);
5072 /* Construct the expression we will return. First get the component
5073 reference we will make. Unless the mask is all ones the width of
5074 that field, perform the mask operation. Then compare with the
5075 merged constant. */
5076 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5077 ll_unsignedp || rl_unsignedp);
5079 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5080 if (! all_ones_mask_p (ll_mask, lnbitsize))
5081 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5083 return build2 (wanted_code, truth_type, result,
5084 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
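/* Sketch of the end result (illustrative, field layout assumed): for
adjacent bit-fields a and b in one word, "p->a == 2 && p->b == 4"
becomes roughly

(word & (ll_mask | rl_mask)) == (l_const | r_const)

where WORD is a single bit-field reference spanning both fields and
the constants have been shifted into their field positions by the
code above. */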
5087 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5088 constant. */
5090 static tree
5091 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5093 tree arg0 = op0;
5094 enum tree_code op_code;
5095 tree comp_const = op1;
5096 tree minmax_const;
5097 int consts_equal, consts_lt;
5098 tree inner;
5100 STRIP_SIGN_NOPS (arg0);
5102 op_code = TREE_CODE (arg0);
5103 minmax_const = TREE_OPERAND (arg0, 1);
5104 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5105 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5106 inner = TREE_OPERAND (arg0, 0);
5108 /* If something does not permit us to optimize, return the original tree. */
5109 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5110 || TREE_CODE (comp_const) != INTEGER_CST
5111 || TREE_CONSTANT_OVERFLOW (comp_const)
5112 || TREE_CODE (minmax_const) != INTEGER_CST
5113 || TREE_CONSTANT_OVERFLOW (minmax_const))
5114 return NULL_TREE;
5116 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5117 and GT_EXPR, doing the rest with recursive calls using logical
5118 simplifications. */
5119 switch (code)
5121 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5123 /* FIXME: We should be able to invert code without building a
5124 scratch tree node, but doing so would require us to
5125 duplicate a part of invert_truthvalue here. */
5126 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5127 tem = optimize_minmax_comparison (TREE_CODE (tem),
5128 TREE_TYPE (tem),
5129 TREE_OPERAND (tem, 0),
5130 TREE_OPERAND (tem, 1));
5131 return invert_truthvalue (tem);
5134 case GE_EXPR:
5135 return
5136 fold_build2 (TRUTH_ORIF_EXPR, type,
5137 optimize_minmax_comparison
5138 (EQ_EXPR, type, arg0, comp_const),
5139 optimize_minmax_comparison
5140 (GT_EXPR, type, arg0, comp_const));
5142 case EQ_EXPR:
5143 if (op_code == MAX_EXPR && consts_equal)
5144 /* MAX (X, 0) == 0 -> X <= 0 */
5145 return fold_build2 (LE_EXPR, type, inner, comp_const);
5147 else if (op_code == MAX_EXPR && consts_lt)
5148 /* MAX (X, 0) == 5 -> X == 5 */
5149 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5151 else if (op_code == MAX_EXPR)
5152 /* MAX (X, 0) == -1 -> false */
5153 return omit_one_operand (type, integer_zero_node, inner);
5155 else if (consts_equal)
5156 /* MIN (X, 0) == 0 -> X >= 0 */
5157 return fold_build2 (GE_EXPR, type, inner, comp_const);
5159 else if (consts_lt)
5160 /* MIN (X, 0) == 5 -> false */
5161 return omit_one_operand (type, integer_zero_node, inner);
5163 else
5164 /* MIN (X, 0) == -1 -> X == -1 */
5165 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5167 case GT_EXPR:
5168 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5169 /* MAX (X, 0) > 0 -> X > 0
5170 MAX (X, 0) > 5 -> X > 5 */
5171 return fold_build2 (GT_EXPR, type, inner, comp_const);
5173 else if (op_code == MAX_EXPR)
5174 /* MAX (X, 0) > -1 -> true */
5175 return omit_one_operand (type, integer_one_node, inner);
5177 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5178 /* MIN (X, 0) > 0 -> false
5179 MIN (X, 0) > 5 -> false */
5180 return omit_one_operand (type, integer_zero_node, inner);
5182 else
5183 /* MIN (X, 0) > -1 -> X > -1 */
5184 return fold_build2 (GT_EXPR, type, inner, comp_const);
5186 default:
5187 return NULL_TREE;
5191 /* T is an integer expression that is being multiplied, divided, or taken a
5192 modulus (CODE says which and what kind of divide or modulus) by a
5193 constant C. See if we can eliminate that operation by folding it with
5194 other operations already in T. WIDE_TYPE, if non-null, is a type that
5195 should be used for the computation if wider than our type.
5197 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5198 (X * 2) + (Y * 4). We must, however, be assured that either the original
5199 expression would not overflow or that overflow is undefined for the type
5200 in the language in question.
5202 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5203 the machine has a multiply-accumulate insn or that this is part of an
5204 addressing calculation.
5206 If we return a non-null expression, it is an equivalent form of the
5207 original computation, but need not be in the original type. */
5209 static tree
5210 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5212 /* To avoid exponential search depth, refuse to allow recursion past
5213 three levels. Beyond that (1) it's highly unlikely that we'll find
5214 something interesting and (2) we've probably processed it before
5215 when we built the inner expression. */
5217 static int depth;
5218 tree ret;
5220 if (depth > 3)
5221 return NULL;
5223 depth++;
5224 ret = extract_muldiv_1 (t, c, code, wide_type);
5225 depth--;
5227 return ret;
5230 static tree
5231 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5233 tree type = TREE_TYPE (t);
5234 enum tree_code tcode = TREE_CODE (t);
5235 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5236 > GET_MODE_SIZE (TYPE_MODE (type)))
5237 ? wide_type : type);
5238 tree t1, t2;
5239 int same_p = tcode == code;
5240 tree op0 = NULL_TREE, op1 = NULL_TREE;
5242 /* Don't deal with constants of zero here; they confuse the code below. */
5243 if (integer_zerop (c))
5244 return NULL_TREE;
5246 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5247 op0 = TREE_OPERAND (t, 0);
5249 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5250 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5252 /* Note that we need not handle conditional operations here since fold
5253 already handles those cases. So just do arithmetic here. */
5254 switch (tcode)
5256 case INTEGER_CST:
5257 /* For a constant, we can always simplify if we are a multiply
5258 or (for divide and modulus) if it is a multiple of our constant. */
5259 if (code == MULT_EXPR
5260 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5261 return const_binop (code, fold_convert (ctype, t),
5262 fold_convert (ctype, c), 0);
5263 break;
5265 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5266 /* If op0 is an expression ... */
5267 if ((COMPARISON_CLASS_P (op0)
5268 || UNARY_CLASS_P (op0)
5269 || BINARY_CLASS_P (op0)
5270 || EXPRESSION_CLASS_P (op0))
5271 /* ... and is unsigned, and its type is smaller than ctype,
5272 then we cannot pass through as widening. */
5273 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5274 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5275 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5276 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5277 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5278 /* ... or this is a truncation (t is narrower than op0),
5279 then we cannot pass through this narrowing. */
5280 || (GET_MODE_SIZE (TYPE_MODE (type))
5281 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5282 /* ... or signedness changes for division or modulus,
5283 then we cannot pass through this conversion. */
5284 || (code != MULT_EXPR
5285 && (TYPE_UNSIGNED (ctype)
5286 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5287 break;
5289 /* Pass the constant down and see if we can make a simplification. If
5290 we can, replace this expression with the inner simplification for
5291 possible later conversion to our or some other type. */
5292 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5293 && TREE_CODE (t2) == INTEGER_CST
5294 && ! TREE_CONSTANT_OVERFLOW (t2)
5295 && (0 != (t1 = extract_muldiv (op0, t2, code,
5296 code == MULT_EXPR
5297 ? ctype : NULL_TREE))))
5298 return t1;
5299 break;
5301 case ABS_EXPR:
5302 /* If widening the type changes it from signed to unsigned, then we
5303 must avoid building ABS_EXPR itself as unsigned. */
5304 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5306 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5307 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5309 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5310 return fold_convert (ctype, t1);
5312 break;
5314 /* FALLTHROUGH */
5315 case NEGATE_EXPR:
5316 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5317 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5318 break;
5320 case MIN_EXPR: case MAX_EXPR:
5321 /* If widening the type changes the signedness, then we can't perform
5322 this optimization as that changes the result. */
5323 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5324 break;
5326 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5327 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5328 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5330 if (tree_int_cst_sgn (c) < 0)
5331 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5333 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5334 fold_convert (ctype, t2));
5336 break;
5338 case LSHIFT_EXPR: case RSHIFT_EXPR:
5339 /* If the second operand is constant, this is a multiplication
5340 or floor division, by a power of two, so we can treat it that
5341 way unless the multiplier or divisor overflows. Signed
5342 left-shift overflow is implementation-defined rather than
5343 undefined in C90, so do not convert signed left shift into
5344 multiplication. */
5345 if (TREE_CODE (op1) == INTEGER_CST
5346 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5347 /* const_binop may not detect overflow correctly,
5348 so check for it explicitly here. */
5349 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5350 && TREE_INT_CST_HIGH (op1) == 0
5351 && 0 != (t1 = fold_convert (ctype,
5352 const_binop (LSHIFT_EXPR,
5353 size_one_node,
5354 op1, 0)))
5355 && ! TREE_OVERFLOW (t1))
5356 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5357 ? MULT_EXPR : FLOOR_DIV_EXPR,
5358 ctype, fold_convert (ctype, op0), t1),
5359 c, code, wide_type);
5360 break;
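/* For example (illustrative): with unsigned X, "X << 3" is rewritten
here as "X * 8", and "X >> 2" as the floor division "X / 4", before
recursing, so that the multiply and divide cases below get a chance
to combine the power of two with C. */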
5362 case PLUS_EXPR: case MINUS_EXPR:
5363 /* See if we can eliminate the operation on both sides. If we can, we
5364 can return a new PLUS or MINUS. If we can't, the only remaining
5365 cases where we can do anything are if the second operand is a
5366 constant. */
5367 t1 = extract_muldiv (op0, c, code, wide_type);
5368 t2 = extract_muldiv (op1, c, code, wide_type);
5369 if (t1 != 0 && t2 != 0
5370 && (code == MULT_EXPR
5371 /* If not multiplication, we can only do this if both operands
5372 are divisible by c. */
5373 || (multiple_of_p (ctype, op0, c)
5374 && multiple_of_p (ctype, op1, c))))
5375 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5376 fold_convert (ctype, t2));
5378 /* If this was a subtraction, negate OP1 and set it to be an addition.
5379 This simplifies the logic below. */
5380 if (tcode == MINUS_EXPR)
5381 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5383 if (TREE_CODE (op1) != INTEGER_CST)
5384 break;
5386 /* If either OP1 or C are negative, this optimization is not safe for
5387 some of the division and remainder types while for others we need
5388 to change the code. */
5389 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5391 if (code == CEIL_DIV_EXPR)
5392 code = FLOOR_DIV_EXPR;
5393 else if (code == FLOOR_DIV_EXPR)
5394 code = CEIL_DIV_EXPR;
5395 else if (code != MULT_EXPR
5396 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5397 break;
5400 /* If it's a multiply or a division/modulus operation of a multiple
5401 of our constant, do the operation and verify it doesn't overflow. */
5402 if (code == MULT_EXPR
5403 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5405 op1 = const_binop (code, fold_convert (ctype, op1),
5406 fold_convert (ctype, c), 0);
5407 /* We allow the constant to overflow with wrapping semantics. */
5408 if (op1 == 0
5409 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5410 break;
5412 else
5413 break;
5415 /* If we have an unsigned type that is not a sizetype, we cannot widen
5416 the operation since it will change the result if the original
5417 computation overflowed. */
5418 if (TYPE_UNSIGNED (ctype)
5419 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5420 && ctype != type)
5421 break;
5423 /* If we were able to eliminate our operation from the first side,
5424 apply our operation to the second side and reform the PLUS. */
5425 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5426 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5428 /* The last case is if we are a multiply. In that case, we can
5429 apply the distributive law to commute the multiply and addition
5430 if the multiplication of the constants doesn't overflow. */
5431 if (code == MULT_EXPR)
5432 return fold_build2 (tcode, ctype,
5433 fold_build2 (code, ctype,
5434 fold_convert (ctype, op0),
5435 fold_convert (ctype, c)),
5436 op1);
5438 break;
5440 case MULT_EXPR:
5441 /* We have a special case here if we are doing something like
5442 (C * 8) % 4 since we know that's zero. */
5443 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5444 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5445 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5446 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5447 return omit_one_operand (type, integer_zero_node, op0);
5449 /* ... fall through ... */
5451 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5452 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5453 /* If we can extract our operation from the LHS, do so and return a
5454 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5455 do something only if the second operand is a constant. */
5456 if (same_p
5457 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5458 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5459 fold_convert (ctype, op1));
5460 else if (tcode == MULT_EXPR && code == MULT_EXPR
5461 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5462 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5463 fold_convert (ctype, t1));
5464 else if (TREE_CODE (op1) != INTEGER_CST)
5465 return 0;
5467 /* If these are the same operation types, we can associate them
5468 assuming no overflow. */
5469 if (tcode == code
5470 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5471 fold_convert (ctype, c), 0))
5472 && ! TREE_OVERFLOW (t1))
5473 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5475 /* If these operations "cancel" each other, we have the main
5476 optimizations of this pass, which occur when either constant is a
5477 multiple of the other, in which case we replace this with either an
5478 operation of CODE or TCODE.
5480 If we have an unsigned type that is not a sizetype, we cannot do
5481 this since it will change the result if the original computation
5482 overflowed. */
5483 if ((! TYPE_UNSIGNED (ctype)
5484 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5485 && ! flag_wrapv
5486 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5487 || (tcode == MULT_EXPR
5488 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5489 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5491 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5492 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5493 fold_convert (ctype,
5494 const_binop (TRUNC_DIV_EXPR,
5495 op1, c, 0)));
5496 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5497 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5498 fold_convert (ctype,
5499 const_binop (TRUNC_DIV_EXPR,
5500 c, op1, 0)));
5502 break;
5504 default:
5505 break;
5508 return 0;
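/* A worked illustration of the cancellations above (a sketch, not
   part of the original sources): assuming 32-bit int operands and
   the default -fno-wrapv semantics,

       (x * 8) / 4   =>   x * 2    since 8 % 4 == 0
       (x * 4) % 4   =>   0        since 4 % 4 == 0

   For an unsigned type that is not a sizetype the division case is
   skipped, because wraparound in the original multiplication could
   have changed the result.  */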
5511 /* Return a node which has the indicated constant VALUE (either 0 or
5512 1), and is of the indicated TYPE. */
5514 tree
5515 constant_boolean_node (int value, tree type)
5517 if (type == integer_type_node)
5518 return value ? integer_one_node : integer_zero_node;
5519 else if (type == boolean_type_node)
5520 return value ? boolean_true_node : boolean_false_node;
5521 else
5522 return build_int_cst (type, value);
5526 /* Return true if expr looks like an ARRAY_REF and set base and
5527 offset to the appropriate trees. If there is no offset,
5528 offset is set to NULL_TREE. Base will be canonicalized to
5529 something you can get the element type from using
5530 TREE_TYPE (TREE_TYPE (base)). */
5532 static bool
5533 extract_array_ref (tree expr, tree *base, tree *offset)
5535 /* One canonical form is a PLUS_EXPR with the first
5536 argument being an ADDR_EXPR with a possible NOP_EXPR
5537 attached. */
5538 if (TREE_CODE (expr) == PLUS_EXPR)
5540 tree op0 = TREE_OPERAND (expr, 0);
5541 tree inner_base, dummy1;
5542 /* Strip NOP_EXPRs here because the C frontends and/or
5543 folders may present us with (int *)&x.a + 4B. */
5544 STRIP_NOPS (op0);
5545 if (extract_array_ref (op0, &inner_base, &dummy1))
5547 *base = inner_base;
5548 if (dummy1 == NULL_TREE)
5549 *offset = TREE_OPERAND (expr, 1);
5550 else
5551 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5552 dummy1, TREE_OPERAND (expr, 1));
5553 return true;
5556 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5557 which we transform into an ADDR_EXPR with appropriate
5558 offset. For other arguments to the ADDR_EXPR we assume
5559 zero offset and as such do not care about the ADDR_EXPR
5560 type and strip possible nops from it. */
5561 else if (TREE_CODE (expr) == ADDR_EXPR)
5563 tree op0 = TREE_OPERAND (expr, 0);
5564 if (TREE_CODE (op0) == ARRAY_REF)
5566 *base = TREE_OPERAND (op0, 0);
5567 *offset = TREE_OPERAND (op0, 1);
5569 else
5571 /* Handle array-to-pointer decay as &a. */
5572 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5573 *base = TREE_OPERAND (expr, 0);
5574 else
5575 *base = expr;
5576 *offset = NULL_TREE;
5578 return true;
5580 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5581 else if (SSA_VAR_P (expr)
5582 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5584 *base = expr;
5585 *offset = NULL_TREE;
5586 return true;
5589 return false;
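/* For illustration (a sketch, not from the original sources), given
   int a[16] and int *p, the shapes recognized above are roughly:

       &a[i]    =>   base a, offset i
       p        =>   base p, offset NULL_TREE
       p + 4    =>   base p, offset 4

   so a caller can compare two accesses by comparing the extracted
   bases and offsets.  */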
5593 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5594 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5595 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5596 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5597 COND is the first argument to CODE; otherwise (as in the example
5598 given here), it is the second argument. TYPE is the type of the
5599 original expression. Return NULL_TREE if no simplification is
5600 possible. */
5602 static tree
5603 fold_binary_op_with_conditional_arg (enum tree_code code,
5604 tree type, tree op0, tree op1,
5605 tree cond, tree arg, int cond_first_p)
5607 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5608 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5609 tree test, true_value, false_value;
5610 tree lhs = NULL_TREE;
5611 tree rhs = NULL_TREE;
5613 /* This transformation is only worthwhile if we don't have to wrap
5614 arg in a SAVE_EXPR, and the operation can be simplified on at least
5615 one of the branches once it's pushed inside the COND_EXPR. */
5616 if (!TREE_CONSTANT (arg))
5617 return NULL_TREE;
5619 if (TREE_CODE (cond) == COND_EXPR)
5621 test = TREE_OPERAND (cond, 0);
5622 true_value = TREE_OPERAND (cond, 1);
5623 false_value = TREE_OPERAND (cond, 2);
5624 /* If this operand throws an exception, then it does not make
5625 sense to try to perform a logical or arithmetic operation
5626 involving it. */
5627 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5628 lhs = true_value;
5629 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5630 rhs = false_value;
5632 else
5634 tree testtype = TREE_TYPE (cond);
5635 test = cond;
5636 true_value = constant_boolean_node (true, testtype);
5637 false_value = constant_boolean_node (false, testtype);
5640 arg = fold_convert (arg_type, arg);
5641 if (lhs == 0)
5643 true_value = fold_convert (cond_type, true_value);
5644 if (cond_first_p)
5645 lhs = fold_build2 (code, type, true_value, arg);
5646 else
5647 lhs = fold_build2 (code, type, arg, true_value);
5649 if (rhs == 0)
5651 false_value = fold_convert (cond_type, false_value);
5652 if (cond_first_p)
5653 rhs = fold_build2 (code, type, false_value, arg);
5654 else
5655 rhs = fold_build2 (code, type, arg, false_value);
5658 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5659 return fold_convert (type, test);
5663 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5665 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5666 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5667 ADDEND is the same as X.
5669 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5670 and finite. The problematic cases are when X is zero, and its mode
5671 has signed zeros. In the case of rounding towards -infinity,
5672 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5673 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5675 static bool
5676 fold_real_zero_addition_p (tree type, tree addend, int negate)
5678 if (!real_zerop (addend))
5679 return false;
5681 /* Don't allow the fold with -fsignaling-nans. */
5682 if (HONOR_SNANS (TYPE_MODE (type)))
5683 return false;
5685 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5686 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5687 return true;
5689 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5690 if (TREE_CODE (addend) == REAL_CST
5691 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5692 negate = !negate;
5694 /* The mode has signed zeros, and we have to honor their sign.
5695 In this situation, there is only one case we can return true for.
5696 X - 0 is the same as X unless rounding towards -infinity is
5697 supported. */
5698 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
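/* As a concrete instance (illustrative only): with the default
   flags, x - 0.0 folds to x but x + 0.0 does not, because when x is
   -0.0 the sum -0.0 + 0.0 is +0.0.  Under -ffast-math both forms
   fold; under -frounding-math even x - 0.0 is left alone, since
   rounding towards -infinity makes 0.0 - 0.0 yield -0.0.  */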
5701 /* Subroutine of fold() that checks comparisons of built-in math
5702 functions against real constants.
5704 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5705 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5706 is the type of the result and ARG0 and ARG1 are the operands of the
5707 comparison. ARG1 must be a TREE_REAL_CST.
5709 The function returns the constant folded tree if a simplification
5710 can be made, and NULL_TREE otherwise. */
5712 static tree
5713 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5714 tree type, tree arg0, tree arg1)
5716 REAL_VALUE_TYPE c;
5718 if (BUILTIN_SQRT_P (fcode))
5720 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5721 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5723 c = TREE_REAL_CST (arg1);
5724 if (REAL_VALUE_NEGATIVE (c))
5726 /* sqrt(x) < y is always false, if y is negative. */
5727 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5728 return omit_one_operand (type, integer_zero_node, arg);
5730 /* sqrt(x) > y is always true, if y is negative and we
5731 don't care about NaNs, i.e. negative values of x. */
5732 if (code == NE_EXPR || !HONOR_NANS (mode))
5733 return omit_one_operand (type, integer_one_node, arg);
5735 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5736 return fold_build2 (GE_EXPR, type, arg,
5737 build_real (TREE_TYPE (arg), dconst0));
5739 else if (code == GT_EXPR || code == GE_EXPR)
5741 REAL_VALUE_TYPE c2;
5743 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5744 real_convert (&c2, mode, &c2);
5746 if (REAL_VALUE_ISINF (c2))
5748 /* sqrt(x) > y is x == +Inf, when y is very large. */
5749 if (HONOR_INFINITIES (mode))
5750 return fold_build2 (EQ_EXPR, type, arg,
5751 build_real (TREE_TYPE (arg), c2));
5753 /* sqrt(x) > y is always false, when y is very large
5754 and we don't care about infinities. */
5755 return omit_one_operand (type, integer_zero_node, arg);
5758 /* sqrt(x) > c is the same as x > c*c. */
5759 return fold_build2 (code, type, arg,
5760 build_real (TREE_TYPE (arg), c2));
5762 else if (code == LT_EXPR || code == LE_EXPR)
5764 REAL_VALUE_TYPE c2;
5766 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5767 real_convert (&c2, mode, &c2);
5769 if (REAL_VALUE_ISINF (c2))
5771 /* sqrt(x) < y is always true, when y is a very large
5772 value and we don't care about NaNs or Infinities. */
5773 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5774 return omit_one_operand (type, integer_one_node, arg);
5776 /* sqrt(x) < y is x != +Inf when y is very large and we
5777 don't care about NaNs. */
5778 if (! HONOR_NANS (mode))
5779 return fold_build2 (NE_EXPR, type, arg,
5780 build_real (TREE_TYPE (arg), c2));
5782 /* sqrt(x) < y is x >= 0 when y is very large and we
5783 don't care about Infinities. */
5784 if (! HONOR_INFINITIES (mode))
5785 return fold_build2 (GE_EXPR, type, arg,
5786 build_real (TREE_TYPE (arg), dconst0));
5788 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5789 if (lang_hooks.decls.global_bindings_p () != 0
5790 || CONTAINS_PLACEHOLDER_P (arg))
5791 return NULL_TREE;
5793 arg = save_expr (arg);
5794 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5795 fold_build2 (GE_EXPR, type, arg,
5796 build_real (TREE_TYPE (arg),
5797 dconst0)),
5798 fold_build2 (NE_EXPR, type, arg,
5799 build_real (TREE_TYPE (arg),
5800 c2)));
5803 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5804 if (! HONOR_NANS (mode))
5805 return fold_build2 (code, type, arg,
5806 build_real (TREE_TYPE (arg), c2));
5808 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5809 if (lang_hooks.decls.global_bindings_p () == 0
5810 && ! CONTAINS_PLACEHOLDER_P (arg))
5812 arg = save_expr (arg);
5813 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5814 fold_build2 (GE_EXPR, type, arg,
5815 build_real (TREE_TYPE (arg),
5816 dconst0)),
5817 fold_build2 (code, type, arg,
5818 build_real (TREE_TYPE (arg),
5819 c2)));
5824 return NULL_TREE;
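/* A worked example of the sqrt cases above (a sketch, assuming a
   double argument):

       sqrt (x) > 2.0    =>   x > 4.0
       sqrt (x) < 2.0    =>   x < 4.0   if NaNs need not be honored,
                              else x >= 0.0 && x < 4.0
       sqrt (x) < -1.0   =>   0 (false)

   The squared constant c*c is computed in the target mode, so an
   overflow to +Inf is detected and handled by the cases above.  */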
5827 /* Subroutine of fold() that optimizes comparisons against Infinities,
5828 either +Inf or -Inf.
5830 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5831 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5832 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5834 The function returns the constant folded tree if a simplification
5835 can be made, and NULL_TREE otherwise. */
5837 static tree
5838 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5840 enum machine_mode mode;
5841 REAL_VALUE_TYPE max;
5842 tree temp;
5843 bool neg;
5845 mode = TYPE_MODE (TREE_TYPE (arg0));
5847 /* For negative infinity swap the sense of the comparison. */
5848 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5849 if (neg)
5850 code = swap_tree_comparison (code);
5852 switch (code)
5854 case GT_EXPR:
5855 /* x > +Inf is always false, if we ignore sNaNs. */
5856 if (HONOR_SNANS (mode))
5857 return NULL_TREE;
5858 return omit_one_operand (type, integer_zero_node, arg0);
5860 case LE_EXPR:
5861 /* x <= +Inf is always true, if we don't care about NaNs. */
5862 if (! HONOR_NANS (mode))
5863 return omit_one_operand (type, integer_one_node, arg0);
5865 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5866 if (lang_hooks.decls.global_bindings_p () == 0
5867 && ! CONTAINS_PLACEHOLDER_P (arg0))
5869 arg0 = save_expr (arg0);
5870 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5872 break;
5874 case EQ_EXPR:
5875 case GE_EXPR:
5876 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5877 real_maxval (&max, neg, mode);
5878 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5879 arg0, build_real (TREE_TYPE (arg0), max));
5881 case LT_EXPR:
5882 /* x < +Inf is always equal to x <= DBL_MAX. */
5883 real_maxval (&max, neg, mode);
5884 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5885 arg0, build_real (TREE_TYPE (arg0), max));
5887 case NE_EXPR:
5888 /* x != +Inf is always equal to !(x > DBL_MAX). */
5889 real_maxval (&max, neg, mode);
5890 if (! HONOR_NANS (mode))
5891 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5892 arg0, build_real (TREE_TYPE (arg0), max));
5894 /* The transformation below creates non-gimple code and thus is
5895 not appropriate if we are in gimple form. */
5896 if (in_gimple_form)
5897 return NULL_TREE;
5899 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5900 arg0, build_real (TREE_TYPE (arg0), max));
5901 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5903 default:
5904 break;
5907 return NULL_TREE;
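/* For illustration (a sketch, assuming double operands):

       x <  +Inf   =>   x <= DBL_MAX
       x >= +Inf   =>   x > DBL_MAX
       x >  +Inf   =>   0 (false), unless sNaNs must be honored
       x >  -Inf   =>   x >= -DBL_MAX

   the last via the initial swap of the comparison sense for a
   negative infinity.  */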
5910 /* Subroutine of fold() that optimizes comparisons of a division by
5911 a nonzero integer constant against an integer constant, i.e.
5912 X/C1 op C2.
5914 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5915 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5916 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5918 The function returns the constant folded tree if a simplification
5919 can be made, and NULL_TREE otherwise. */
5921 static tree
5922 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5924 tree prod, tmp, hi, lo;
5925 tree arg00 = TREE_OPERAND (arg0, 0);
5926 tree arg01 = TREE_OPERAND (arg0, 1);
5927 unsigned HOST_WIDE_INT lpart;
5928 HOST_WIDE_INT hpart;
5929 int overflow;
5931 /* We have to do this the hard way to detect unsigned overflow.
5932 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5933 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5934 TREE_INT_CST_HIGH (arg01),
5935 TREE_INT_CST_LOW (arg1),
5936 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5937 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5938 prod = force_fit_type (prod, -1, overflow, false);
5940 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5942 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5943 lo = prod;
5945 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5946 overflow = add_double (TREE_INT_CST_LOW (prod),
5947 TREE_INT_CST_HIGH (prod),
5948 TREE_INT_CST_LOW (tmp),
5949 TREE_INT_CST_HIGH (tmp),
5950 &lpart, &hpart);
5951 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5952 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5953 TREE_CONSTANT_OVERFLOW (prod));
5955 else if (tree_int_cst_sgn (arg01) >= 0)
5957 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5958 switch (tree_int_cst_sgn (arg1))
5960 case -1:
5961 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5962 hi = prod;
5963 break;
5965 case 0:
5966 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5967 hi = tmp;
5968 break;
5970 case 1:
5971 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5972 lo = prod;
5973 break;
5975 default:
5976 gcc_unreachable ();
5979 else
5981 /* A negative divisor reverses the relational operators. */
5982 code = swap_tree_comparison (code);
5984 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5985 switch (tree_int_cst_sgn (arg1))
5987 case -1:
5988 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5989 lo = prod;
5990 break;
5992 case 0:
5993 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5994 lo = tmp;
5995 break;
5997 case 1:
5998 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5999 hi = prod;
6000 break;
6002 default:
6003 gcc_unreachable ();
6007 switch (code)
6009 case EQ_EXPR:
6010 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6011 return omit_one_operand (type, integer_zero_node, arg00);
6012 if (TREE_OVERFLOW (hi))
6013 return fold_build2 (GE_EXPR, type, arg00, lo);
6014 if (TREE_OVERFLOW (lo))
6015 return fold_build2 (LE_EXPR, type, arg00, hi);
6016 return build_range_check (type, arg00, 1, lo, hi);
6018 case NE_EXPR:
6019 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6020 return omit_one_operand (type, integer_one_node, arg00);
6021 if (TREE_OVERFLOW (hi))
6022 return fold_build2 (LT_EXPR, type, arg00, lo);
6023 if (TREE_OVERFLOW (lo))
6024 return fold_build2 (GT_EXPR, type, arg00, hi);
6025 return build_range_check (type, arg00, 0, lo, hi);
6027 case LT_EXPR:
6028 if (TREE_OVERFLOW (lo))
6029 return omit_one_operand (type, integer_zero_node, arg00);
6030 return fold_build2 (LT_EXPR, type, arg00, lo);
6032 case LE_EXPR:
6033 if (TREE_OVERFLOW (hi))
6034 return omit_one_operand (type, integer_one_node, arg00);
6035 return fold_build2 (LE_EXPR, type, arg00, hi);
6037 case GT_EXPR:
6038 if (TREE_OVERFLOW (hi))
6039 return omit_one_operand (type, integer_zero_node, arg00);
6040 return fold_build2 (GT_EXPR, type, arg00, hi);
6042 case GE_EXPR:
6043 if (TREE_OVERFLOW (lo))
6044 return omit_one_operand (type, integer_one_node, arg00);
6045 return fold_build2 (GE_EXPR, type, arg00, lo);
6047 default:
6048 break;
6051 return NULL_TREE;
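/* A worked example (illustrative only): for unsigned int x, the
   quotient x / 3 equals 5 exactly for x in [15, 17]: prod = 5*3 = 15
   and hi = prod + (3-1) = 17.  So x / 3 == 5 folds to the range
   check 15 <= x && x <= 17, and x / 3 > 5 folds to x > 17.  */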
6055 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6056 equality/inequality test, then return a simplified form of the test
6057 using a sign test. Otherwise return NULL. TYPE is the desired
6058 result type. */
6060 static tree
6061 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6062 tree result_type)
6064 /* If this is testing a single bit, we can optimize the test. */
6065 if ((code == NE_EXPR || code == EQ_EXPR)
6066 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6067 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6069 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6070 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6071 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6073 if (arg00 != NULL_TREE
6074 /* This is only a win if casting to a signed type is cheap,
6075 i.e. when arg00's type is not a partial mode. */
6076 && TYPE_PRECISION (TREE_TYPE (arg00))
6077 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6079 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6080 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6081 result_type, fold_convert (stype, arg00),
6082 fold_convert (stype, integer_zero_node));
6086 return NULL_TREE;
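/* For example (a sketch, assuming an 8-bit signed char c):

       (c & 0x80) != 0   =>   c < 0
       (c & 0x80) == 0   =>   c >= 0

   because 0x80 is exactly the sign bit of c's type.  */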
6089 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6090 equality/inequality test, then return a simplified form of
6091 the test using shifts and logical operations. Otherwise return
6092 NULL. TYPE is the desired result type. */
6094 tree
6095 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6096 tree result_type)
6098 /* If this is testing a single bit, we can optimize the test. */
6099 if ((code == NE_EXPR || code == EQ_EXPR)
6100 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6101 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6103 tree inner = TREE_OPERAND (arg0, 0);
6104 tree type = TREE_TYPE (arg0);
6105 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6106 enum machine_mode operand_mode = TYPE_MODE (type);
6107 int ops_unsigned;
6108 tree signed_type, unsigned_type, intermediate_type;
6109 tree tem;
6111 /* First, see if we can fold the single bit test into a sign-bit
6112 test. */
6113 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6114 result_type);
6115 if (tem)
6116 return tem;
6118 /* Otherwise we have (A & C) != 0 where C is a single bit,
6119 convert that into ((A >> C2) & 1), where C2 = log2(C).
6120 Similarly for (A & C) == 0. */
6122 /* If INNER is a right shift of a constant and it plus BITNUM does
6123 not overflow, adjust BITNUM and INNER. */
6124 if (TREE_CODE (inner) == RSHIFT_EXPR
6125 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6126 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6127 && bitnum < TYPE_PRECISION (type)
6128 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6129 bitnum - TYPE_PRECISION (type)))
6131 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6132 inner = TREE_OPERAND (inner, 0);
6135 /* If we are going to be able to omit the AND below, we must do our
6136 operations as unsigned. If we must use the AND, we have a choice.
6137 Normally unsigned is faster, but for some machines signed is. */
6138 #ifdef LOAD_EXTEND_OP
6139 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6140 && !flag_syntax_only) ? 0 : 1;
6141 #else
6142 ops_unsigned = 1;
6143 #endif
6145 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6146 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6147 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6148 inner = fold_convert (intermediate_type, inner);
6150 if (bitnum != 0)
6151 inner = build2 (RSHIFT_EXPR, intermediate_type,
6152 inner, size_int (bitnum));
6154 if (code == EQ_EXPR)
6155 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6156 inner, integer_one_node);
6158 /* Put the AND last so it can combine with more things. */
6159 inner = build2 (BIT_AND_EXPR, intermediate_type,
6160 inner, integer_one_node);
6162 /* Make sure to return the proper type. */
6163 inner = fold_convert (result_type, inner);
6165 return inner;
6167 return NULL_TREE;
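/* For example (a sketch): when the sign-bit shortcut above does not
   apply, a test of bit 3 becomes a shift-and-mask sequence:

       (x & 8) != 0   =>   (x >> 3) & 1
       (x & 8) == 0   =>   ((x >> 3) ^ 1) & 1

   computed in the signed or unsigned variant of x's type, whichever
   suits the target's load extension.  */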
6170 /* Check whether we are allowed to reorder operands arg0 and arg1,
6171 such that the evaluation of arg1 occurs before arg0. */
6173 static bool
6174 reorder_operands_p (tree arg0, tree arg1)
6176 if (! flag_evaluation_order)
6177 return true;
6178 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6179 return true;
6180 return ! TREE_SIDE_EFFECTS (arg0)
6181 && ! TREE_SIDE_EFFECTS (arg1);
6184 /* Test whether it is preferable to swap two operands, ARG0 and
6185 ARG1, for example because ARG0 is an integer constant and ARG1
6186 isn't. If REORDER is true, only recommend swapping if we can
6187 evaluate the operands in reverse order. */
6189 bool
6190 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6192 STRIP_SIGN_NOPS (arg0);
6193 STRIP_SIGN_NOPS (arg1);
6195 if (TREE_CODE (arg1) == INTEGER_CST)
6196 return 0;
6197 if (TREE_CODE (arg0) == INTEGER_CST)
6198 return 1;
6200 if (TREE_CODE (arg1) == REAL_CST)
6201 return 0;
6202 if (TREE_CODE (arg0) == REAL_CST)
6203 return 1;
6205 if (TREE_CODE (arg1) == COMPLEX_CST)
6206 return 0;
6207 if (TREE_CODE (arg0) == COMPLEX_CST)
6208 return 1;
6210 if (TREE_CONSTANT (arg1))
6211 return 0;
6212 if (TREE_CONSTANT (arg0))
6213 return 1;
6215 if (optimize_size)
6216 return 0;
6218 if (reorder && flag_evaluation_order
6219 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6220 return 0;
6222 if (DECL_P (arg1))
6223 return 0;
6224 if (DECL_P (arg0))
6225 return 1;
6227 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6228 for commutative and comparison operators. Ensuring a canonical
6229 form allows the optimizers to find additional redundancies without
6230 having to explicitly check for both orderings. */
6231 if (TREE_CODE (arg0) == SSA_NAME
6232 && TREE_CODE (arg1) == SSA_NAME
6233 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6234 return 1;
6236 return 0;
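/* For example (illustrative only): fold_binary uses this to
   canonicalize commutative operations, so 5 + x becomes x + 5 and,
   for SSA names, b_2 + a_1 becomes a_1 + b_2 (lower version first),
   letting later passes match each expression in a single form.  */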
6239 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6240 ARG0 is extended to a wider type. */
6242 static tree
6243 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6245 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6246 tree arg1_unw;
6247 tree shorter_type, outer_type;
6248 tree min, max;
6249 bool above, below;
6251 if (arg0_unw == arg0)
6252 return NULL_TREE;
6253 shorter_type = TREE_TYPE (arg0_unw);
6255 #ifdef HAVE_canonicalize_funcptr_for_compare
6256 /* Disable this optimization if we're casting a function pointer
6257 type on targets that require function pointer canonicalization. */
6258 if (HAVE_canonicalize_funcptr_for_compare
6259 && TREE_CODE (shorter_type) == POINTER_TYPE
6260 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6261 return NULL_TREE;
6262 #endif
6264 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6265 return NULL_TREE;
6267 arg1_unw = get_unwidened (arg1, shorter_type);
6269 /* If possible, express the comparison in the shorter mode. */
6270 if ((code == EQ_EXPR || code == NE_EXPR
6271 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6272 && (TREE_TYPE (arg1_unw) == shorter_type
6273 || (TREE_CODE (arg1_unw) == INTEGER_CST
6274 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6275 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6276 && int_fits_type_p (arg1_unw, shorter_type))))
6277 return fold_build2 (code, type, arg0_unw,
6278 fold_convert (shorter_type, arg1_unw));
6280 if (TREE_CODE (arg1_unw) != INTEGER_CST
6281 || TREE_CODE (shorter_type) != INTEGER_TYPE
6282 || !int_fits_type_p (arg1_unw, shorter_type))
6283 return NULL_TREE;
6285 /* If we are comparing with an integer that does not fit into the range
6286 of the shorter type, the result is known. */
6287 outer_type = TREE_TYPE (arg1_unw);
6288 min = lower_bound_in_type (outer_type, shorter_type);
6289 max = upper_bound_in_type (outer_type, shorter_type);
6291 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6292 max, arg1_unw));
6293 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6294 arg1_unw, min));
6296 switch (code)
6298 case EQ_EXPR:
6299 if (above || below)
6300 return omit_one_operand (type, integer_zero_node, arg0);
6301 break;
6303 case NE_EXPR:
6304 if (above || below)
6305 return omit_one_operand (type, integer_one_node, arg0);
6306 break;
6308 case LT_EXPR:
6309 case LE_EXPR:
6310 if (above)
6311 return omit_one_operand (type, integer_one_node, arg0);
6312 else if (below)
6313 return omit_one_operand (type, integer_zero_node, arg0);
6315 case GT_EXPR:
6316 case GE_EXPR:
6317 if (above)
6318 return omit_one_operand (type, integer_zero_node, arg0);
6319 else if (below)
6320 return omit_one_operand (type, integer_one_node, arg0);
6322 default:
6323 break;
6326 return NULL_TREE;
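/* For illustration (a sketch, assuming unsigned char c): in the
   comparison (int) c == 300 the constant lies outside [0, 255], so
   the result is known to be 0, whereas (int) c == 200 folds to the
   narrower comparison c == 200.  */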
6329 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6330 ARG0 just the signedness is changed. */
6332 static tree
6333 fold_sign_changed_comparison (enum tree_code code, tree type,
6334 tree arg0, tree arg1)
6336 tree arg0_inner, tmp;
6337 tree inner_type, outer_type;
6339 if (TREE_CODE (arg0) != NOP_EXPR
6340 && TREE_CODE (arg0) != CONVERT_EXPR)
6341 return NULL_TREE;
6343 outer_type = TREE_TYPE (arg0);
6344 arg0_inner = TREE_OPERAND (arg0, 0);
6345 inner_type = TREE_TYPE (arg0_inner);
6347 #ifdef HAVE_canonicalize_funcptr_for_compare
6348 /* Disable this optimization if we're casting a function pointer
6349 type on targets that require function pointer canonicalization. */
6350 if (HAVE_canonicalize_funcptr_for_compare
6351 && TREE_CODE (inner_type) == POINTER_TYPE
6352 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6353 return NULL_TREE;
6354 #endif
6356 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6357 return NULL_TREE;
6359 if (TREE_CODE (arg1) != INTEGER_CST
6360 && !((TREE_CODE (arg1) == NOP_EXPR
6361 || TREE_CODE (arg1) == CONVERT_EXPR)
6362 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6363 return NULL_TREE;
6365 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6366 && code != NE_EXPR
6367 && code != EQ_EXPR)
6368 return NULL_TREE;
6370 if (TREE_CODE (arg1) == INTEGER_CST)
6372 tmp = build_int_cst_wide (inner_type,
6373 TREE_INT_CST_LOW (arg1),
6374 TREE_INT_CST_HIGH (arg1));
6375 arg1 = force_fit_type (tmp, 0,
6376 TREE_OVERFLOW (arg1),
6377 TREE_CONSTANT_OVERFLOW (arg1));
6379 else
6380 arg1 = fold_convert (inner_type, arg1);
6382 return fold_build2 (code, type, arg0_inner, arg1);
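/* For example (illustrative only): with int i, the test
   (unsigned int) i == 5U folds to i == 5, since equality does not
   depend on signedness; an ordered test such as (unsigned int) i < 5U
   is left alone because the two types differ in sign.  */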
6385 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6386 the step of the array. Reconstructs s and delta in the case of s * delta
6387 being an integer constant (and thus already folded).
6388 ADDR is the address. OP1 is the multiplicative expression.
6389 If the function succeeds, the new address expression is returned. Otherwise
6390 NULL_TREE is returned. */
6392 static tree
6393 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6395 tree s, delta, step;
6396 tree ref = TREE_OPERAND (addr, 0), pref;
6397 tree ret, pos;
6398 tree itype;
6400 /* Canonicalize op1 into a possibly non-constant delta
6401 and an INTEGER_CST s. */
6402 if (TREE_CODE (op1) == MULT_EXPR)
6404 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6406 STRIP_NOPS (arg0);
6407 STRIP_NOPS (arg1);
6409 if (TREE_CODE (arg0) == INTEGER_CST)
6411 s = arg0;
6412 delta = arg1;
6414 else if (TREE_CODE (arg1) == INTEGER_CST)
6416 s = arg1;
6417 delta = arg0;
6419 else
6420 return NULL_TREE;
6422 else if (TREE_CODE (op1) == INTEGER_CST)
6424 delta = op1;
6425 s = NULL_TREE;
6427 else
6429 /* Treat OP1 as delta * 1. */
6430 delta = op1;
6431 s = integer_one_node;
6434 for (;; ref = TREE_OPERAND (ref, 0))
6436 if (TREE_CODE (ref) == ARRAY_REF)
6438 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6439 if (! itype)
6440 continue;
6442 step = array_ref_element_size (ref);
6443 if (TREE_CODE (step) != INTEGER_CST)
6444 continue;
6446 if (s)
6448 if (! tree_int_cst_equal (step, s))
6449 continue;
6451 else
6453 /* Check whether delta is a multiple of step. */
6454 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6455 if (! tmp)
6456 continue;
6457 delta = tmp;
6460 break;
6463 if (!handled_component_p (ref))
6464 return NULL_TREE;
6467 /* We found a suitable array reference. So copy everything up to it,
6468 and replace the index. */
6470 pref = TREE_OPERAND (addr, 0);
6471 ret = copy_node (pref);
6472 pos = ret;
6474 while (pref != ref)
6476 pref = TREE_OPERAND (pref, 0);
6477 TREE_OPERAND (pos, 0) = copy_node (pref);
6478 pos = TREE_OPERAND (pos, 0);
6481 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6482 fold_convert (itype,
6483 TREE_OPERAND (pos, 1)),
6484 fold_convert (itype, delta));
6486 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
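/* A worked instance (a sketch, assuming int a[16] with 4-byte
   elements): the loop optimizer may produce &a[i] + j * 4, and since
   the multiplier matches the element size the address folds to
   &a[i + j].  With a constant offset, &a[i] + 8 folds to &a[i + 2],
   8 being divisible by the step 4.  */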
6490 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6491 means A >= Y && A != MAX, but in this case we know that
6492 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6494 static tree
6495 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6497 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6499 if (TREE_CODE (bound) == LT_EXPR)
6500 a = TREE_OPERAND (bound, 0);
6501 else if (TREE_CODE (bound) == GT_EXPR)
6502 a = TREE_OPERAND (bound, 1);
6503 else
6504 return NULL_TREE;
6506 typea = TREE_TYPE (a);
6507 if (!INTEGRAL_TYPE_P (typea)
6508 && !POINTER_TYPE_P (typea))
6509 return NULL_TREE;
6511 if (TREE_CODE (ineq) == LT_EXPR)
6513 a1 = TREE_OPERAND (ineq, 1);
6514 y = TREE_OPERAND (ineq, 0);
6516 else if (TREE_CODE (ineq) == GT_EXPR)
6518 a1 = TREE_OPERAND (ineq, 0);
6519 y = TREE_OPERAND (ineq, 1);
6521 else
6522 return NULL_TREE;
6524 if (TREE_TYPE (a1) != typea)
6525 return NULL_TREE;
6527 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6528 if (!integer_onep (diff))
6529 return NULL_TREE;
6531 return fold_build2 (GE_EXPR, type, a, y);
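/* For example (illustrative only): with unsigned i, in the pair
   i < n && i + 1 > k the difference (i + 1) - i folds to 1, so the
   second test becomes i >= k; the bound i < n guarantees i is not
   the maximum value, which is what makes the rewrite safe.  */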
6534 /* Fold a unary expression of code CODE and type TYPE with operand
6535 OP0. Return the folded expression if folding is successful.
6536 Otherwise, return NULL_TREE. */
6538 tree
6539 fold_unary (enum tree_code code, tree type, tree op0)
6541 tree tem;
6542 tree arg0;
6543 enum tree_code_class kind = TREE_CODE_CLASS (code);
6545 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6546 && TREE_CODE_LENGTH (code) == 1);
6548 arg0 = op0;
6549 if (arg0)
6551 if (code == NOP_EXPR || code == CONVERT_EXPR
6552 || code == FLOAT_EXPR || code == ABS_EXPR)
6554 /* Don't use STRIP_NOPS, because signedness of argument type
6555 matters. */
6556 STRIP_SIGN_NOPS (arg0);
6558 else
6560 /* Strip any conversions that don't change the mode. This
6561 is safe for every expression, except for a comparison
6562 expression because its signedness is derived from its
6563 operands.
6565 Note that this is done as an internal manipulation within
6566 the constant folder, in order to find the simplest
6567 representation of the arguments so that their form can be
6568 studied. In any case, the appropriate type conversions
6569 should be put back in the tree that will get out of the
6570 constant folder. */
6571 STRIP_NOPS (arg0);
6575 if (TREE_CODE_CLASS (code) == tcc_unary)
6577 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6578 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6579 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6580 else if (TREE_CODE (arg0) == COND_EXPR)
6582 tree arg01 = TREE_OPERAND (arg0, 1);
6583 tree arg02 = TREE_OPERAND (arg0, 2);
6584 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6585 arg01 = fold_build1 (code, type, arg01);
6586 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6587 arg02 = fold_build1 (code, type, arg02);
6588 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6589 arg01, arg02);
6591 /* If this was a conversion, and all we did was to move it
6592 inside the COND_EXPR, bring it back out. But leave it if
6593 it is a conversion from integer to integer and the
6594 result precision is no wider than a word since such a
6595 conversion is cheap and may be optimized away by combine,
6596 while it couldn't if it were outside the COND_EXPR. Then return
6597 so we don't get into an infinite recursion loop taking the
6598 conversion out and then back in. */
6600 if ((code == NOP_EXPR || code == CONVERT_EXPR
6601 || code == NON_LVALUE_EXPR)
6602 && TREE_CODE (tem) == COND_EXPR
6603 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6604 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6605 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6606 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6607 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6608 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6609 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6610 && (INTEGRAL_TYPE_P
6611 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6612 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6613 || flag_syntax_only))
6614 tem = build1 (code, type,
6615 build3 (COND_EXPR,
6616 TREE_TYPE (TREE_OPERAND
6617 (TREE_OPERAND (tem, 1), 0)),
6618 TREE_OPERAND (tem, 0),
6619 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6620 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6621 return tem;
6623 else if (COMPARISON_CLASS_P (arg0))
6625 if (TREE_CODE (type) == BOOLEAN_TYPE)
6627 arg0 = copy_node (arg0);
6628 TREE_TYPE (arg0) = type;
6629 return arg0;
6631 else if (TREE_CODE (type) != INTEGER_TYPE)
6632 return fold_build3 (COND_EXPR, type, arg0,
6633 fold_build1 (code, type,
6634 integer_one_node),
6635 fold_build1 (code, type,
6636 integer_zero_node));
6640 switch (code)
6642 case NOP_EXPR:
6643 case FLOAT_EXPR:
6644 case CONVERT_EXPR:
6645 case FIX_TRUNC_EXPR:
6646 case FIX_CEIL_EXPR:
6647 case FIX_FLOOR_EXPR:
6648 case FIX_ROUND_EXPR:
6649 if (TREE_TYPE (op0) == type)
6650 return op0;
6652 /* Handle cases of two conversions in a row. */
6653 if (TREE_CODE (op0) == NOP_EXPR
6654 || TREE_CODE (op0) == CONVERT_EXPR)
6656 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6657 tree inter_type = TREE_TYPE (op0);
6658 int inside_int = INTEGRAL_TYPE_P (inside_type);
6659 int inside_ptr = POINTER_TYPE_P (inside_type);
6660 int inside_float = FLOAT_TYPE_P (inside_type);
6661 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6662 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6663 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6664 int inter_int = INTEGRAL_TYPE_P (inter_type);
6665 int inter_ptr = POINTER_TYPE_P (inter_type);
6666 int inter_float = FLOAT_TYPE_P (inter_type);
6667 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6668 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6669 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6670 int final_int = INTEGRAL_TYPE_P (type);
6671 int final_ptr = POINTER_TYPE_P (type);
6672 int final_float = FLOAT_TYPE_P (type);
6673 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6674 unsigned int final_prec = TYPE_PRECISION (type);
6675 int final_unsignedp = TYPE_UNSIGNED (type);
6677 /* In addition to the cases of two conversions in a row
6678 handled below, if we are converting something to its own
6679 type via an object of identical or wider precision, neither
6680 conversion is needed. */
6681 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6682 && ((inter_int && final_int) || (inter_float && final_float))
6683 && inter_prec >= final_prec)
6684 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6686 /* Likewise, if the intermediate and final types are either both
6687 float or both integer, we don't need the middle conversion if
6688 it is wider than the final type and doesn't change the signedness
6689 (for integers). Avoid this if the final type is a pointer
6690 since then we sometimes need the inner conversion. Likewise if
6691 the outer has a precision not equal to the size of its mode. */
6692 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6693 || (inter_float && inside_float)
6694 || (inter_vec && inside_vec))
6695 && inter_prec >= inside_prec
6696 && (inter_float || inter_vec
6697 || inter_unsignedp == inside_unsignedp)
6698 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6699 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6700 && ! final_ptr
6701 && (! final_vec || inter_prec == inside_prec))
6702 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6704 /* If we have a sign-extension of a zero-extended value, we can
6705 replace that by a single zero-extension. */
6706 if (inside_int && inter_int && final_int
6707 && inside_prec < inter_prec && inter_prec < final_prec
6708 && inside_unsignedp && !inter_unsignedp)
6709 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6711 /* Two conversions in a row are not needed unless:
6712 - some conversion is floating-point (overstrict for now), or
6713 - some conversion is a vector (overstrict for now), or
6714 - the intermediate type is narrower than both initial and
6715 final, or
6716 - the intermediate type and innermost type differ in signedness,
6717 and the outermost type is wider than the intermediate, or
6718 - the initial type is a pointer type and the precisions of the
6719 intermediate and final types differ, or
6720 - the final type is a pointer type and the precisions of the
6721 initial and intermediate types differ. */
6722 if (! inside_float && ! inter_float && ! final_float
6723 && ! inside_vec && ! inter_vec && ! final_vec
6724 && (inter_prec > inside_prec || inter_prec > final_prec)
6725 && ! (inside_int && inter_int
6726 && inter_unsignedp != inside_unsignedp
6727 && inter_prec < final_prec)
6728 && ((inter_unsignedp && inter_prec > inside_prec)
6729 == (final_unsignedp && final_prec > inter_prec))
6730 && ! (inside_ptr && inter_prec != final_prec)
6731 && ! (final_ptr && inside_prec != inter_prec)
6732 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6733 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6734 && ! final_ptr)
6735 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
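/* A concrete instance of the rules above (a sketch): for unsigned
   char c, the double conversion (int) (short) c needs only the outer
   widening, because the 8-bit value zero-extended into short can
   never set the short's sign bit; it therefore folds to the single
   zero-extending conversion (int) c.  */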
6738 /* Handle (T *)&A.B.C for A being of type T and B and C
6739 living at offset zero. This occurs frequently in
6740 C++ upcasting and then accessing the base. */
6741 if (TREE_CODE (op0) == ADDR_EXPR
6742 && POINTER_TYPE_P (type)
6743 && handled_component_p (TREE_OPERAND (op0, 0)))
6745 HOST_WIDE_INT bitsize, bitpos;
6746 tree offset;
6747 enum machine_mode mode;
6748 int unsignedp, volatilep;
6749 tree base = TREE_OPERAND (op0, 0);
6750 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6751 &mode, &unsignedp, &volatilep, false);
6752 /* If the reference was to a (constant) zero offset, we can use
6753 the address of the base if it has the same base type
6754 as the result type. */
6755 if (! offset && bitpos == 0
6756 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6757 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6758 return fold_convert (type, build_fold_addr_expr (base));
6761 if (TREE_CODE (op0) == MODIFY_EXPR
6762 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6763 /* Detect assigning a bitfield. */
6764 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6765 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6767 /* Don't leave an assignment inside a conversion
6768 unless assigning a bitfield. */
6769 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6770 /* First do the assignment, then return converted constant. */
6771 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6772 TREE_NO_WARNING (tem) = 1;
6773 TREE_USED (tem) = 1;
6774 return tem;
6777 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6778 constant (if x has signed type, the sign bit cannot be set
6779 in c). This folds extension into the BIT_AND_EXPR. */
6780 if (INTEGRAL_TYPE_P (type)
6781 && TREE_CODE (type) != BOOLEAN_TYPE
6782 && TREE_CODE (op0) == BIT_AND_EXPR
6783 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6785 tree and = op0;
6786 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6787 int change = 0;
6789 if (TYPE_UNSIGNED (TREE_TYPE (and))
6790 || (TYPE_PRECISION (type)
6791 <= TYPE_PRECISION (TREE_TYPE (and))))
6792 change = 1;
6793 else if (TYPE_PRECISION (TREE_TYPE (and1))
6794 <= HOST_BITS_PER_WIDE_INT
6795 && host_integerp (and1, 1))
6797 unsigned HOST_WIDE_INT cst;
6799 cst = tree_low_cst (and1, 1);
6800 cst &= (HOST_WIDE_INT) -1
6801 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6802 change = (cst == 0);
6803 #ifdef LOAD_EXTEND_OP
6804 if (change
6805 && !flag_syntax_only
6806 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6807 == ZERO_EXTEND))
6809 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6810 and0 = fold_convert (uns, and0);
6811 and1 = fold_convert (uns, and1);
6813 #endif
6815 if (change)
6817 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6818 TREE_INT_CST_HIGH (and1));
6819 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6820 TREE_CONSTANT_OVERFLOW (and1));
6821 return fold_build2 (BIT_AND_EXPR, type,
6822 fold_convert (type, and0), tem);
6826 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6827 T2 being pointers to types of the same size. */
6828 if (POINTER_TYPE_P (type)
6829 && BINARY_CLASS_P (arg0)
6830 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6831 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6833 tree arg00 = TREE_OPERAND (arg0, 0);
6834 tree t0 = type;
6835 tree t1 = TREE_TYPE (arg00);
6836 tree tt0 = TREE_TYPE (t0);
6837 tree tt1 = TREE_TYPE (t1);
6838 tree s0 = TYPE_SIZE (tt0);
6839 tree s1 = TYPE_SIZE (tt1);
6841 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6842 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6843 TREE_OPERAND (arg0, 1));
6846 tem = fold_convert_const (code, type, arg0);
6847 return tem ? tem : NULL_TREE;
6849 case VIEW_CONVERT_EXPR:
6850 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6851 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6852 return NULL_TREE;
6854 case NEGATE_EXPR:
6855 if (negate_expr_p (arg0))
6856 return fold_convert (type, negate_expr (arg0));
6857 /* Convert - (~A) to A + 1. */
6858 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6859 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6860 build_int_cst (type, 1));
6861 return NULL_TREE;
6863 case ABS_EXPR:
6864 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6865 return fold_abs_const (arg0, type);
6866 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6867 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6868 /* Convert fabs((double)float) into (double)fabsf(float). */
6869 else if (TREE_CODE (arg0) == NOP_EXPR
6870 && TREE_CODE (type) == REAL_TYPE)
6872 tree targ0 = strip_float_extensions (arg0);
6873 if (targ0 != arg0)
6874 return fold_convert (type, fold_build1 (ABS_EXPR,
6875 TREE_TYPE (targ0),
6876 targ0));
6878 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6879 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6880 return arg0;
6882 /* Strip sign ops from argument. */
6883 if (TREE_CODE (type) == REAL_TYPE)
6885 tem = fold_strip_sign_ops (arg0);
6886 if (tem)
6887 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6889 return NULL_TREE;
6891 case CONJ_EXPR:
6892 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6893 return fold_convert (type, arg0);
6894 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6895 return build2 (COMPLEX_EXPR, type,
6896 TREE_OPERAND (arg0, 0),
6897 negate_expr (TREE_OPERAND (arg0, 1)));
6898 else if (TREE_CODE (arg0) == COMPLEX_CST)
6899 return build_complex (type, TREE_REALPART (arg0),
6900 negate_expr (TREE_IMAGPART (arg0)));
6901 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6902 return fold_build2 (TREE_CODE (arg0), type,
6903 fold_build1 (CONJ_EXPR, type,
6904 TREE_OPERAND (arg0, 0)),
6905 fold_build1 (CONJ_EXPR, type,
6906 TREE_OPERAND (arg0, 1)));
6907 else if (TREE_CODE (arg0) == CONJ_EXPR)
6908 return TREE_OPERAND (arg0, 0);
6909 return NULL_TREE;
6911 case BIT_NOT_EXPR:
6912 if (TREE_CODE (arg0) == INTEGER_CST)
6913 return fold_not_const (arg0, type);
6914 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6915 return TREE_OPERAND (arg0, 0);
6916 /* Convert ~ (-A) to A - 1. */
6917 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6918 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6919 build_int_cst (type, 1));
6920 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6921 else if (INTEGRAL_TYPE_P (type)
6922 && ((TREE_CODE (arg0) == MINUS_EXPR
6923 && integer_onep (TREE_OPERAND (arg0, 1)))
6924 || (TREE_CODE (arg0) == PLUS_EXPR
6925 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6926 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6927 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6928 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6929 && (tem = fold_unary (BIT_NOT_EXPR, type,
6930 fold_convert (type,
6931 TREE_OPERAND (arg0, 0)))))
6932 return fold_build2 (BIT_XOR_EXPR, type, tem,
6933 fold_convert (type, TREE_OPERAND (arg0, 1)));
6934 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6935 && (tem = fold_unary (BIT_NOT_EXPR, type,
6936 fold_convert (type,
6937 TREE_OPERAND (arg0, 1)))))
6938 return fold_build2 (BIT_XOR_EXPR, type,
6939 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6941 return NULL_TREE;
6943 case TRUTH_NOT_EXPR:
6944 /* The argument to invert_truthvalue must have Boolean type. */
6945 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6946 arg0 = fold_convert (boolean_type_node, arg0);
6948 /* Note that the operand of this must be an int
6949 and its values must be 0 or 1.
6950 ("true" is a fixed value perhaps depending on the language,
6951 but we don't handle values other than 1 correctly yet.) */
6952 tem = invert_truthvalue (arg0);
6953 /* Avoid infinite recursion. */
6954 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6955 return NULL_TREE;
6956 return fold_convert (type, tem);
6958 case REALPART_EXPR:
6959 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6960 return NULL_TREE;
6961 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6962 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6963 TREE_OPERAND (arg0, 1));
6964 else if (TREE_CODE (arg0) == COMPLEX_CST)
6965 return TREE_REALPART (arg0);
6966 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6967 return fold_build2 (TREE_CODE (arg0), type,
6968 fold_build1 (REALPART_EXPR, type,
6969 TREE_OPERAND (arg0, 0)),
6970 fold_build1 (REALPART_EXPR, type,
6971 TREE_OPERAND (arg0, 1)));
6972 return NULL_TREE;
6974 case IMAGPART_EXPR:
6975 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6976 return fold_convert (type, integer_zero_node);
6977 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6978 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6979 TREE_OPERAND (arg0, 0));
6980 else if (TREE_CODE (arg0) == COMPLEX_CST)
6981 return TREE_IMAGPART (arg0);
6982 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6983 return fold_build2 (TREE_CODE (arg0), type,
6984 fold_build1 (IMAGPART_EXPR, type,
6985 TREE_OPERAND (arg0, 0)),
6986 fold_build1 (IMAGPART_EXPR, type,
6987 TREE_OPERAND (arg0, 1)));
6988 return NULL_TREE;
6990 default:
6991 return NULL_TREE;
6992 } /* switch (code) */
6995 /* Fold a binary expression of code CODE and type TYPE with operands
6996 OP0 and OP1. Return the folded expression if folding is
6997 successful. Otherwise, return NULL_TREE. */
6999 tree
7000 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7002 tree t1 = NULL_TREE;
7003 tree tem;
7004 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7005 enum tree_code_class kind = TREE_CODE_CLASS (code);
7007 /* WINS will be nonzero when the switch is done
7008 if all operands are constant. */
7009 int wins = 1;
7011 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7012 && TREE_CODE_LENGTH (code) == 2);
7014 arg0 = op0;
7015 arg1 = op1;
7017 if (arg0)
7019 tree subop;
7021 /* Strip any conversions that don't change the mode. This is
7022 safe for every expression, except for a comparison expression
7023 because its signedness is derived from its operands. So, in
7024 the latter case, only strip conversions that don't change the
7025 signedness.
7027 Note that this is done as an internal manipulation within the
7028 constant folder, in order to find the simplest representation
7029 of the arguments so that their form can be studied. In any
7030 case, the appropriate type conversions should be put back in
7031 the tree that will get out of the constant folder. */
7032 if (kind == tcc_comparison)
7033 STRIP_SIGN_NOPS (arg0);
7034 else
7035 STRIP_NOPS (arg0);
7037 if (TREE_CODE (arg0) == COMPLEX_CST)
7038 subop = TREE_REALPART (arg0);
7039 else
7040 subop = arg0;
7042 if (TREE_CODE (subop) != INTEGER_CST
7043 && TREE_CODE (subop) != REAL_CST)
7044 /* Note that TREE_CONSTANT isn't enough:
7045 static var addresses are constant but we can't
7046 do arithmetic on them. */
7047 wins = 0;
7050 if (arg1)
7052 tree subop;
7054 /* Strip any conversions that don't change the mode. This is
7055 safe for every expression, except for a comparison expression
7056 because its signedness is derived from its operands. So, in
7057 the latter case, only strip conversions that don't change the
7058 signedness.
7060 Note that this is done as an internal manipulation within the
7061 constant folder, in order to find the simplest representation
7062 of the arguments so that their form can be studied. In any
7063 case, the appropriate type conversions should be put back in
7064 the tree that will get out of the constant folder. */
7065 if (kind == tcc_comparison)
7066 STRIP_SIGN_NOPS (arg1);
7067 else
7068 STRIP_NOPS (arg1);
7070 if (TREE_CODE (arg1) == COMPLEX_CST)
7071 subop = TREE_REALPART (arg1);
7072 else
7073 subop = arg1;
7075 if (TREE_CODE (subop) != INTEGER_CST
7076 && TREE_CODE (subop) != REAL_CST)
7077 /* Note that TREE_CONSTANT isn't enough:
7078 static var addresses are constant but we can't
7079 do arithmetic on them. */
7080 wins = 0;
7083 /* If this is a commutative operation, and ARG0 is a constant, move it
7084 to ARG1 to reduce the number of tests below. */
7085 if (commutative_tree_code (code)
7086 && tree_swap_operands_p (arg0, arg1, true))
7087 return fold_build2 (code, type, op1, op0);
7089 /* Now WINS is set as described above,
7090 ARG0 is the first operand of EXPR,
7091 and ARG1 is the second operand (if it has more than one operand).
7093 First check for cases where an arithmetic operation is applied to a
7094 compound, conditional, or comparison operation. Push the arithmetic
7095 operation inside the compound or conditional to see if any folding
7096 can then be done. Convert comparison to conditional for this purpose.
7097 This also optimizes non-constant cases that used to be done in
7098 expand_expr.
7100 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7101 where one of the operands is a truth value and the other is a truth
7102 value or a BIT_AND_EXPR with the constant 1. In that case, the
7103 code below would make the expression more complex. Change it to a
7104 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7105 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7107 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7108 || code == EQ_EXPR || code == NE_EXPR)
7109 && ((truth_value_p (TREE_CODE (arg0))
7110 && (truth_value_p (TREE_CODE (arg1))
7111 || (TREE_CODE (arg1) == BIT_AND_EXPR
7112 && integer_onep (TREE_OPERAND (arg1, 1)))))
7113 || (truth_value_p (TREE_CODE (arg1))
7114 && (truth_value_p (TREE_CODE (arg0))
7115 || (TREE_CODE (arg0) == BIT_AND_EXPR
7116 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7118 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7119 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7120 : TRUTH_XOR_EXPR,
7121 boolean_type_node,
7122 fold_convert (boolean_type_node, arg0),
7123 fold_convert (boolean_type_node, arg1));
7125 if (code == EQ_EXPR)
7126 tem = invert_truthvalue (tem);
7128 return fold_convert (type, tem);
7131 if (TREE_CODE_CLASS (code) == tcc_binary
7132 || TREE_CODE_CLASS (code) == tcc_comparison)
7134 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7135 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7136 fold_build2 (code, type,
7137 TREE_OPERAND (arg0, 1), op1));
7138 if (TREE_CODE (arg1) == COMPOUND_EXPR
7139 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7140 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7141 fold_build2 (code, type,
7142 op0, TREE_OPERAND (arg1, 1)));
7144 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7146 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7147 arg0, arg1,
7148 /*cond_first_p=*/1);
7149 if (tem != NULL_TREE)
7150 return tem;
7153 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7155 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7156 arg1, arg0,
7157 /*cond_first_p=*/0);
7158 if (tem != NULL_TREE)
7159 return tem;
7163 switch (code)
7165 case PLUS_EXPR:
7166 /* A + (-B) -> A - B */
7167 if (TREE_CODE (arg1) == NEGATE_EXPR)
7168 return fold_build2 (MINUS_EXPR, type,
7169 fold_convert (type, arg0),
7170 fold_convert (type, TREE_OPERAND (arg1, 0)));
7171 /* (-A) + B -> B - A */
7172 if (TREE_CODE (arg0) == NEGATE_EXPR
7173 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7174 return fold_build2 (MINUS_EXPR, type,
7175 fold_convert (type, arg1),
7176 fold_convert (type, TREE_OPERAND (arg0, 0)));
7177 /* Convert ~A + 1 to -A. */
7178 if (INTEGRAL_TYPE_P (type)
7179 && TREE_CODE (arg0) == BIT_NOT_EXPR
7180 && integer_onep (arg1))
7181 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7183 if (! FLOAT_TYPE_P (type))
7185 if (integer_zerop (arg1))
7186 return non_lvalue (fold_convert (type, arg0));
7188 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7189 with a constant, and the two constants have no bits in common,
7190 we should treat this as a BIT_IOR_EXPR since this may produce more
7191 simplifications. */
7192 if (TREE_CODE (arg0) == BIT_AND_EXPR
7193 && TREE_CODE (arg1) == BIT_AND_EXPR
7194 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7195 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7196 && integer_zerop (const_binop (BIT_AND_EXPR,
7197 TREE_OPERAND (arg0, 1),
7198 TREE_OPERAND (arg1, 1), 0)))
7200 code = BIT_IOR_EXPR;
7201 goto bit_ior;
7204 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7205 (plus (plus (mult) (mult)) (foo)) so that we can
7206 take advantage of the factoring cases below. */
7207 if (((TREE_CODE (arg0) == PLUS_EXPR
7208 || TREE_CODE (arg0) == MINUS_EXPR)
7209 && TREE_CODE (arg1) == MULT_EXPR)
7210 || ((TREE_CODE (arg1) == PLUS_EXPR
7211 || TREE_CODE (arg1) == MINUS_EXPR)
7212 && TREE_CODE (arg0) == MULT_EXPR))
7214 tree parg0, parg1, parg, marg;
7215 enum tree_code pcode;
7217 if (TREE_CODE (arg1) == MULT_EXPR)
7218 parg = arg0, marg = arg1;
7219 else
7220 parg = arg1, marg = arg0;
7221 pcode = TREE_CODE (parg);
7222 parg0 = TREE_OPERAND (parg, 0);
7223 parg1 = TREE_OPERAND (parg, 1);
7224 STRIP_NOPS (parg0);
7225 STRIP_NOPS (parg1);
7227 if (TREE_CODE (parg0) == MULT_EXPR
7228 && TREE_CODE (parg1) != MULT_EXPR)
7229 return fold_build2 (pcode, type,
7230 fold_build2 (PLUS_EXPR, type,
7231 fold_convert (type, parg0),
7232 fold_convert (type, marg)),
7233 fold_convert (type, parg1));
7234 if (TREE_CODE (parg0) != MULT_EXPR
7235 && TREE_CODE (parg1) == MULT_EXPR)
7236 return fold_build2 (PLUS_EXPR, type,
7237 fold_convert (type, parg0),
7238 fold_build2 (pcode, type,
7239 fold_convert (type, marg),
7240 fold_convert (type,
7241 parg1)));
7244 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7246 tree arg00, arg01, arg10, arg11;
7247 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7249 /* (A * C) + (B * C) -> (A+B) * C.
7250 We are most concerned about the case where C is a constant,
7251 but other combinations show up during loop reduction. Since
7252 it is not difficult, try all four possibilities. */
7254 arg00 = TREE_OPERAND (arg0, 0);
7255 arg01 = TREE_OPERAND (arg0, 1);
7256 arg10 = TREE_OPERAND (arg1, 0);
7257 arg11 = TREE_OPERAND (arg1, 1);
7258 same = NULL_TREE;
7260 if (operand_equal_p (arg01, arg11, 0))
7261 same = arg01, alt0 = arg00, alt1 = arg10;
7262 else if (operand_equal_p (arg00, arg10, 0))
7263 same = arg00, alt0 = arg01, alt1 = arg11;
7264 else if (operand_equal_p (arg00, arg11, 0))
7265 same = arg00, alt0 = arg01, alt1 = arg10;
7266 else if (operand_equal_p (arg01, arg10, 0))
7267 same = arg01, alt0 = arg00, alt1 = arg11;
7269 /* No identical multiplicands; see if we can find a common
7270 power-of-two factor in non-power-of-two multiplies. This
7271 can help in multi-dimensional array access. */
7272 else if (TREE_CODE (arg01) == INTEGER_CST
7273 && TREE_CODE (arg11) == INTEGER_CST
7274 && TREE_INT_CST_HIGH (arg01) == 0
7275 && TREE_INT_CST_HIGH (arg11) == 0)
7277 HOST_WIDE_INT int01, int11, tmp;
7278 int01 = TREE_INT_CST_LOW (arg01);
7279 int11 = TREE_INT_CST_LOW (arg11);
7281 /* Move min of absolute values to int11. */
7282 if ((int01 >= 0 ? int01 : -int01)
7283 < (int11 >= 0 ? int11 : -int11))
7285 tmp = int01, int01 = int11, int11 = tmp;
7286 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7287 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7290 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7292 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7293 build_int_cst (NULL_TREE,
7294 int01 / int11));
7295 alt1 = arg10;
7296 same = arg11;
7300 if (same)
7301 return fold_build2 (MULT_EXPR, type,
7302 fold_build2 (PLUS_EXPR, type,
7303 fold_convert (type, alt0),
7304 fold_convert (type, alt1)),
7305 fold_convert (type, same));
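/* For example, a*c + b*c becomes (a + b)*c here, and the
   power-of-two case turns i*12 + j*4 into (i*3 + j)*4, which
   matches the address arithmetic of multi-dimensional arrays. */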
7308 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7309 of the array. The loop optimizer sometimes produces this type of
7310 expression. */
7311 if (TREE_CODE (arg0) == ADDR_EXPR)
7313 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7314 if (tem)
7315 return fold_convert (type, tem);
7317 else if (TREE_CODE (arg1) == ADDR_EXPR)
7319 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7320 if (tem)
7321 return fold_convert (type, tem);
7324 else
7326 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7327 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7328 return non_lvalue (fold_convert (type, arg0));
7330 /* Likewise if the operands are reversed. */
7331 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7332 return non_lvalue (fold_convert (type, arg1));
7334 /* Convert X + -C into X - C. */
7335 if (TREE_CODE (arg1) == REAL_CST
7336 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7338 tem = fold_negate_const (arg1, type);
7339 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7340 return fold_build2 (MINUS_EXPR, type,
7341 fold_convert (type, arg0),
7342 fold_convert (type, tem));
7345 if (flag_unsafe_math_optimizations
7346 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7347 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7348 && (tem = distribute_real_division (code, type, arg0, arg1)))
7349 return tem;
7351 /* Convert x+x into x*2.0. */
7352 if (operand_equal_p (arg0, arg1, 0)
7353 && SCALAR_FLOAT_TYPE_P (type))
7354 return fold_build2 (MULT_EXPR, type, arg0,
7355 build_real (type, dconst2));
7357 /* Convert x*c+x into x*(c+1). */
7358 if (flag_unsafe_math_optimizations
7359 && TREE_CODE (arg0) == MULT_EXPR
7360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7361 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7362 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7364 REAL_VALUE_TYPE c;
7366 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7367 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7368 return fold_build2 (MULT_EXPR, type, arg1,
7369 build_real (type, c));
7372 /* Convert x+x*c into x*(c+1). */
7373 if (flag_unsafe_math_optimizations
7374 && TREE_CODE (arg1) == MULT_EXPR
7375 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7376 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7377 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7379 REAL_VALUE_TYPE c;
7381 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7382 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7383 return fold_build2 (MULT_EXPR, type, arg0,
7384 build_real (type, c));
7387 /* Convert x*c1+x*c2 into x*(c1+c2). */
7388 if (flag_unsafe_math_optimizations
7389 && TREE_CODE (arg0) == MULT_EXPR
7390 && TREE_CODE (arg1) == MULT_EXPR
7391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7392 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7393 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7394 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7395 && operand_equal_p (TREE_OPERAND (arg0, 0),
7396 TREE_OPERAND (arg1, 0), 0))
7398 REAL_VALUE_TYPE c1, c2;
7400 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7401 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7402 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7403 return fold_build2 (MULT_EXPR, type,
7404 TREE_OPERAND (arg0, 0),
7405 build_real (type, c1));
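/* For example, with -funsafe-math-optimizations, x*2.0 + x*3.0
   folds to x*5.0 here. */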
7407 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7408 if (flag_unsafe_math_optimizations
7409 && TREE_CODE (arg1) == PLUS_EXPR
7410 && TREE_CODE (arg0) != MULT_EXPR)
7412 tree tree10 = TREE_OPERAND (arg1, 0);
7413 tree tree11 = TREE_OPERAND (arg1, 1);
7414 if (TREE_CODE (tree11) == MULT_EXPR
7415 && TREE_CODE (tree10) == MULT_EXPR)
7417 tree tree0;
7418 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7419 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7422 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7423 if (flag_unsafe_math_optimizations
7424 && TREE_CODE (arg0) == PLUS_EXPR
7425 && TREE_CODE (arg1) != MULT_EXPR)
7427 tree tree00 = TREE_OPERAND (arg0, 0);
7428 tree tree01 = TREE_OPERAND (arg0, 1);
7429 if (TREE_CODE (tree01) == MULT_EXPR
7430 && TREE_CODE (tree00) == MULT_EXPR)
7432 tree tree0;
7433 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7434 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7439 bit_rotate:
7440 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size of A,
7441 is a rotate of A by C1 bits. */
7442 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size of A,
7443 is a rotate of A by B bits. */
7445 enum tree_code code0, code1;
7446 code0 = TREE_CODE (arg0);
7447 code1 = TREE_CODE (arg1);
7448 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7449 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7450 && operand_equal_p (TREE_OPERAND (arg0, 0),
7451 TREE_OPERAND (arg1, 0), 0)
7452 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7454 tree tree01, tree11;
7455 enum tree_code code01, code11;
7457 tree01 = TREE_OPERAND (arg0, 1);
7458 tree11 = TREE_OPERAND (arg1, 1);
7459 STRIP_NOPS (tree01);
7460 STRIP_NOPS (tree11);
7461 code01 = TREE_CODE (tree01);
7462 code11 = TREE_CODE (tree11);
7463 if (code01 == INTEGER_CST
7464 && code11 == INTEGER_CST
7465 && TREE_INT_CST_HIGH (tree01) == 0
7466 && TREE_INT_CST_HIGH (tree11) == 0
7467 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7468 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7469 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7470 code0 == LSHIFT_EXPR ? tree01 : tree11);
7471 else if (code11 == MINUS_EXPR)
7473 tree tree110, tree111;
7474 tree110 = TREE_OPERAND (tree11, 0);
7475 tree111 = TREE_OPERAND (tree11, 1);
7476 STRIP_NOPS (tree110);
7477 STRIP_NOPS (tree111);
7478 if (TREE_CODE (tree110) == INTEGER_CST
7479 && 0 == compare_tree_int (tree110,
7480 TYPE_PRECISION
7481 (TREE_TYPE (TREE_OPERAND
7482 (arg0, 0))))
7483 && operand_equal_p (tree01, tree111, 0))
7484 return build2 ((code0 == LSHIFT_EXPR
7485 ? LROTATE_EXPR
7486 : RROTATE_EXPR),
7487 type, TREE_OPERAND (arg0, 0), tree01);
7489 else if (code01 == MINUS_EXPR)
7491 tree tree010, tree011;
7492 tree010 = TREE_OPERAND (tree01, 0);
7493 tree011 = TREE_OPERAND (tree01, 1);
7494 STRIP_NOPS (tree010);
7495 STRIP_NOPS (tree011);
7496 if (TREE_CODE (tree010) == INTEGER_CST
7497 && 0 == compare_tree_int (tree010,
7498 TYPE_PRECISION
7499 (TREE_TYPE (TREE_OPERAND
7500 (arg0, 0))))
7501 && operand_equal_p (tree11, tree011, 0))
7502 return build2 ((code0 != LSHIFT_EXPR
7503 ? LROTATE_EXPR
7504 : RROTATE_EXPR),
7505 type, TREE_OPERAND (arg0, 0), tree11);
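/* For example, assuming A has a 32-bit unsigned type,
   (A << 3) + (A >> 29) becomes LROTATE_EXPR <A, 3>, and
   (A << B) + (A >> (32 - B)) becomes LROTATE_EXPR <A, B>. */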
7510 associate:
7511 /* In most languages, we can't associate operations on floats through
7512 parentheses. Rather than remember where the parentheses were, we
7513 don't associate floats at all, unless the user has specified
7514 -funsafe-math-optimizations. */
7516 if (! wins
7517 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7519 tree var0, con0, lit0, minus_lit0;
7520 tree var1, con1, lit1, minus_lit1;
7522 /* Split both trees into variables, constants, and literals. Then
7523 associate each group together, the constants with literals,
7524 then the result with variables. This increases the chances of
7525 literals being recombined later and of generating relocatable
7526 expressions for the sum of a constant and literal. */
7527 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7528 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7529 code == MINUS_EXPR);
7531 /* Only do something if we found more than two objects. Otherwise,
7532 nothing has changed and we risk infinite recursion. */
7533 if (2 < ((var0 != 0) + (var1 != 0)
7534 + (con0 != 0) + (con1 != 0)
7535 + (lit0 != 0) + (lit1 != 0)
7536 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7538 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7539 if (code == MINUS_EXPR)
7540 code = PLUS_EXPR;
7542 var0 = associate_trees (var0, var1, code, type);
7543 con0 = associate_trees (con0, con1, code, type);
7544 lit0 = associate_trees (lit0, lit1, code, type);
7545 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7547 /* Preserve the MINUS_EXPR if the negative part of the literal is
7548 greater than the positive part. Otherwise, the multiplicative
7549 folding code (i.e. extract_muldiv) may be fooled when
7550 unsigned constants are subtracted, as in the following
7551 example: ((X*2 + 4) - 8U)/2. */
7552 if (minus_lit0 && lit0)
7554 if (TREE_CODE (lit0) == INTEGER_CST
7555 && TREE_CODE (minus_lit0) == INTEGER_CST
7556 && tree_int_cst_lt (lit0, minus_lit0))
7558 minus_lit0 = associate_trees (minus_lit0, lit0,
7559 MINUS_EXPR, type);
7560 lit0 = 0;
7562 else
7564 lit0 = associate_trees (lit0, minus_lit0,
7565 MINUS_EXPR, type);
7566 minus_lit0 = 0;
7569 if (minus_lit0)
7571 if (con0 == 0)
7572 return fold_convert (type,
7573 associate_trees (var0, minus_lit0,
7574 MINUS_EXPR, type));
7575 else
7577 con0 = associate_trees (con0, minus_lit0,
7578 MINUS_EXPR, type);
7579 return fold_convert (type,
7580 associate_trees (var0, con0,
7581 PLUS_EXPR, type));
7585 con0 = associate_trees (con0, lit0, code, type);
7586 return fold_convert (type, associate_trees (var0, con0,
7587 code, type));
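/* For example, (x + 1) + 2 splits into the variable x and the
   literals 1 and 2; the literals are combined first, so the
   result is x + 3. */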
7591 binary:
7592 if (wins)
7593 t1 = const_binop (code, arg0, arg1, 0);
7594 if (t1 != NULL_TREE)
7596 /* The return value should always have
7597 the same type as the original expression. */
7598 if (TREE_TYPE (t1) != type)
7599 t1 = fold_convert (type, t1);
7601 return t1;
7603 return NULL_TREE;
7605 case MINUS_EXPR:
7606 /* A - (-B) -> A + B */
7607 if (TREE_CODE (arg1) == NEGATE_EXPR)
7608 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7609 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7610 if (TREE_CODE (arg0) == NEGATE_EXPR
7611 && (FLOAT_TYPE_P (type)
7612 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7613 && negate_expr_p (arg1)
7614 && reorder_operands_p (arg0, arg1))
7615 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7616 TREE_OPERAND (arg0, 0));
7617 /* Convert -A - 1 to ~A. */
7618 if (INTEGRAL_TYPE_P (type)
7619 && TREE_CODE (arg0) == NEGATE_EXPR
7620 && integer_onep (arg1))
7621 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7623 /* Convert -1 - A to ~A. */
7624 if (INTEGRAL_TYPE_P (type)
7625 && integer_all_onesp (arg0))
7626 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7628 if (! FLOAT_TYPE_P (type))
7630 if (! wins && integer_zerop (arg0))
7631 return negate_expr (fold_convert (type, arg1));
7632 if (integer_zerop (arg1))
7633 return non_lvalue (fold_convert (type, arg0));
7635 /* Fold A - (A & B) into ~B & A. */
7636 if (!TREE_SIDE_EFFECTS (arg0)
7637 && TREE_CODE (arg1) == BIT_AND_EXPR)
7639 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7640 return fold_build2 (BIT_AND_EXPR, type,
7641 fold_build1 (BIT_NOT_EXPR, type,
7642 TREE_OPERAND (arg1, 0)),
7643 arg0);
7644 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7645 return fold_build2 (BIT_AND_EXPR, type,
7646 fold_build1 (BIT_NOT_EXPR, type,
7647 TREE_OPERAND (arg1, 1)),
7648 arg0);
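/* For example, a - (a & 7) becomes ~7 & a, i.e. A with its low
   three bits cleared. */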
7651 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7652 any power of 2 minus 1. */
7653 if (TREE_CODE (arg0) == BIT_AND_EXPR
7654 && TREE_CODE (arg1) == BIT_AND_EXPR
7655 && operand_equal_p (TREE_OPERAND (arg0, 0),
7656 TREE_OPERAND (arg1, 0), 0))
7658 tree mask0 = TREE_OPERAND (arg0, 1);
7659 tree mask1 = TREE_OPERAND (arg1, 1);
7660 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7662 if (operand_equal_p (tem, mask1, 0))
7664 tem = fold_build2 (BIT_XOR_EXPR, type,
7665 TREE_OPERAND (arg0, 0), mask1);
7666 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7671 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7672 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7673 return non_lvalue (fold_convert (type, arg0));
7675 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7676 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7677 (-ARG1 + ARG0) reduces to -ARG1. */
7678 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7679 return negate_expr (fold_convert (type, arg1));
7681 /* Fold &x - &x. This can happen from &x.foo - &x.
7682 This is unsafe for certain floats even in non-IEEE formats.
7683 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7684 Also note that operand_equal_p is always false if an operand
7685 is volatile. */
7687 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7688 && operand_equal_p (arg0, arg1, 0))
7689 return fold_convert (type, integer_zero_node);
7691 /* A - B -> A + (-B) if B is easily negatable. */
7692 if (!wins && negate_expr_p (arg1)
7693 && ((FLOAT_TYPE_P (type)
7694 /* Avoid this transformation if B is a positive REAL_CST. */
7695 && (TREE_CODE (arg1) != REAL_CST
7696 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7697 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7698 return fold_build2 (PLUS_EXPR, type,
7699 fold_convert (type, arg0),
7700 fold_convert (type, negate_expr (arg1)));
7702 /* Try folding difference of addresses. */
7704 HOST_WIDE_INT diff;
7706 if ((TREE_CODE (arg0) == ADDR_EXPR
7707 || TREE_CODE (arg1) == ADDR_EXPR)
7708 && ptr_difference_const (arg0, arg1, &diff))
7709 return build_int_cst_type (type, diff);
7712 /* Fold &a[i] - &a[j] to i-j. */
7713 if (TREE_CODE (arg0) == ADDR_EXPR
7714 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7715 && TREE_CODE (arg1) == ADDR_EXPR
7716 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7718 tree aref0 = TREE_OPERAND (arg0, 0);
7719 tree aref1 = TREE_OPERAND (arg1, 0);
7720 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7721 TREE_OPERAND (aref1, 0), 0))
7723 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7724 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7725 tree esz = array_ref_element_size (aref0);
7726 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7727 return fold_build2 (MULT_EXPR, type, diff,
7728 fold_convert (type, esz));
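/* For example, the address difference &a[i] - &a[j] folds to
   (i - j) times the element size, without computing either
   address. */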
7733 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7734 of the array. The loop optimizer sometimes produces this type of
7735 expression. */
7736 if (TREE_CODE (arg0) == ADDR_EXPR)
7738 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7739 if (tem)
7740 return fold_convert (type, tem);
7743 if (flag_unsafe_math_optimizations
7744 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7745 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7746 && (tem = distribute_real_division (code, type, arg0, arg1)))
7747 return tem;
7749 if (TREE_CODE (arg0) == MULT_EXPR
7750 && TREE_CODE (arg1) == MULT_EXPR
7751 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7753 /* (A * C) - (B * C) -> (A-B) * C. */
7754 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7755 TREE_OPERAND (arg1, 1), 0))
7756 return fold_build2 (MULT_EXPR, type,
7757 fold_build2 (MINUS_EXPR, type,
7758 TREE_OPERAND (arg0, 0),
7759 TREE_OPERAND (arg1, 0)),
7760 TREE_OPERAND (arg0, 1));
7761 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7762 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7763 TREE_OPERAND (arg1, 0), 0))
7764 return fold_build2 (MULT_EXPR, type,
7765 TREE_OPERAND (arg0, 0),
7766 fold_build2 (MINUS_EXPR, type,
7767 TREE_OPERAND (arg0, 1),
7768 TREE_OPERAND (arg1, 1)));
7771 goto associate;
7773 case MULT_EXPR:
7774 /* (-A) * (-B) -> A * B */
7775 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7776 return fold_build2 (MULT_EXPR, type,
7777 TREE_OPERAND (arg0, 0),
7778 negate_expr (arg1));
7779 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7780 return fold_build2 (MULT_EXPR, type,
7781 negate_expr (arg0),
7782 TREE_OPERAND (arg1, 0));
7784 if (! FLOAT_TYPE_P (type))
7786 if (integer_zerop (arg1))
7787 return omit_one_operand (type, arg1, arg0);
7788 if (integer_onep (arg1))
7789 return non_lvalue (fold_convert (type, arg0));
7790 /* Transform x * -1 into -x. */
7791 if (integer_all_onesp (arg1))
7792 return fold_convert (type, negate_expr (arg0));
7794 /* (a * (1 << b)) is (a << b) */
7795 if (TREE_CODE (arg1) == LSHIFT_EXPR
7796 && integer_onep (TREE_OPERAND (arg1, 0)))
7797 return fold_build2 (LSHIFT_EXPR, type, arg0,
7798 TREE_OPERAND (arg1, 1));
7799 if (TREE_CODE (arg0) == LSHIFT_EXPR
7800 && integer_onep (TREE_OPERAND (arg0, 0)))
7801 return fold_build2 (LSHIFT_EXPR, type, arg1,
7802 TREE_OPERAND (arg0, 1));
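/* For example, a * (1 << b) and (1 << b) * a both become
   a << b. */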
7804 if (TREE_CODE (arg1) == INTEGER_CST
7805 && 0 != (tem = extract_muldiv (op0,
7806 fold_convert (type, arg1),
7807 code, NULL_TREE)))
7808 return fold_convert (type, tem);
7811 else
7813 /* Maybe fold x * 0 to 0. The expressions aren't the same
7814 when x is NaN, since x * 0 is also NaN. Nor are they the
7815 same in modes with signed zeros, since multiplying a
7816 negative value by 0 gives -0, not +0. */
7817 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7818 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7819 && real_zerop (arg1))
7820 return omit_one_operand (type, arg1, arg0);
7821 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7822 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7823 && real_onep (arg1))
7824 return non_lvalue (fold_convert (type, arg0));
7826 /* Transform x * -1.0 into -x. */
7827 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7828 && real_minus_onep (arg1))
7829 return fold_convert (type, negate_expr (arg0));
7831 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7832 if (flag_unsafe_math_optimizations
7833 && TREE_CODE (arg0) == RDIV_EXPR
7834 && TREE_CODE (arg1) == REAL_CST
7835 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7837 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7838 arg1, 0);
7839 if (tem)
7840 return fold_build2 (RDIV_EXPR, type, tem,
7841 TREE_OPERAND (arg0, 1));
7844 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7845 if (operand_equal_p (arg0, arg1, 0))
7847 tree tem = fold_strip_sign_ops (arg0);
7848 if (tem != NULL_TREE)
7850 tem = fold_convert (type, tem);
7851 return fold_build2 (MULT_EXPR, type, tem, tem);
7855 if (flag_unsafe_math_optimizations)
7857 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7858 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7860 /* Optimizations of root(...)*root(...). */
7861 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7863 tree rootfn, arg, arglist;
7864 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7865 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7867 /* Optimize sqrt(x)*sqrt(x) as x. */
7868 if (BUILTIN_SQRT_P (fcode0)
7869 && operand_equal_p (arg00, arg10, 0)
7870 && ! HONOR_SNANS (TYPE_MODE (type)))
7871 return arg00;
7873 /* Optimize root(x)*root(y) as root(x*y). */
7874 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7875 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7876 arglist = build_tree_list (NULL_TREE, arg);
7877 return build_function_call_expr (rootfn, arglist);
7880 /* Optimize expN(x)*expN(y) as expN(x+y). */
7881 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7883 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7884 tree arg = fold_build2 (PLUS_EXPR, type,
7885 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7886 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7887 tree arglist = build_tree_list (NULL_TREE, arg);
7888 return build_function_call_expr (expfn, arglist);
7891 /* Optimizations of pow(...)*pow(...). */
7892 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7893 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7894 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7896 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7897 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7898 1)));
7899 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7900 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7901 1)));
7903 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7904 if (operand_equal_p (arg01, arg11, 0))
7906 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7907 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7908 tree arglist = tree_cons (NULL_TREE, arg,
7909 build_tree_list (NULL_TREE,
7910 arg01));
7911 return build_function_call_expr (powfn, arglist);
7914 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7915 if (operand_equal_p (arg00, arg10, 0))
7917 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7918 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7919 tree arglist = tree_cons (NULL_TREE, arg00,
7920 build_tree_list (NULL_TREE,
7921 arg));
7922 return build_function_call_expr (powfn, arglist);
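/* For example, pow (x, 2.0) * pow (y, 2.0) becomes
   pow (x * y, 2.0), and pow (x, 2.0) * pow (x, 3.0) becomes
   pow (x, 5.0). */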
7926 /* Optimize tan(x)*cos(x) as sin(x). */
7927 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7928 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7929 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7930 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7931 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7932 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7933 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7934 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7936 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7938 if (sinfn != NULL_TREE)
7939 return build_function_call_expr (sinfn,
7940 TREE_OPERAND (arg0, 1));
7943 /* Optimize x*pow(x,c) as pow(x,c+1). */
7944 if (fcode1 == BUILT_IN_POW
7945 || fcode1 == BUILT_IN_POWF
7946 || fcode1 == BUILT_IN_POWL)
7948 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7949 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7950 1)));
7951 if (TREE_CODE (arg11) == REAL_CST
7952 && ! TREE_CONSTANT_OVERFLOW (arg11)
7953 && operand_equal_p (arg0, arg10, 0))
7955 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7956 REAL_VALUE_TYPE c;
7957 tree arg, arglist;
7959 c = TREE_REAL_CST (arg11);
7960 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7961 arg = build_real (type, c);
7962 arglist = build_tree_list (NULL_TREE, arg);
7963 arglist = tree_cons (NULL_TREE, arg0, arglist);
7964 return build_function_call_expr (powfn, arglist);
7968 /* Optimize pow(x,c)*x as pow(x,c+1). */
7969 if (fcode0 == BUILT_IN_POW
7970 || fcode0 == BUILT_IN_POWF
7971 || fcode0 == BUILT_IN_POWL)
7973 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7974 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7975 1)));
7976 if (TREE_CODE (arg01) == REAL_CST
7977 && ! TREE_CONSTANT_OVERFLOW (arg01)
7978 && operand_equal_p (arg1, arg00, 0))
7980 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7981 REAL_VALUE_TYPE c;
7982 tree arg, arglist;
7984 c = TREE_REAL_CST (arg01);
7985 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7986 arg = build_real (type, c);
7987 arglist = build_tree_list (NULL_TREE, arg);
7988 arglist = tree_cons (NULL_TREE, arg1, arglist);
7989 return build_function_call_expr (powfn, arglist);
7993 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7994 if (! optimize_size
7995 && operand_equal_p (arg0, arg1, 0))
7997 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7999 if (powfn)
8001 tree arg = build_real (type, dconst2);
8002 tree arglist = build_tree_list (NULL_TREE, arg);
8003 arglist = tree_cons (NULL_TREE, arg0, arglist);
8004 return build_function_call_expr (powfn, arglist);
8009 goto associate;
8011 case BIT_IOR_EXPR:
8012 bit_ior:
8013 if (integer_all_onesp (arg1))
8014 return omit_one_operand (type, arg1, arg0);
8015 if (integer_zerop (arg1))
8016 return non_lvalue (fold_convert (type, arg0));
8017 if (operand_equal_p (arg0, arg1, 0))
8018 return non_lvalue (fold_convert (type, arg0));
8020 /* ~X | X is -1. */
8021 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8022 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8024 t1 = build_int_cst (type, -1);
8025 t1 = force_fit_type (t1, 0, false, false);
8026 return omit_one_operand (type, t1, arg1);
8029 /* X | ~X is -1. */
8030 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8031 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8033 t1 = build_int_cst (type, -1);
8034 t1 = force_fit_type (t1, 0, false, false);
8035 return omit_one_operand (type, t1, arg0);
8038 t1 = distribute_bit_expr (code, type, arg0, arg1);
8039 if (t1 != NULL_TREE)
8040 return t1;
8042 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8044 This results in more efficient code for machines without a NAND
8045 instruction. Combine will canonicalize to the first form,
8046 which will allow use of NAND instructions provided by the
8047 backend if they exist. */
8048 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8049 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8051 return fold_build1 (BIT_NOT_EXPR, type,
8052 build2 (BIT_AND_EXPR, type,
8053 TREE_OPERAND (arg0, 0),
8054 TREE_OPERAND (arg1, 0)));
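/* This is De Morgan's law: for example, ~a | ~b becomes
   ~(a & b), a single NAND where the target provides one. */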
8057 /* See if this can be simplified into a rotate first. If that
8058 is unsuccessful, continue in the association code. */
8059 goto bit_rotate;
8061 case BIT_XOR_EXPR:
8062 if (integer_zerop (arg1))
8063 return non_lvalue (fold_convert (type, arg0));
8064 if (integer_all_onesp (arg1))
8065 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8066 if (operand_equal_p (arg0, arg1, 0))
8067 return omit_one_operand (type, integer_zero_node, arg0);
8069 /* ~X ^ X is -1. */
8070 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8071 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8073 t1 = build_int_cst (type, -1);
8074 t1 = force_fit_type (t1, 0, false, false);
8075 return omit_one_operand (type, t1, arg1);
8078 /* X ^ ~X is -1. */
8079 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8080 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8082 t1 = build_int_cst (type, -1);
8083 t1 = force_fit_type (t1, 0, false, false);
8084 return omit_one_operand (type, t1, arg0);
8087 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8088 with a constant, and the two constants have no bits in common,
8089 we should treat this as a BIT_IOR_EXPR since this may produce more
8090 simplifications. */
8091 if (TREE_CODE (arg0) == BIT_AND_EXPR
8092 && TREE_CODE (arg1) == BIT_AND_EXPR
8093 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8094 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8095 && integer_zerop (const_binop (BIT_AND_EXPR,
8096 TREE_OPERAND (arg0, 1),
8097 TREE_OPERAND (arg1, 1), 0)))
8099 code = BIT_IOR_EXPR;
8100 goto bit_ior;
8103 /* (X | Y) ^ X -> Y & ~X. */
8104 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8105 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8107 tree t2 = TREE_OPERAND (arg0, 1);
8108 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8109 arg1);
8110 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8111 fold_convert (type, t1));
8112 return t1;
8115 /* (Y | X) ^ X -> Y & ~X. */
8116 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8117 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8119 tree t2 = TREE_OPERAND (arg0, 0);
8120 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8121 arg1);
8122 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8123 fold_convert (type, t1));
8124 return t1;
8127 /* X ^ (X | Y) -> Y & ~X. */
8128 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8129 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8131 tree t2 = TREE_OPERAND (arg1, 1);
8132 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8133 arg0);
8134 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8135 fold_convert (type, t1));
8136 return t1;
8139 /* X ^ (Y | X) -> Y & ~X. */
8140 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8141 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8143 tree t2 = TREE_OPERAND (arg1, 0);
8144 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8145 arg0);
8146 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8147 fold_convert (type, t1));
8148 return t1;
8151 /* Convert ~X ^ ~Y to X ^ Y. */
8152 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8153 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8154 return fold_build2 (code, type,
8155 fold_convert (type, TREE_OPERAND (arg0, 0)),
8156 fold_convert (type, TREE_OPERAND (arg1, 0)));
8158 /* See if this can be simplified into a rotate first. If that
8159 is unsuccessful, continue in the association code. */
8160 goto bit_rotate;
8162 case BIT_AND_EXPR:
8163 if (integer_all_onesp (arg1))
8164 return non_lvalue (fold_convert (type, arg0));
8165 if (integer_zerop (arg1))
8166 return omit_one_operand (type, arg1, arg0);
8167 if (operand_equal_p (arg0, arg1, 0))
8168 return non_lvalue (fold_convert (type, arg0));
8170 /* ~X & X is always zero. */
8171 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8172 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8173 return omit_one_operand (type, integer_zero_node, arg1);
8175 /* X & ~X is always zero. */
8176 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8177 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8178 return omit_one_operand (type, integer_zero_node, arg0);
8180 t1 = distribute_bit_expr (code, type, arg0, arg1);
8181 if (t1 != NULL_TREE)
8182 return t1;
8183 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8184 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8185 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8187 unsigned int prec
8188 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8190 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8191 && (~TREE_INT_CST_LOW (arg1)
8192 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8193 return fold_convert (type, TREE_OPERAND (arg0, 0));
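/* For example, if c has type unsigned char, the mask 0377 covers
   all eight value bits of c, so (int) c & 0377 reduces to
   (int) c. */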
8196 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8198 This results in more efficient code for machines without a NOR
8199 instruction. Combine will canonicalize to the first form,
8200 which will allow use of NOR instructions provided by the
8201 backend if they exist. */
8202 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8203 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8205 return fold_build1 (BIT_NOT_EXPR, type,
8206 build2 (BIT_IOR_EXPR, type,
8207 TREE_OPERAND (arg0, 0),
8208 TREE_OPERAND (arg1, 0)));
8211 goto associate;
8213 case RDIV_EXPR:
8214 /* Don't touch a floating-point divide by zero unless the mode
8215 of the constant can represent infinity. */
8216 if (TREE_CODE (arg1) == REAL_CST
8217 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8218 && real_zerop (arg1))
8219 return NULL_TREE;
8221 /* (-A) / (-B) -> A / B */
8222 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8223 return fold_build2 (RDIV_EXPR, type,
8224 TREE_OPERAND (arg0, 0),
8225 negate_expr (arg1));
8226 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8227 return fold_build2 (RDIV_EXPR, type,
8228 negate_expr (arg0),
8229 TREE_OPERAND (arg1, 0));
8231 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8232 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8233 && real_onep (arg1))
8234 return non_lvalue (fold_convert (type, arg0));
8236 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8237 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8238 && real_minus_onep (arg1))
8239 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8241 /* If ARG1 is a constant, we can convert this to a multiply by the
8242 reciprocal. This does not have the same rounding properties,
8243 so only do this if -funsafe-math-optimizations. We can actually
8244 always safely do it if ARG1 is a power of two, but it's hard to
8245 tell if it is or not in a portable manner. */
8246 if (TREE_CODE (arg1) == REAL_CST)
8248 if (flag_unsafe_math_optimizations
8249 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8250 arg1, 0)))
8251 return fold_build2 (MULT_EXPR, type, arg0, tem);
8252 /* Find the reciprocal if optimizing and the result is exact. */
8253 if (optimize)
8255 REAL_VALUE_TYPE r;
8256 r = TREE_REAL_CST (arg1);
8257 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8259 tem = build_real (type, r);
8260 return fold_build2 (MULT_EXPR, type,
8261 fold_convert (type, arg0), tem);
8265 /* Convert A/B/C to A/(B*C). */
8266 if (flag_unsafe_math_optimizations
8267 && TREE_CODE (arg0) == RDIV_EXPR)
8268 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8269 fold_build2 (MULT_EXPR, type,
8270 TREE_OPERAND (arg0, 1), arg1));
8272 /* Convert A/(B/C) to (A/B)*C. */
8273 if (flag_unsafe_math_optimizations
8274 && TREE_CODE (arg1) == RDIV_EXPR)
8275 return fold_build2 (MULT_EXPR, type,
8276 fold_build2 (RDIV_EXPR, type, arg0,
8277 TREE_OPERAND (arg1, 0)),
8278 TREE_OPERAND (arg1, 1));
8280 /* Convert C1/(X*C2) into (C1/C2)/X. */
8281 if (flag_unsafe_math_optimizations
8282 && TREE_CODE (arg1) == MULT_EXPR
8283 && TREE_CODE (arg0) == REAL_CST
8284 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8286 tree tem = const_binop (RDIV_EXPR, arg0,
8287 TREE_OPERAND (arg1, 1), 0);
8288 if (tem)
8289 return fold_build2 (RDIV_EXPR, type, tem,
8290 TREE_OPERAND (arg1, 0));
8293 if (flag_unsafe_math_optimizations)
8295 enum built_in_function fcode = builtin_mathfn_code (arg1);
8296 /* Optimize x/expN(y) into x*expN(-y). */
8297 if (BUILTIN_EXPONENT_P (fcode))
8299 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8300 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8301 tree arglist = build_tree_list (NULL_TREE,
8302 fold_convert (type, arg));
8303 arg1 = build_function_call_expr (expfn, arglist);
8304 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8307 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8308 if (fcode == BUILT_IN_POW
8309 || fcode == BUILT_IN_POWF
8310 || fcode == BUILT_IN_POWL)
8312 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8313 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8314 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8315 tree neg11 = fold_convert (type, negate_expr (arg11));
8316 tree arglist = tree_cons (NULL_TREE, arg10,
8317 build_tree_list (NULL_TREE, neg11));
8318 arg1 = build_function_call_expr (powfn, arglist);
8319 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8323 if (flag_unsafe_math_optimizations)
8325 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8326 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8328 /* Optimize sin(x)/cos(x) as tan(x). */
8329 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8330 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8331 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8332 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8333 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8335 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8337 if (tanfn != NULL_TREE)
8338 return build_function_call_expr (tanfn,
8339 TREE_OPERAND (arg0, 1));
8342 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8343 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8344 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8345 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8346 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8347 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8349 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8351 if (tanfn != NULL_TREE)
8353 tree tmp = TREE_OPERAND (arg0, 1);
8354 tmp = build_function_call_expr (tanfn, tmp);
8355 return fold_build2 (RDIV_EXPR, type,
8356 build_real (type, dconst1), tmp);
8360 /* Optimize pow(x,c)/x as pow(x,c-1). */
8361 if (fcode0 == BUILT_IN_POW
8362 || fcode0 == BUILT_IN_POWF
8363 || fcode0 == BUILT_IN_POWL)
8365 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8366 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8367 if (TREE_CODE (arg01) == REAL_CST
8368 && ! TREE_CONSTANT_OVERFLOW (arg01)
8369 && operand_equal_p (arg1, arg00, 0))
8371 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8372 REAL_VALUE_TYPE c;
8373 tree arg, arglist;
8375 c = TREE_REAL_CST (arg01);
8376 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8377 arg = build_real (type, c);
8378 arglist = build_tree_list (NULL_TREE, arg);
8379 arglist = tree_cons (NULL_TREE, arg1, arglist);
8380 return build_function_call_expr (powfn, arglist);
8384 goto binary;
8386 case TRUNC_DIV_EXPR:
8387 case ROUND_DIV_EXPR:
8388 case FLOOR_DIV_EXPR:
8389 case CEIL_DIV_EXPR:
8390 case EXACT_DIV_EXPR:
8391 if (integer_onep (arg1))
8392 return non_lvalue (fold_convert (type, arg0));
8393 if (integer_zerop (arg1))
8394 return NULL_TREE;
8395 /* X / -1 is -X. */
8396 if (!TYPE_UNSIGNED (type)
8397 && TREE_CODE (arg1) == INTEGER_CST
8398 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8399 && TREE_INT_CST_HIGH (arg1) == -1)
8400 return fold_convert (type, negate_expr (arg0));
8402 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8403 operation, EXACT_DIV_EXPR.
8405 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8406 At one time others generated faster code; it's not clear whether they do
8407 after the last round of changes to the DIV code in expmed.c. */
8408 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8409 && multiple_of_p (type, arg0, arg1))
8410 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8412 if (TREE_CODE (arg1) == INTEGER_CST
8413 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8414 return fold_convert (type, tem);
8416 goto binary;
8418 case CEIL_MOD_EXPR:
8419 case FLOOR_MOD_EXPR:
8420 case ROUND_MOD_EXPR:
8421 case TRUNC_MOD_EXPR:
8422 /* X % 1 is always zero, but be sure to preserve any side
8423 effects in X. */
8424 if (integer_onep (arg1))
8425 return omit_one_operand (type, integer_zero_node, arg0);
8427 /* For X % 0, return the expression unchanged so that we get the
8428 proper warnings and errors. */
8429 if (integer_zerop (arg1))
8430 return NULL_TREE;
8432 /* 0 % X is always zero, but be sure to preserve any side
8433 effects in X. Place this after checking for X == 0. */
8434 if (integer_zerop (arg0))
8435 return omit_one_operand (type, integer_zero_node, arg1);
8437 /* X % -1 is zero. */
8438 if (!TYPE_UNSIGNED (type)
8439 && TREE_CODE (arg1) == INTEGER_CST
8440 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8441 && TREE_INT_CST_HIGH (arg1) == -1)
8442 return omit_one_operand (type, integer_zero_node, arg0);
8444 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8445 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
8446 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8447 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8448 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8450 unsigned HOST_WIDE_INT high, low;
8451 tree mask;
8452 int l;
8454 l = tree_log2 (arg1);
8455 if (l >= HOST_BITS_PER_WIDE_INT)
8457 high = ((unsigned HOST_WIDE_INT) 1
8458 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8459 low = -1;
8461 else
8463 high = 0;
8464 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8467 mask = build_int_cst_wide (type, low, high);
8468 return fold_build2 (BIT_AND_EXPR, type,
8469 fold_convert (type, arg0), mask);
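/* For example, for unsigned x, x % 8 becomes x & 7; the mask
   built above is C - 1, using both halves of the constant when
   C is wider than a HOST_WIDE_INT. */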
8472 /* X % -C is the same as X % C. */
8473 if (code == TRUNC_MOD_EXPR
8474 && !TYPE_UNSIGNED (type)
8475 && TREE_CODE (arg1) == INTEGER_CST
8476 && !TREE_CONSTANT_OVERFLOW (arg1)
8477 && TREE_INT_CST_HIGH (arg1) < 0
8478 && !flag_trapv
8479 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8480 && !sign_bit_p (arg1, arg1))
8481 return fold_build2 (code, type, fold_convert (type, arg0),
8482 fold_convert (type, negate_expr (arg1)));
8484 /* X % -Y is the same as X % Y. */
8485 if (code == TRUNC_MOD_EXPR
8486 && !TYPE_UNSIGNED (type)
8487 && TREE_CODE (arg1) == NEGATE_EXPR
8488 && !flag_trapv)
8489 return fold_build2 (code, type, fold_convert (type, arg0),
8490 fold_convert (type, TREE_OPERAND (arg1, 0)));
8492 if (TREE_CODE (arg1) == INTEGER_CST
8493 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8494 return fold_convert (type, tem);
8496 goto binary;
8498 case LROTATE_EXPR:
8499 case RROTATE_EXPR:
8500 if (integer_all_onesp (arg0))
8501 return omit_one_operand (type, arg0, arg1);
8502 goto shift;
8504 case RSHIFT_EXPR:
8505 /* Optimize -1 >> x for arithmetic right shifts. */
8506 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8507 return omit_one_operand (type, arg0, arg1);
8508 /* ... fall through ... */
8510 case LSHIFT_EXPR:
8511 shift:
8512 if (integer_zerop (arg1))
8513 return non_lvalue (fold_convert (type, arg0));
8514 if (integer_zerop (arg0))
8515 return omit_one_operand (type, arg0, arg1);
8517 /* Since a negative shift count is not well-defined,
8518 don't try to compute it in the compiler. */
8519 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8520 return NULL_TREE;
8522 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8523 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8524 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8525 && host_integerp (TREE_OPERAND (arg0, 1), false)
8526 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8528 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8529 + TREE_INT_CST_LOW (arg1));
8531 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8532 being well defined. */
8533 if (low >= TYPE_PRECISION (type))
8535 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8536 low = low % TYPE_PRECISION (type);
8537 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8538 return build_int_cst (type, 0);
8539 else
8540 low = TYPE_PRECISION (type) - 1;
8543 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8544 build_int_cst (type, low));
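/* For example, (x << 2) << 3 becomes x << 5. When the combined
   count reaches the precision, rotates are reduced modulo the
   precision, logical shifts fold to 0, and arithmetic right
   shifts are clamped to a count of precision - 1. */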
8547 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8548 into x & ((unsigned)-1 >> c) for unsigned types. */
8549 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8550 || (TYPE_UNSIGNED (type)
8551 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8552 && host_integerp (arg1, false)
8553 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8554 && host_integerp (TREE_OPERAND (arg0, 1), false)
8555 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8557 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8558 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8559 tree lshift;
8560 tree arg00;
8562 if (low0 == low1)
8564 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8566 lshift = build_int_cst (type, -1);
8567 lshift = int_const_binop (code, lshift, arg1, 0);
8569 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8573 /* Rewrite an LROTATE_EXPR by a constant into an
8574 RROTATE_EXPR by a new constant. */
8575 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8577 tree tem = build_int_cst (NULL_TREE,
8578 GET_MODE_BITSIZE (TYPE_MODE (type)));
8579 tem = fold_convert (TREE_TYPE (arg1), tem);
8580 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8581 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8584 /* If we have a rotate of a bit operation with the rotate count and
8585 the second operand of the bit operation both constant,
8586 permute the two operations. */
8587 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8588 && (TREE_CODE (arg0) == BIT_AND_EXPR
8589 || TREE_CODE (arg0) == BIT_IOR_EXPR
8590 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8591 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8592 return fold_build2 (TREE_CODE (arg0), type,
8593 fold_build2 (code, type,
8594 TREE_OPERAND (arg0, 0), arg1),
8595 fold_build2 (code, type,
8596 TREE_OPERAND (arg0, 1), arg1));
8598 /* Two consecutive rotates adding up to the width of the mode can
8599 be ignored. */
8600 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8601 && TREE_CODE (arg0) == RROTATE_EXPR
8602 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8603 && TREE_INT_CST_HIGH (arg1) == 0
8604 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8605 && ((TREE_INT_CST_LOW (arg1)
8606 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8607 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8608 return TREE_OPERAND (arg0, 0);
8610 goto binary;
8612 case MIN_EXPR:
8613 if (operand_equal_p (arg0, arg1, 0))
8614 return omit_one_operand (type, arg0, arg1);
8615 if (INTEGRAL_TYPE_P (type)
8616 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8617 return omit_one_operand (type, arg1, arg0);
8618 goto associate;
8620 case MAX_EXPR:
8621 if (operand_equal_p (arg0, arg1, 0))
8622 return omit_one_operand (type, arg0, arg1);
8623 if (INTEGRAL_TYPE_P (type)
8624 && TYPE_MAX_VALUE (type)
8625 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8626 return omit_one_operand (type, arg1, arg0);
8627 goto associate;
8629 case TRUTH_ANDIF_EXPR:
8630 /* Note that the operands of this must be ints
8631 and their values must be 0 or 1.
8632 ("true" is a fixed value perhaps depending on the language.) */
8633 /* If first arg is constant zero, return it. */
8634 if (integer_zerop (arg0))
8635 return fold_convert (type, arg0);
8636 case TRUTH_AND_EXPR:
8637 /* If either arg is constant true, drop it. */
8638 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8639 return non_lvalue (fold_convert (type, arg1));
8640 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8641 /* Preserve sequence points. */
8642 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8643 return non_lvalue (fold_convert (type, arg0));
8644 /* If second arg is constant zero, result is zero, but first arg
8645 must be evaluated. */
8646 if (integer_zerop (arg1))
8647 return omit_one_operand (type, arg1, arg0);
8648 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8649 case will be handled here. */
8650 if (integer_zerop (arg0))
8651 return omit_one_operand (type, arg0, arg1);
8653 /* !X && X is always false. */
8654 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8655 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8656 return omit_one_operand (type, integer_zero_node, arg1);
8657 /* X && !X is always false. */
8658 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8659 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8660 return omit_one_operand (type, integer_zero_node, arg0);
8662 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8663 means A >= Y && A != MAX, but in this case we know that
8664 A < X <= MAX. */
8666 if (!TREE_SIDE_EFFECTS (arg0)
8667 && !TREE_SIDE_EFFECTS (arg1))
8669 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8670 if (tem && !operand_equal_p (tem, arg0, 0))
8671 return fold_build2 (code, type, tem, arg1);
8673 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8674 if (tem && !operand_equal_p (tem, arg1, 0))
8675 return fold_build2 (code, type, arg0, tem);
8678 truth_andor:
8679 /* We only do these simplifications if we are optimizing. */
8680 if (!optimize)
8681 return NULL_TREE;
8683 /* Check for things like (A || B) && (A || C). We can convert this
8684 to A || (B && C). Note that either operator can be any of the four
8685 truth and/or operations and the transformation will still be
8686 valid. Also note that we only care about order for the
8687 ANDIF and ORIF operators. If B contains side effects, this
8688 might change the truth-value of A. */
8689 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8690 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8691 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8692 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8693 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8694 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8696 tree a00 = TREE_OPERAND (arg0, 0);
8697 tree a01 = TREE_OPERAND (arg0, 1);
8698 tree a10 = TREE_OPERAND (arg1, 0);
8699 tree a11 = TREE_OPERAND (arg1, 1);
8700 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8701 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8702 && (code == TRUTH_AND_EXPR
8703 || code == TRUTH_OR_EXPR));
8705 if (operand_equal_p (a00, a10, 0))
8706 return fold_build2 (TREE_CODE (arg0), type, a00,
8707 fold_build2 (code, type, a01, a11));
8708 else if (commutative && operand_equal_p (a00, a11, 0))
8709 return fold_build2 (TREE_CODE (arg0), type, a00,
8710 fold_build2 (code, type, a01, a10));
8711 else if (commutative && operand_equal_p (a01, a10, 0))
8712 return fold_build2 (TREE_CODE (arg0), type, a01,
8713 fold_build2 (code, type, a00, a11));
8715 /* This case is tricky because we must either have commutative
8716 operators or else A10 must not have side-effects. */
8718 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8719 && operand_equal_p (a01, a11, 0))
8720 return fold_build2 (TREE_CODE (arg0), type,
8721 fold_build2 (code, type, a00, a10),
8722 a01);
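/* For example, (a || b) && (a || c) becomes a || (b && c),
   factoring out the common operand a. */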
8725 /* See if we can build a range comparison. */
8726 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8727 return tem;
8729 /* Check for the possibility of merging component references. If our
8730 lhs is another similar operation, try to merge its rhs with our
8731 rhs. Then try to merge our lhs and rhs. */
8732 if (TREE_CODE (arg0) == code
8733 && 0 != (tem = fold_truthop (code, type,
8734 TREE_OPERAND (arg0, 1), arg1)))
8735 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8737 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8738 return tem;
8740 return NULL_TREE;
8742 case TRUTH_ORIF_EXPR:
8743 /* Note that the operands of this must be ints
8744 and their values must be 0 or 1.
8745 ("true" is a fixed value perhaps depending on the language.) */
8746 /* If first arg is constant true, return it. */
8747 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8748 return fold_convert (type, arg0);
8749 case TRUTH_OR_EXPR:
8750 /* If either arg is constant zero, drop it. */
8751 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8752 return non_lvalue (fold_convert (type, arg1));
8753 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8754 /* Preserve sequence points. */
8755 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8756 return non_lvalue (fold_convert (type, arg0));
8757 /* If second arg is constant true, result is true, but we must
8758 evaluate first arg. */
8759 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8760 return omit_one_operand (type, arg1, arg0);
8761 /* Likewise for first arg, but note this only occurs here for
8762 TRUTH_OR_EXPR. */
8763 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8764 return omit_one_operand (type, arg0, arg1);
8766 /* !X || X is always true. */
8767 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8768 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8769 return omit_one_operand (type, integer_one_node, arg1);
8770 /* X || !X is always true. */
8771 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8772 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8773 return omit_one_operand (type, integer_one_node, arg0);
8775 goto truth_andor;
8777 case TRUTH_XOR_EXPR:
8778 /* If the second arg is constant zero, drop it. */
8779 if (integer_zerop (arg1))
8780 return non_lvalue (fold_convert (type, arg0));
8781 /* If the second arg is constant true, this is a logical inversion. */
8782 if (integer_onep (arg1))
8784 /* Only call invert_truthvalue if operand is a truth value. */
8785 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8786 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8787 else
8788 tem = invert_truthvalue (arg0);
8789 return non_lvalue (fold_convert (type, tem));
8791 /* Identical arguments cancel to zero. */
8792 if (operand_equal_p (arg0, arg1, 0))
8793 return omit_one_operand (type, integer_zero_node, arg0);
8795 /* !X ^ X is always true. */
8796 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8797 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8798 return omit_one_operand (type, integer_one_node, arg1);
8800 /* X ^ !X is always true. */
8801 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8802 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8803 return omit_one_operand (type, integer_one_node, arg0);
8805 return NULL_TREE;
8807 case EQ_EXPR:
8808 case NE_EXPR:
8809 case LT_EXPR:
8810 case GT_EXPR:
8811 case LE_EXPR:
8812 case GE_EXPR:
8813 /* If one arg is a real or integer constant, put it last. */
8814 if (tree_swap_operands_p (arg0, arg1, true))
8815 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8817 /* bool_var != 0 becomes bool_var. */
8818 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8819 && code == NE_EXPR)
8820 return non_lvalue (fold_convert (type, arg0));
8822 /* bool_var == 1 becomes bool_var. */
8823 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8824 && code == EQ_EXPR)
8825 return non_lvalue (fold_convert (type, arg0));
8827 /* If this is an equality comparison of the address of a non-weak
8828 object against zero, then we know the result. */
8829 if ((code == EQ_EXPR || code == NE_EXPR)
8830 && TREE_CODE (arg0) == ADDR_EXPR
8831 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8832 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8833 && integer_zerop (arg1))
8834 return constant_boolean_node (code != EQ_EXPR, type);
8836 /* If this is an equality comparison of the address of two non-weak,
8837 unaliased symbols neither of which are extern (since we do not
8838 have access to attributes for externs), then we know the result. */
8839 if ((code == EQ_EXPR || code == NE_EXPR)
8840 && TREE_CODE (arg0) == ADDR_EXPR
8841 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8842 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8843 && ! lookup_attribute ("alias",
8844 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8845 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8846 && TREE_CODE (arg1) == ADDR_EXPR
8847 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8848 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8849 && ! lookup_attribute ("alias",
8850 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8851 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8853 /* We know that we're looking at the address of two
8854 non-weak, unaliased, static _DECL nodes.
8856 It is both wasteful and incorrect to call operand_equal_p
8857 to compare the two ADDR_EXPR nodes. It is wasteful in that
8858 all we need to do is test pointer equality for the arguments
8859 to the two ADDR_EXPR nodes. It is incorrect to use
8860 operand_equal_p as that function is NOT equivalent to a
8861 C equality test. It can in fact return false for two
8862 objects which would test as equal using the C equality
8863 operator. */
8864 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8865 return constant_boolean_node (equal
8866 ? code == EQ_EXPR : code != EQ_EXPR,
8867 type);
8870 /* If this is a comparison of two exprs that look like an
8871 ARRAY_REF of the same object, then we can fold this to a
8872 comparison of the two offsets. */
8873 if (TREE_CODE_CLASS (code) == tcc_comparison)
8875 tree base0, offset0, base1, offset1;
8877 if (extract_array_ref (arg0, &base0, &offset0)
8878 && extract_array_ref (arg1, &base1, &offset1)
8879 && operand_equal_p (base0, base1, 0))
8881 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
8882 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
8883 offset0 = NULL_TREE;
8884 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
8885 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
8886 offset1 = NULL_TREE;
8887 if (offset0 == NULL_TREE
8888 && offset1 == NULL_TREE)
8890 offset0 = integer_zero_node;
8891 offset1 = integer_zero_node;
8893 else if (offset0 == NULL_TREE)
8894 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8895 else if (offset1 == NULL_TREE)
8896 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8898 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8899 return fold_build2 (code, type, offset0, offset1);
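/* Illustrative example: a comparison such as "&a[i] < &a[j]" on a common
   base object reduces to a comparison of the two offsets, roughly
   "i < j", when the offset types agree.  */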
8903 /* Transform comparisons of the form X +- C CMP X. */
8904 if ((code != EQ_EXPR && code != NE_EXPR)
8905 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8906 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8907 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8908 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8909 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8910 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8911 && !(flag_wrapv || flag_trapv))))
8913 tree arg01 = TREE_OPERAND (arg0, 1);
8914 enum tree_code code0 = TREE_CODE (arg0);
8915 int is_positive;
8917 if (TREE_CODE (arg01) == REAL_CST)
8918 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8919 else
8920 is_positive = tree_int_cst_sgn (arg01);
8922 /* (X - c) > X becomes false. */
8923 if (code == GT_EXPR
8924 && ((code0 == MINUS_EXPR && is_positive >= 0)
8925 || (code0 == PLUS_EXPR && is_positive <= 0)))
8926 return constant_boolean_node (0, type);
8928 /* Likewise (X + c) < X becomes false. */
8929 if (code == LT_EXPR
8930 && ((code0 == PLUS_EXPR && is_positive >= 0)
8931 || (code0 == MINUS_EXPR && is_positive <= 0)))
8932 return constant_boolean_node (0, type);
8934 /* Convert (X - c) <= X to true. */
8935 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8936 && code == LE_EXPR
8937 && ((code0 == MINUS_EXPR && is_positive >= 0)
8938 || (code0 == PLUS_EXPR && is_positive <= 0)))
8939 return constant_boolean_node (1, type);
8941 /* Convert (X + c) >= X to true. */
8942 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8943 && code == GE_EXPR
8944 && ((code0 == PLUS_EXPR && is_positive >= 0)
8945 || (code0 == MINUS_EXPR && is_positive <= 0)))
8946 return constant_boolean_node (1, type);
8948 if (TREE_CODE (arg01) == INTEGER_CST)
8950 /* Convert X + c > X and X - c < X to true for integers. */
8951 if (code == GT_EXPR
8952 && ((code0 == PLUS_EXPR && is_positive > 0)
8953 || (code0 == MINUS_EXPR && is_positive < 0)))
8954 return constant_boolean_node (1, type);
8956 if (code == LT_EXPR
8957 && ((code0 == MINUS_EXPR && is_positive > 0)
8958 || (code0 == PLUS_EXPR && is_positive < 0)))
8959 return constant_boolean_node (1, type);
8961 /* Convert X + c <= X and X - c >= X to false for integers. */
8962 if (code == LE_EXPR
8963 && ((code0 == PLUS_EXPR && is_positive > 0)
8964 || (code0 == MINUS_EXPR && is_positive < 0)))
8965 return constant_boolean_node (0, type);
8967 if (code == GE_EXPR
8968 && ((code0 == MINUS_EXPR && is_positive > 0)
8969 || (code0 == PLUS_EXPR && is_positive < 0)))
8970 return constant_boolean_node (0, type);
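/* Illustrative examples of the rules above, for signed integer x
   without -fwrapv/-ftrapv: "x - 1 > x" folds to 0, "x + 1 > x" folds
   to 1, and "x + 1 <= x" folds to 0.  */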
8974 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8975 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8976 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8977 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8978 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8979 && !(flag_wrapv || flag_trapv))
8980 && (TREE_CODE (arg1) == INTEGER_CST
8981 && !TREE_OVERFLOW (arg1)))
8983 tree const1 = TREE_OPERAND (arg0, 1);
8984 tree const2 = arg1;
8985 tree variable = TREE_OPERAND (arg0, 0);
8986 tree lhs;
8987 int lhs_add;
8988 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8990 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8991 TREE_TYPE (arg1), const2, const1);
8992 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8993 && (TREE_CODE (lhs) != INTEGER_CST
8994 || !TREE_OVERFLOW (lhs)))
8995 return fold_build2 (code, type, variable, lhs);
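/* Illustrative example: "(x + 3) == 7" is rewritten as "x == 4",
   provided the adjusted constant does not overflow.  */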
8998 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9000 tree targ0 = strip_float_extensions (arg0);
9001 tree targ1 = strip_float_extensions (arg1);
9002 tree newtype = TREE_TYPE (targ0);
9004 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9005 newtype = TREE_TYPE (targ1);
9007 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9008 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9009 return fold_build2 (code, type, fold_convert (newtype, targ0),
9010 fold_convert (newtype, targ1));
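/* Illustrative example: with float f1, f2, the comparison
   "(double) f1 < (double) f2" is carried out as "f1 < f2".  */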
9012 /* (-a) CMP (-b) -> b CMP a */
9013 if (TREE_CODE (arg0) == NEGATE_EXPR
9014 && TREE_CODE (arg1) == NEGATE_EXPR)
9015 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9016 TREE_OPERAND (arg0, 0));
9018 if (TREE_CODE (arg1) == REAL_CST)
9020 REAL_VALUE_TYPE cst;
9021 cst = TREE_REAL_CST (arg1);
9023 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9024 if (TREE_CODE (arg0) == NEGATE_EXPR)
9025 return
9026 fold_build2 (swap_tree_comparison (code), type,
9027 TREE_OPERAND (arg0, 0),
9028 build_real (TREE_TYPE (arg1),
9029 REAL_VALUE_NEGATE (cst)));
9031 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9032 /* a CMP (-0) -> a CMP 0 */
9033 if (REAL_VALUE_MINUS_ZERO (cst))
9034 return fold_build2 (code, type, arg0,
9035 build_real (TREE_TYPE (arg1), dconst0));
9037 /* x != NaN is always true, other ops are always false. */
9038 if (REAL_VALUE_ISNAN (cst)
9039 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9041 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9042 return omit_one_operand (type, tem, arg0);
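/* Illustrative example: "x != NaN" folds to 1 and "x < NaN" folds to 0
   when signaling NaNs need not be honored.  */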
9045 /* Fold comparisons against infinity. */
9046 if (REAL_VALUE_ISINF (cst))
9048 tem = fold_inf_compare (code, type, arg0, arg1);
9049 if (tem != NULL_TREE)
9050 return tem;
9054 /* If this is a comparison of a real constant with a PLUS_EXPR
9055 or a MINUS_EXPR of a real constant, we can convert it into a
9056 comparison with a revised real constant as long as no overflow
9057 occurs when unsafe_math_optimizations are enabled. */
9058 if (flag_unsafe_math_optimizations
9059 && TREE_CODE (arg1) == REAL_CST
9060 && (TREE_CODE (arg0) == PLUS_EXPR
9061 || TREE_CODE (arg0) == MINUS_EXPR)
9062 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9063 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9064 ? MINUS_EXPR : PLUS_EXPR,
9065 arg1, TREE_OPERAND (arg0, 1), 0))
9066 && ! TREE_CONSTANT_OVERFLOW (tem))
9067 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9069 /* Likewise, we can simplify a comparison of a real constant with
9070 a MINUS_EXPR whose first operand is also a real constant, i.e.
9071 (c1 - x) < c2 becomes x > c1-c2. */
9072 if (flag_unsafe_math_optimizations
9073 && TREE_CODE (arg1) == REAL_CST
9074 && TREE_CODE (arg0) == MINUS_EXPR
9075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9076 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9077 arg1, 0))
9078 && ! TREE_CONSTANT_OVERFLOW (tem))
9079 return fold_build2 (swap_tree_comparison (code), type,
9080 TREE_OPERAND (arg0, 1), tem);
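/* Illustrative example (with -funsafe-math-optimizations):
   "(10.0 - x) < 2.0" is rewritten as "x > 8.0".  */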
9082 /* Fold comparisons against built-in math functions. */
9083 if (TREE_CODE (arg1) == REAL_CST
9084 && flag_unsafe_math_optimizations
9085 && ! flag_errno_math)
9087 enum built_in_function fcode = builtin_mathfn_code (arg0);
9089 if (fcode != END_BUILTINS)
9091 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9092 if (tem != NULL_TREE)
9093 return tem;
9098 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9099 if (TREE_CONSTANT (arg1)
9100 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9101 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9102 /* This optimization is invalid for ordered comparisons
9103 if CONST+INCR overflows or if foo+incr might overflow.
9104 This optimization is invalid for floating point due to rounding.
9105 For pointer types we assume overflow doesn't happen. */
9106 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9107 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9108 && (code == EQ_EXPR || code == NE_EXPR))))
9110 tree varop, newconst;
9112 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9114 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9115 arg1, TREE_OPERAND (arg0, 1));
9116 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9117 TREE_OPERAND (arg0, 0),
9118 TREE_OPERAND (arg0, 1));
9120 else
9122 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9123 arg1, TREE_OPERAND (arg0, 1));
9124 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9125 TREE_OPERAND (arg0, 0),
9126 TREE_OPERAND (arg0, 1));
9130 /* If VAROP is a reference to a bitfield, we must mask
9131 the constant by the width of the field. */
9132 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9133 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9134 && host_integerp (DECL_SIZE (TREE_OPERAND
9135 (TREE_OPERAND (varop, 0), 1)), 1))
9137 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9138 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9139 tree folded_compare, shift;
9141 /* First check whether the comparison would come out
9142 always the same. If we don't do that we would
9143 change the meaning with the masking. */
9144 folded_compare = fold_build2 (code, type,
9145 TREE_OPERAND (varop, 0), arg1);
9146 if (integer_zerop (folded_compare)
9147 || integer_onep (folded_compare))
9148 return omit_one_operand (type, folded_compare, varop);
9150 shift = build_int_cst (NULL_TREE,
9151 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9152 shift = fold_convert (TREE_TYPE (varop), shift);
9153 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9154 newconst, shift);
9155 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9156 newconst, shift);
9159 return fold_build2 (code, type, varop, newconst);
9162 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9163 This transformation affects the cases which are handled in later
9164 optimizations involving comparisons with non-negative constants. */
9165 if (TREE_CODE (arg1) == INTEGER_CST
9166 && TREE_CODE (arg0) != INTEGER_CST
9167 && tree_int_cst_sgn (arg1) > 0)
9169 switch (code)
9171 case GE_EXPR:
9172 arg1 = const_binop (MINUS_EXPR, arg1,
9173 build_int_cst (TREE_TYPE (arg1), 1), 0);
9174 return fold_build2 (GT_EXPR, type, arg0,
9175 fold_convert (TREE_TYPE (arg0), arg1));
9177 case LT_EXPR:
9178 arg1 = const_binop (MINUS_EXPR, arg1,
9179 build_int_cst (TREE_TYPE (arg1), 1), 0);
9180 return fold_build2 (LE_EXPR, type, arg0,
9181 fold_convert (TREE_TYPE (arg0), arg1));
9183 default:
9184 break;
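/* Illustrative example of the rewrite above: for a positive constant,
   "x >= 5" becomes "x > 4" and "x < 5" becomes "x <= 4".  */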
9188 /* Comparisons with the highest or lowest possible integer of
9189 the specified size will have known values. */
9191 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9193 if (TREE_CODE (arg1) == INTEGER_CST
9194 && ! TREE_CONSTANT_OVERFLOW (arg1)
9195 && width <= 2 * HOST_BITS_PER_WIDE_INT
9196 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9197 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9199 HOST_WIDE_INT signed_max_hi;
9200 unsigned HOST_WIDE_INT signed_max_lo;
9201 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9203 if (width <= HOST_BITS_PER_WIDE_INT)
9205 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9206 - 1;
9207 signed_max_hi = 0;
9208 max_hi = 0;
9210 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9212 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9213 min_lo = 0;
9214 min_hi = 0;
9216 else
9218 max_lo = signed_max_lo;
9219 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9220 min_hi = -1;
9223 else
9225 width -= HOST_BITS_PER_WIDE_INT;
9226 signed_max_lo = -1;
9227 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9228 - 1;
9229 max_lo = -1;
9230 min_lo = 0;
9232 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9234 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9235 min_hi = 0;
9237 else
9239 max_hi = signed_max_hi;
9240 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9244 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9245 && TREE_INT_CST_LOW (arg1) == max_lo)
9246 switch (code)
9248 case GT_EXPR:
9249 return omit_one_operand (type, integer_zero_node, arg0);
9251 case GE_EXPR:
9252 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9254 case LE_EXPR:
9255 return omit_one_operand (type, integer_one_node, arg0);
9257 case LT_EXPR:
9258 return fold_build2 (NE_EXPR, type, arg0, arg1);
9260 /* The GE_EXPR and LT_EXPR cases above are not normally
9261 reached because of previous transformations. */
9263 default:
9264 break;
9266 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9267 == max_hi
9268 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9269 switch (code)
9271 case GT_EXPR:
9272 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9273 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9274 case LE_EXPR:
9275 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9276 return fold_build2 (NE_EXPR, type, arg0, arg1);
9277 default:
9278 break;
9280 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9281 == min_hi
9282 && TREE_INT_CST_LOW (arg1) == min_lo)
9283 switch (code)
9285 case LT_EXPR:
9286 return omit_one_operand (type, integer_zero_node, arg0);
9288 case LE_EXPR:
9289 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9291 case GE_EXPR:
9292 return omit_one_operand (type, integer_one_node, arg0);
9294 case GT_EXPR:
9295 return fold_build2 (NE_EXPR, type, op0, op1);
9297 default:
9298 break;
9300 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9301 == min_hi
9302 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9303 switch (code)
9305 case GE_EXPR:
9306 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9307 return fold_build2 (NE_EXPR, type, arg0, arg1);
9308 case LT_EXPR:
9309 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9310 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9311 default:
9312 break;
9315 else if (!in_gimple_form
9316 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9317 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9318 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9319 /* signed_type does not work on pointer types. */
9320 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9322 /* The following case also applies to X < signed_max+1
9323 and X >= signed_max+1 because of previous transformations. */
9324 if (code == LE_EXPR || code == GT_EXPR)
9326 tree st0, st1;
9327 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9328 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9329 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9330 type, fold_convert (st0, arg0),
9331 build_int_cst (st1, 0));
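/* Illustrative example: for unsigned int x, "x <= INT_MAX" becomes
   "(int) x >= 0" and "x > INT_MAX" becomes "(int) x < 0".  */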
9337 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9338 a MINUS_EXPR of a constant, we can convert it into a comparison with
9339 a revised constant as long as no overflow occurs. */
9340 if ((code == EQ_EXPR || code == NE_EXPR)
9341 && TREE_CODE (arg1) == INTEGER_CST
9342 && (TREE_CODE (arg0) == PLUS_EXPR
9343 || TREE_CODE (arg0) == MINUS_EXPR)
9344 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9345 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9346 ? MINUS_EXPR : PLUS_EXPR,
9347 arg1, TREE_OPERAND (arg0, 1), 0))
9348 && ! TREE_CONSTANT_OVERFLOW (tem))
9349 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9351 /* Similarly for a NEGATE_EXPR. */
9352 else if ((code == EQ_EXPR || code == NE_EXPR)
9353 && TREE_CODE (arg0) == NEGATE_EXPR
9354 && TREE_CODE (arg1) == INTEGER_CST
9355 && 0 != (tem = negate_expr (arg1))
9356 && TREE_CODE (tem) == INTEGER_CST
9357 && ! TREE_CONSTANT_OVERFLOW (tem))
9358 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9360 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9361 for !=. Don't do this for ordered comparisons due to overflow. */
9362 else if ((code == NE_EXPR || code == EQ_EXPR)
9363 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9364 return fold_build2 (code, type,
9365 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
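/* Illustrative example: "x - y == 0" becomes "x == y"; the ordered
   variants are left alone because x - y may overflow.  */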
9367 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9368 && (TREE_CODE (arg0) == NOP_EXPR
9369 || TREE_CODE (arg0) == CONVERT_EXPR))
9371 /* If we are widening one operand of an integer comparison,
9372 see if the other operand is similarly being widened. Perhaps we
9373 can do the comparison in the narrower type. */
9374 tem = fold_widened_comparison (code, type, arg0, arg1);
9375 if (tem)
9376 return tem;
9378 /* Or if we are changing signedness. */
9379 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9380 if (tem)
9381 return tem;
9384 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9385 constant, we can simplify it. */
9386 else if (TREE_CODE (arg1) == INTEGER_CST
9387 && (TREE_CODE (arg0) == MIN_EXPR
9388 || TREE_CODE (arg0) == MAX_EXPR)
9389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9391 tem = optimize_minmax_comparison (code, type, op0, op1);
9392 if (tem)
9393 return tem;
9395 return NULL_TREE;
9398 /* If we are comparing an ABS_EXPR with a constant, we can
9399 convert all the cases into explicit comparisons, but they may
9400 well not be faster than doing the ABS and one comparison.
9401 But ABS (X) <= C is a range comparison, which becomes a subtraction
9402 and a comparison, and is probably faster. */
9403 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9404 && TREE_CODE (arg0) == ABS_EXPR
9405 && ! TREE_SIDE_EFFECTS (arg0)
9406 && (0 != (tem = negate_expr (arg1)))
9407 && TREE_CODE (tem) == INTEGER_CST
9408 && ! TREE_CONSTANT_OVERFLOW (tem))
9409 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9410 build2 (GE_EXPR, type,
9411 TREE_OPERAND (arg0, 0), tem),
9412 build2 (LE_EXPR, type,
9413 TREE_OPERAND (arg0, 0), arg1));
9415 /* Convert ABS_EXPR<x> >= 0 to true. */
9416 else if (code == GE_EXPR
9417 && tree_expr_nonnegative_p (arg0)
9418 && (integer_zerop (arg1)
9419 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9420 && real_zerop (arg1))))
9421 return omit_one_operand (type, integer_one_node, arg0);
9423 /* Convert ABS_EXPR<x> < 0 to false. */
9424 else if (code == LT_EXPR
9425 && tree_expr_nonnegative_p (arg0)
9426 && (integer_zerop (arg1) || real_zerop (arg1)))
9427 return omit_one_operand (type, integer_zero_node, arg0);
9429 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9430 else if ((code == EQ_EXPR || code == NE_EXPR)
9431 && TREE_CODE (arg0) == ABS_EXPR
9432 && (integer_zerop (arg1) || real_zerop (arg1)))
9433 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9435 /* If this is an EQ or NE comparison with zero and ARG0 is
9436 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9437 two operations, but the latter can be done in one less insn
9438 on machines that have only two-operand insns or on which a
9439 constant cannot be the first operand. */
9440 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9441 && TREE_CODE (arg0) == BIT_AND_EXPR)
9443 tree arg00 = TREE_OPERAND (arg0, 0);
9444 tree arg01 = TREE_OPERAND (arg0, 1);
9445 if (TREE_CODE (arg00) == LSHIFT_EXPR
9446 && integer_onep (TREE_OPERAND (arg00, 0)))
9447 return
9448 fold_build2 (code, type,
9449 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9450 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9451 arg01, TREE_OPERAND (arg00, 1)),
9452 fold_convert (TREE_TYPE (arg0),
9453 integer_one_node)),
9454 arg1);
9455 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9456 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9457 return
9458 fold_build2 (code, type,
9459 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9460 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9461 arg00, TREE_OPERAND (arg01, 1)),
9462 fold_convert (TREE_TYPE (arg0),
9463 integer_one_node)),
9464 arg1);
9467 /* If this is an NE or EQ comparison of zero against the result of a
9468 signed MOD operation whose second operand is a power of 2, make
9469 the MOD operation unsigned since it is simpler and equivalent. */
9470 if ((code == NE_EXPR || code == EQ_EXPR)
9471 && integer_zerop (arg1)
9472 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9473 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9474 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9475 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9476 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9477 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9479 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9480 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9481 fold_convert (newtype,
9482 TREE_OPERAND (arg0, 0)),
9483 fold_convert (newtype,
9484 TREE_OPERAND (arg0, 1)));
9486 return fold_build2 (code, type, newmod,
9487 fold_convert (newtype, arg1));
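/* Illustrative example: for signed x, "x % 4 == 0" becomes
   "(unsigned) x % 4U == 0", which is equivalent for a test against
   zero and simpler to expand.  */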
9490 /* If this is an NE comparison of zero with an AND of one, remove the
9491 comparison since the AND will give the correct value. */
9492 if (code == NE_EXPR && integer_zerop (arg1)
9493 && TREE_CODE (arg0) == BIT_AND_EXPR
9494 && integer_onep (TREE_OPERAND (arg0, 1)))
9495 return fold_convert (type, arg0);
9497 /* If we have (A & C) == C where C is a power of 2, convert this into
9498 (A & C) != 0. Similarly for NE_EXPR. */
9499 if ((code == EQ_EXPR || code == NE_EXPR)
9500 && TREE_CODE (arg0) == BIT_AND_EXPR
9501 && integer_pow2p (TREE_OPERAND (arg0, 1))
9502 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9503 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9504 arg0, fold_convert (TREE_TYPE (arg0),
9505 integer_zero_node));
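/* Illustrative example: "(a & 8) == 8" becomes "(a & 8) != 0",
   a direct single-bit test.  */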
9507 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9508 bit, then fold the expression into A < 0 or A >= 0. */
9509 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9510 if (tem)
9511 return tem;
9513 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9514 Similarly for NE_EXPR. */
9515 if ((code == EQ_EXPR || code == NE_EXPR)
9516 && TREE_CODE (arg0) == BIT_AND_EXPR
9517 && TREE_CODE (arg1) == INTEGER_CST
9518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9520 tree notc = fold_build1 (BIT_NOT_EXPR,
9521 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9522 TREE_OPERAND (arg0, 1));
9523 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9524 arg1, notc);
9525 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9526 if (integer_nonzerop (dandnotc))
9527 return omit_one_operand (type, rslt, arg0);
9530 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9531 Similarly for NE_EXPR. */
9532 if ((code == EQ_EXPR || code == NE_EXPR)
9533 && TREE_CODE (arg0) == BIT_IOR_EXPR
9534 && TREE_CODE (arg1) == INTEGER_CST
9535 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9537 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9538 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9539 TREE_OPERAND (arg0, 1), notd);
9540 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9541 if (integer_nonzerop (candnotd))
9542 return omit_one_operand (type, rslt, arg0);
9545 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9546 and similarly for >= into !=. */
9547 if ((code == LT_EXPR || code == GE_EXPR)
9548 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9549 && TREE_CODE (arg1) == LSHIFT_EXPR
9550 && integer_onep (TREE_OPERAND (arg1, 0)))
9551 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9552 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9553 TREE_OPERAND (arg1, 1)),
9554 fold_convert (TREE_TYPE (arg0), integer_zero_node));
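/* Illustrative example: for unsigned x, "x < (1 << y)" becomes
   "(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0".  */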
9556 else if ((code == LT_EXPR || code == GE_EXPR)
9557 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9558 && (TREE_CODE (arg1) == NOP_EXPR
9559 || TREE_CODE (arg1) == CONVERT_EXPR)
9560 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9561 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9562 return
9563 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9564 fold_convert (TREE_TYPE (arg0),
9565 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9566 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9567 1))),
9568 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9570 /* Simplify comparison of something with itself. (For IEEE
9571 floating-point, we can only do some of these simplifications.) */
9572 if (operand_equal_p (arg0, arg1, 0))
9574 switch (code)
9576 case EQ_EXPR:
9577 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9578 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9579 return constant_boolean_node (1, type);
9580 break;
9582 case GE_EXPR:
9583 case LE_EXPR:
9584 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9585 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9586 return constant_boolean_node (1, type);
9587 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9589 case NE_EXPR:
9590 /* For NE, we can only do this simplification if the type is integer
9591 or we don't honor IEEE floating-point NaNs. */
9592 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9593 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9594 break;
9595 /* ... fall through ... */
9596 case GT_EXPR:
9597 case LT_EXPR:
9598 return constant_boolean_node (0, type);
9599 default:
9600 gcc_unreachable ();
9604 /* If we are comparing an expression that just has comparisons
9605 of two integer values, arithmetic expressions of those comparisons,
9606 and constants, we can simplify it. There are only three cases
9607 to check: the two values can either be equal, the first can be
9608 greater, or the second can be greater. Fold the expression for
9609 those three values. Since each value must be 0 or 1, we have
9610 eight possibilities, each of which corresponds to the constant 0
9611 or 1 or one of the six possible comparisons.
9613 This handles common cases like (a > b) == 0 but also handles
9614 expressions like ((x > y) - (y > x)) > 0, which supposedly
9615 occur in macroized code. */
9617 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9619 tree cval1 = 0, cval2 = 0;
9620 int save_p = 0;
9622 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9623 /* Don't handle degenerate cases here; they should already
9624 have been handled anyway. */
9625 && cval1 != 0 && cval2 != 0
9626 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9627 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9628 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9629 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9630 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9631 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9632 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9634 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9635 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9637 /* We can't just pass T to eval_subst in case cval1 or cval2
9638 was the same as ARG1. */
9640 tree high_result
9641 = fold_build2 (code, type,
9642 eval_subst (arg0, cval1, maxval,
9643 cval2, minval),
9644 arg1);
9645 tree equal_result
9646 = fold_build2 (code, type,
9647 eval_subst (arg0, cval1, maxval,
9648 cval2, maxval),
9649 arg1);
9650 tree low_result
9651 = fold_build2 (code, type,
9652 eval_subst (arg0, cval1, minval,
9653 cval2, maxval),
9654 arg1);
9656 /* All three of these results should be 0 or 1. Confirm they
9657 are. Then use those values to select the proper code
9658 to use. */
9660 if ((integer_zerop (high_result)
9661 || integer_onep (high_result))
9662 && (integer_zerop (equal_result)
9663 || integer_onep (equal_result))
9664 && (integer_zerop (low_result)
9665 || integer_onep (low_result)))
9667 /* Make a 3-bit mask with the high-order bit being the
9668 value for `>', the next for `=', and the low for `<'. */
9669 switch ((integer_onep (high_result) * 4)
9670 + (integer_onep (equal_result) * 2)
9671 + integer_onep (low_result))
9673 case 0:
9674 /* Always false. */
9675 return omit_one_operand (type, integer_zero_node, arg0);
9676 case 1:
9677 code = LT_EXPR;
9678 break;
9679 case 2:
9680 code = EQ_EXPR;
9681 break;
9682 case 3:
9683 code = LE_EXPR;
9684 break;
9685 case 4:
9686 code = GT_EXPR;
9687 break;
9688 case 5:
9689 code = NE_EXPR;
9690 break;
9691 case 6:
9692 code = GE_EXPR;
9693 break;
9694 case 7:
9695 /* Always true. */
9696 return omit_one_operand (type, integer_one_node, arg0);
9699 if (save_p)
9700 return save_expr (build2 (code, type, cval1, cval2));
9701 else
9702 return fold_build2 (code, type, cval1, cval2);
9707 /* If this is a comparison of a field, we may be able to simplify it. */
9708 if (((TREE_CODE (arg0) == COMPONENT_REF
9709 && lang_hooks.can_use_bit_fields_p ())
9710 || TREE_CODE (arg0) == BIT_FIELD_REF)
9711 && (code == EQ_EXPR || code == NE_EXPR)
9712 /* Handle the constant case even without -O
9713 to make sure the warnings are given. */
9714 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9716 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9717 if (t1)
9718 return t1;
9721 /* Fold a comparison of the address of COMPONENT_REFs with the same
9722 type and component to a comparison of the address of the base
9723 object. In short, &x->a OP &y->a to x OP y and
9724 &x->a OP &y.a to x OP &y */
9725 if (TREE_CODE (arg0) == ADDR_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9727 && TREE_CODE (arg1) == ADDR_EXPR
9728 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9730 tree cref0 = TREE_OPERAND (arg0, 0);
9731 tree cref1 = TREE_OPERAND (arg1, 0);
9732 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9734 tree op0 = TREE_OPERAND (cref0, 0);
9735 tree op1 = TREE_OPERAND (cref1, 0);
9736 return fold_build2 (code, type,
9737 build_fold_addr_expr (op0),
9738 build_fold_addr_expr (op1));
9742 /* Optimize comparisons of strlen vs zero to a compare of the
9743 first character of the string vs zero. To wit,
9744 strlen(ptr) == 0 => *ptr == 0
9745 strlen(ptr) != 0 => *ptr != 0
9746 Other cases should reduce to one of these two (or a constant)
9747 due to the return value of strlen being unsigned. */
9748 if ((code == EQ_EXPR || code == NE_EXPR)
9749 && integer_zerop (arg1)
9750 && TREE_CODE (arg0) == CALL_EXPR)
9752 tree fndecl = get_callee_fndecl (arg0);
9753 tree arglist;
9755 if (fndecl
9756 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9757 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9758 && (arglist = TREE_OPERAND (arg0, 1))
9759 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9760 && ! TREE_CHAIN (arglist))
9762 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9763 return fold_build2 (code, type, iref,
9764 build_int_cst (TREE_TYPE (iref), 0));
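/* Illustrative example: "strlen (s) == 0" becomes "*s == 0",
   avoiding the library call.  */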
9768 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9769 into a single range test. */
9770 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9771 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9772 && TREE_CODE (arg1) == INTEGER_CST
9773 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9774 && !integer_zerop (TREE_OPERAND (arg0, 1))
9775 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9776 && !TREE_OVERFLOW (arg1))
9778 t1 = fold_div_compare (code, type, arg0, arg1);
9779 if (t1 != NULL_TREE)
9780 return t1;
9783 if ((code == EQ_EXPR || code == NE_EXPR)
9784 && integer_zerop (arg1)
9785 && tree_expr_nonzero_p (arg0))
9787 tree res = constant_boolean_node (code==NE_EXPR, type);
9788 return omit_one_operand (type, res, arg0);
9791 t1 = fold_relational_const (code, type, arg0, arg1);
9792 return t1 == NULL_TREE ? NULL_TREE : t1;
9794 case UNORDERED_EXPR:
9795 case ORDERED_EXPR:
9796 case UNLT_EXPR:
9797 case UNLE_EXPR:
9798 case UNGT_EXPR:
9799 case UNGE_EXPR:
9800 case UNEQ_EXPR:
9801 case LTGT_EXPR:
9802 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9804 t1 = fold_relational_const (code, type, arg0, arg1);
9805 if (t1 != NULL_TREE)
9806 return t1;
9809 /* If the first operand is NaN, the result is constant. */
9810 if (TREE_CODE (arg0) == REAL_CST
9811 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9812 && (code != LTGT_EXPR || ! flag_trapping_math))
9814 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9815 ? integer_zero_node
9816 : integer_one_node;
9817 return omit_one_operand (type, t1, arg1);
9820 /* If the second operand is NaN, the result is constant. */
9821 if (TREE_CODE (arg1) == REAL_CST
9822 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9823 && (code != LTGT_EXPR || ! flag_trapping_math))
9825 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9826 ? integer_zero_node
9827 : integer_one_node;
9828 return omit_one_operand (type, t1, arg0);
9831 /* Simplify unordered comparison of something with itself. */
9832 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9833 && operand_equal_p (arg0, arg1, 0))
9834 return constant_boolean_node (1, type);
9836 if (code == LTGT_EXPR
9837 && !flag_trapping_math
9838 && operand_equal_p (arg0, arg1, 0))
9839 return constant_boolean_node (0, type);
9841 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9843 tree targ0 = strip_float_extensions (arg0);
9844 tree targ1 = strip_float_extensions (arg1);
9845 tree newtype = TREE_TYPE (targ0);
9847 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9848 newtype = TREE_TYPE (targ1);
9850 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9851 return fold_build2 (code, type, fold_convert (newtype, targ0),
9852 fold_convert (newtype, targ1));
9855 return NULL_TREE;
9857 case COMPOUND_EXPR:
9858 /* When pedantic, a compound expression can be neither an lvalue
9859 nor an integer constant expression. */
9860 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9861 return NULL_TREE;
9862 /* Don't let (0, 0) be a null pointer constant. */
9863 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9864 : fold_convert (type, arg1);
9865 return pedantic_non_lvalue (tem);
9867 case COMPLEX_EXPR:
9868 if (wins)
9869 return build_complex (type, arg0, arg1);
9870 return NULL_TREE;
9872 case ASSERT_EXPR:
9873 /* An ASSERT_EXPR should never be passed to fold_binary. */
9874 gcc_unreachable ();
9876 default:
9877 return NULL_TREE;
9878 } /* switch (code) */
9881 /* Callback for walk_tree, looking for a LABEL_EXPR.
9882 Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
9883 Do not check the sub-tree of a GOTO_EXPR. */
9885 static tree
9886 contains_label_1 (tree *tp,
9887 int *walk_subtrees,
9888 void *data ATTRIBUTE_UNUSED)
9890 switch (TREE_CODE (*tp))
9892 case LABEL_EXPR:
9893 return *tp;
9894 case GOTO_EXPR:
9895 *walk_subtrees = 0;
9896 /* FALLTHRU */
9897 default:
9898 return NULL_TREE;
9902 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9903 accessible from outside the sub-tree. Returns true if such a
9904 label is found, false otherwise. */
9906 static bool
9907 contains_label_p (tree st)
9909 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9912 /* Fold a ternary expression of code CODE and type TYPE with operands
9913 OP0, OP1, and OP2. Return the folded expression if folding is
9914 successful. Otherwise, return NULL_TREE. */
9916 tree
9917 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9919 tree tem;
9920 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9921 enum tree_code_class kind = TREE_CODE_CLASS (code);
9923 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9924 && TREE_CODE_LENGTH (code) == 3);
9926 /* Strip any conversions that don't change the mode. This is safe
9927 for every expression, except for a comparison expression because
9928 its signedness is derived from its operands. So, in the latter
9929 case, only strip conversions that don't change the signedness.
9931 Note that this is done as an internal manipulation within the
9932 constant folder, in order to find the simplest representation of
9933 the arguments so that their form can be studied. In any case,
9934 the appropriate type conversions should be put back in the tree
9935 that will get out of the constant folder. */
9936 if (op0)
9938 arg0 = op0;
9939 STRIP_NOPS (arg0);
9942 if (op1)
9944 arg1 = op1;
9945 STRIP_NOPS (arg1);
9948 switch (code)
9950 case COMPONENT_REF:
9951 if (TREE_CODE (arg0) == CONSTRUCTOR
9952 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9954 unsigned HOST_WIDE_INT idx;
9955 tree field, value;
9956 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9957 if (field == arg1)
9958 return value;
9960 return NULL_TREE;
9962 case COND_EXPR:
9963 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9964 so all simple results must be passed through pedantic_non_lvalue. */
9965 if (TREE_CODE (arg0) == INTEGER_CST)
9967 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9968 tem = integer_zerop (arg0) ? op2 : op1;
9969 /* Only optimize constant conditions when the selected branch
9970 has the same type as the COND_EXPR. This avoids optimizing
9971 away "c ? x : throw", where the throw has a void type.
9972 Avoid throwing away an operand that contains a label. */
9973 if ((!TREE_SIDE_EFFECTS (unused_op)
9974 || !contains_label_p (unused_op))
9975 && (! VOID_TYPE_P (TREE_TYPE (tem))
9976 || VOID_TYPE_P (type)))
9977 return pedantic_non_lvalue (tem);
9978 return NULL_TREE;
9980 if (operand_equal_p (arg1, op2, 0))
9981 return pedantic_omit_one_operand (type, arg1, arg0);
9983 /* If we have A op B ? A : C, we may be able to convert this to a
9984 simpler expression, depending on the operation and the values
9985 of B and C. Signed zeros prevent all of these transformations,
9986 for reasons given above each one.
9988 Also try swapping the arguments and inverting the conditional. */
9989 if (COMPARISON_CLASS_P (arg0)
9990 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9991 arg1, TREE_OPERAND (arg0, 1))
9992 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9994 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9995 if (tem)
9996 return tem;
9999 if (COMPARISON_CLASS_P (arg0)
10000 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10001 op2,
10002 TREE_OPERAND (arg0, 1))
10003 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10005 tem = invert_truthvalue (arg0);
10006 if (COMPARISON_CLASS_P (tem))
10008 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10009 if (tem)
10010 return tem;
10014 /* If the second operand is simpler than the third, swap them
10015 since that produces better jump optimization results. */
10016 if (truth_value_p (TREE_CODE (arg0))
10017 && tree_swap_operands_p (op1, op2, false))
10019 /* See if this can be inverted. If it can't, possibly because
10020 it was a floating-point inequality comparison, don't do
10021 anything. */
10022 tem = invert_truthvalue (arg0);
10024 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10025 return fold_build3 (code, type, tem, op2, op1);
10028 /* Convert A ? 1 : 0 to simply A. */
10029 if (integer_onep (op1)
10030 && integer_zerop (op2)
10031 /* If we try to convert OP0 to our type, the
10032 call to fold will try to move the conversion inside
10033 a COND, which will recurse. In that case, the COND_EXPR
10034 is probably the best choice, so leave it alone. */
10035 && type == TREE_TYPE (arg0))
10036 return pedantic_non_lvalue (arg0);
10038 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10039 over COND_EXPR in cases such as floating point comparisons. */
10040 if (integer_zerop (op1)
10041 && integer_onep (op2)
10042 && truth_value_p (TREE_CODE (arg0)))
10043 return pedantic_non_lvalue (fold_convert (type,
10044 invert_truthvalue (arg0)));
10046 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10047 if (TREE_CODE (arg0) == LT_EXPR
10048 && integer_zerop (TREE_OPERAND (arg0, 1))
10049 && integer_zerop (op2)
10050 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10051 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10052 TREE_TYPE (tem), tem, arg1));
10054 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10055 already handled above. */
10056 if (TREE_CODE (arg0) == BIT_AND_EXPR
10057 && integer_onep (TREE_OPERAND (arg0, 1))
10058 && integer_zerop (op2)
10059 && integer_pow2p (arg1))
10061 tree tem = TREE_OPERAND (arg0, 0);
10062 STRIP_NOPS (tem);
10063 if (TREE_CODE (tem) == RSHIFT_EXPR
10064 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10065 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10066 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10067 return fold_build2 (BIT_AND_EXPR, type,
10068 TREE_OPERAND (tem, 0), arg1);
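/* Illustrative example: "(a >> 3) & 1 ? 8 : 0" becomes "a & 8",
   since 8 == 1 << 3.  */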
10071 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10072 is probably obsolete because the first operand should be a
10073 truth value (that's why we have the two cases above), but let's
10074 leave it in until we can confirm this for all front-ends. */
10075 if (integer_zerop (op2)
10076 && TREE_CODE (arg0) == NE_EXPR
10077 && integer_zerop (TREE_OPERAND (arg0, 1))
10078 && integer_pow2p (arg1)
10079 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10080 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10081 arg1, OEP_ONLY_CONST))
10082 return pedantic_non_lvalue (fold_convert (type,
10083 TREE_OPERAND (arg0, 0)));
10085 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10086 if (integer_zerop (op2)
10087 && truth_value_p (TREE_CODE (arg0))
10088 && truth_value_p (TREE_CODE (arg1)))
10089 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
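/* Illustrative example: for truth values a and b, "a ? b : 0" becomes
   "a && b" here, and "a ? b : 1" becomes "!a || b" below.  */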
10091 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10092 if (integer_onep (op2)
10093 && truth_value_p (TREE_CODE (arg0))
10094 && truth_value_p (TREE_CODE (arg1)))
10096 /* Only perform transformation if ARG0 is easily inverted. */
10097 tem = invert_truthvalue (arg0);
10098 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10099 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10102 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10103 if (integer_zerop (arg1)
10104 && truth_value_p (TREE_CODE (arg0))
10105 && truth_value_p (TREE_CODE (op2)))
10107 /* Only perform transformation if ARG0 is easily inverted. */
10108 tem = invert_truthvalue (arg0);
10109 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10110 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10113 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10114 if (integer_onep (arg1)
10115 && truth_value_p (TREE_CODE (arg0))
10116 && truth_value_p (TREE_CODE (op2)))
10117 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10119 return NULL_TREE;
10121 case CALL_EXPR:
10122 /* Check for a built-in function. */
10123 if (TREE_CODE (op0) == ADDR_EXPR
10124 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10125 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10126 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10127 return NULL_TREE;
10129 case BIT_FIELD_REF:
10130 if (TREE_CODE (arg0) == VECTOR_CST
10131 && type == TREE_TYPE (TREE_TYPE (arg0))
10132 && host_integerp (arg1, 1)
10133 && host_integerp (op2, 1))
10135 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10136 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10138 if (width != 0
10139 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10140 && (idx % width) == 0
10141 && (idx = idx / width)
10142 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10144 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10145 while (idx-- > 0 && elements)
10146 elements = TREE_CHAIN (elements);
10147 if (elements)
10148 return TREE_VALUE (elements);
10149 else
10150 return fold_convert (type, integer_zero_node);
10153 return NULL_TREE;
10155 default:
10156 return NULL_TREE;
10157 } /* switch (code) */
10160 /* Perform constant folding and related simplification of EXPR.
10161 The related simplifications include x*1 => x, x*0 => 0, etc.,
10162 and application of the associative law.
10163 NOP_EXPR conversions may be removed freely (as long as we
10164 are careful not to change the type of the overall expression).
10165 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10166 but we can constant-fold them if they have constant operands. */
10168 #ifdef ENABLE_FOLD_CHECKING
10169 # define fold(x) fold_1 (x)
10170 static tree fold_1 (tree);
10171 static
10172 #endif
10173 tree
10174 fold (tree expr)
10176 const tree t = expr;
10177 enum tree_code code = TREE_CODE (t);
10178 enum tree_code_class kind = TREE_CODE_CLASS (code);
10179 tree tem;
10181 /* Return right away if a constant. */
10182 if (kind == tcc_constant)
10183 return t;
10185 if (IS_EXPR_CODE_CLASS (kind))
10187 tree type = TREE_TYPE (t);
10188 tree op0, op1, op2;
10190 switch (TREE_CODE_LENGTH (code))
10192 case 1:
10193 op0 = TREE_OPERAND (t, 0);
10194 tem = fold_unary (code, type, op0);
10195 return tem ? tem : expr;
10196 case 2:
10197 op0 = TREE_OPERAND (t, 0);
10198 op1 = TREE_OPERAND (t, 1);
10199 tem = fold_binary (code, type, op0, op1);
10200 return tem ? tem : expr;
10201 case 3:
10202 op0 = TREE_OPERAND (t, 0);
10203 op1 = TREE_OPERAND (t, 1);
10204 op2 = TREE_OPERAND (t, 2);
10205 tem = fold_ternary (code, type, op0, op1, op2);
10206 return tem ? tem : expr;
10207 default:
10208 break;
10212 switch (code)
10214 case CONST_DECL:
10215 return fold (DECL_INITIAL (t));
10217 default:
10218 return t;
10219 } /* switch (code) */
10222 #ifdef ENABLE_FOLD_CHECKING
10223 #undef fold
10225 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10226 static void fold_check_failed (tree, tree);
10227 void print_fold_checksum (tree);
10229 /* When --enable-checking=fold, compute a digest of expr before
10230 and after the actual fold call to verify that fold did not
10231 accidentally change the original expr. */
10233 tree
10234 fold (tree expr)
10236 tree ret;
10237 struct md5_ctx ctx;
10238 unsigned char checksum_before[16], checksum_after[16];
10239 htab_t ht;
10241 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10242 md5_init_ctx (&ctx);
10243 fold_checksum_tree (expr, &ctx, ht);
10244 md5_finish_ctx (&ctx, checksum_before);
10245 htab_empty (ht);
10247 ret = fold_1 (expr);
10249 md5_init_ctx (&ctx);
10250 fold_checksum_tree (expr, &ctx, ht);
10251 md5_finish_ctx (&ctx, checksum_after);
10252 htab_delete (ht);
10254 if (memcmp (checksum_before, checksum_after, 16))
10255 fold_check_failed (expr, ret);
10257 return ret;
10260 void
10261 print_fold_checksum (tree expr)
10263 struct md5_ctx ctx;
10264 unsigned char checksum[16], cnt;
10265 htab_t ht;
10267 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10268 md5_init_ctx (&ctx);
10269 fold_checksum_tree (expr, &ctx, ht);
10270 md5_finish_ctx (&ctx, checksum);
10271 htab_delete (ht);
10272 for (cnt = 0; cnt < 16; ++cnt)
10273 fprintf (stderr, "%02x", checksum[cnt]);
10274 putc ('\n', stderr);
10277 static void
10278 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10280 internal_error ("fold check: original tree changed by fold");
10283 static void
10284 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10286 void **slot;
10287 enum tree_code code;
10288 char buf[sizeof (struct tree_function_decl)];
10289 int i, len;
10291 recursive_label:
10293 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10294 <= sizeof (struct tree_function_decl))
10295 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10296 if (expr == NULL)
10297 return;
10298 slot = htab_find_slot (ht, expr, INSERT);
10299 if (*slot != NULL)
10300 return;
10301 *slot = expr;
10302 code = TREE_CODE (expr);
10303 if (TREE_CODE_CLASS (code) == tcc_declaration
10304 && DECL_ASSEMBLER_NAME_SET_P (expr))
10306 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10307 memcpy (buf, expr, tree_size (expr));
10308 expr = (tree) buf;
10309 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10311 else if (TREE_CODE_CLASS (code) == tcc_type
10312 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10313 || TYPE_CACHED_VALUES_P (expr)
10314 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10316 /* Allow these fields to be modified. */
10317 memcpy (buf, expr, tree_size (expr));
10318 expr = (tree) buf;
10319 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10320 TYPE_POINTER_TO (expr) = NULL;
10321 TYPE_REFERENCE_TO (expr) = NULL;
10322 if (TYPE_CACHED_VALUES_P (expr))
10324 TYPE_CACHED_VALUES_P (expr) = 0;
10325 TYPE_CACHED_VALUES (expr) = NULL;
10328 md5_process_bytes (expr, tree_size (expr), ctx);
10329 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10330 if (TREE_CODE_CLASS (code) != tcc_type
10331 && TREE_CODE_CLASS (code) != tcc_declaration
10332 && code != TREE_LIST)
10333 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10334 switch (TREE_CODE_CLASS (code))
10336 case tcc_constant:
10337 switch (code)
10339 case STRING_CST:
10340 md5_process_bytes (TREE_STRING_POINTER (expr),
10341 TREE_STRING_LENGTH (expr), ctx);
10342 break;
10343 case COMPLEX_CST:
10344 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10345 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10346 break;
10347 case VECTOR_CST:
10348 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10349 break;
10350 default:
10351 break;
10353 break;
10354 case tcc_exceptional:
10355 switch (code)
10357 case TREE_LIST:
10358 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10359 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10360 expr = TREE_CHAIN (expr);
10361 goto recursive_label;
10362 break;
10363 case TREE_VEC:
10364 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10365 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10366 break;
10367 default:
10368 break;
10370 break;
10371 case tcc_expression:
10372 case tcc_reference:
10373 case tcc_comparison:
10374 case tcc_unary:
10375 case tcc_binary:
10376 case tcc_statement:
10377 len = TREE_CODE_LENGTH (code);
10378 for (i = 0; i < len; ++i)
10379 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10380 break;
10381 case tcc_declaration:
10382 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10383 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10384 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10385 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10386 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10387 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10388 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10389 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10390 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10392 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10394 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10395 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10396 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10398 break;
10399 case tcc_type:
10400 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10401 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10402 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10403 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10404 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10405 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10406 if (INTEGRAL_TYPE_P (expr)
10407 || SCALAR_FLOAT_TYPE_P (expr))
10409 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10410 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10412 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10413 if (TREE_CODE (expr) == RECORD_TYPE
10414 || TREE_CODE (expr) == UNION_TYPE
10415 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10416 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10417 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10418 break;
10419 default:
10420 break;
10424 #endif
10426 /* Fold a unary tree expression with code CODE of type TYPE with an
10427 operand OP0. Return a folded expression if successful. Otherwise,
10428 return a tree expression with code CODE of type TYPE with an
10429 operand OP0. */
10431 tree
10432 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10434 tree tem;
10435 #ifdef ENABLE_FOLD_CHECKING
10436 unsigned char checksum_before[16], checksum_after[16];
10437 struct md5_ctx ctx;
10438 htab_t ht;
10440 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10441 md5_init_ctx (&ctx);
10442 fold_checksum_tree (op0, &ctx, ht);
10443 md5_finish_ctx (&ctx, checksum_before);
10444 htab_empty (ht);
10445 #endif
10447 tem = fold_unary (code, type, op0);
10448 if (!tem)
10449 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10451 #ifdef ENABLE_FOLD_CHECKING
10452 md5_init_ctx (&ctx);
10453 fold_checksum_tree (op0, &ctx, ht);
10454 md5_finish_ctx (&ctx, checksum_after);
10455 htab_delete (ht);
10457 if (memcmp (checksum_before, checksum_after, 16))
10458 fold_check_failed (op0, tem);
10459 #endif
10460 return tem;
10463 /* Fold a binary tree expression with code CODE of type TYPE with
10464 operands OP0 and OP1. Return a folded expression if successful.
10465 Otherwise, return a tree expression with code CODE of type TYPE
10466 with operands OP0 and OP1. */
10468 tree
10469 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10470 MEM_STAT_DECL)
10472 tree tem;
10473 #ifdef ENABLE_FOLD_CHECKING
10474 unsigned char checksum_before_op0[16],
10475 checksum_before_op1[16],
10476 checksum_after_op0[16],
10477 checksum_after_op1[16];
10478 struct md5_ctx ctx;
10479 htab_t ht;
10481 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10482 md5_init_ctx (&ctx);
10483 fold_checksum_tree (op0, &ctx, ht);
10484 md5_finish_ctx (&ctx, checksum_before_op0);
10485 htab_empty (ht);
10487 md5_init_ctx (&ctx);
10488 fold_checksum_tree (op1, &ctx, ht);
10489 md5_finish_ctx (&ctx, checksum_before_op1);
10490 htab_empty (ht);
10491 #endif
10493 tem = fold_binary (code, type, op0, op1);
10494 if (!tem)
10495 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10497 #ifdef ENABLE_FOLD_CHECKING
10498 md5_init_ctx (&ctx);
10499 fold_checksum_tree (op0, &ctx, ht);
10500 md5_finish_ctx (&ctx, checksum_after_op0);
10501 htab_empty (ht);
10503 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10504 fold_check_failed (op0, tem);
10506 md5_init_ctx (&ctx);
10507 fold_checksum_tree (op1, &ctx, ht);
10508 md5_finish_ctx (&ctx, checksum_after_op1);
10509 htab_delete (ht);
10511 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10512 fold_check_failed (op1, tem);
10513 #endif
10514 return tem;
10517 /* Fold a ternary tree expression with code CODE of type TYPE with
10518 operands OP0, OP1, and OP2. Return a folded expression if
10519 successful. Otherwise, return a tree expression with code CODE of
10520 type TYPE with operands OP0, OP1, and OP2. */
10522 tree
10523 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10524 MEM_STAT_DECL)
10526 tree tem;
10527 #ifdef ENABLE_FOLD_CHECKING
10528 unsigned char checksum_before_op0[16],
10529 checksum_before_op1[16],
10530 checksum_before_op2[16],
10531 checksum_after_op0[16],
10532 checksum_after_op1[16],
10533 checksum_after_op2[16];
10534 struct md5_ctx ctx;
10535 htab_t ht;
10537 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10538 md5_init_ctx (&ctx);
10539 fold_checksum_tree (op0, &ctx, ht);
10540 md5_finish_ctx (&ctx, checksum_before_op0);
10541 htab_empty (ht);
10543 md5_init_ctx (&ctx);
10544 fold_checksum_tree (op1, &ctx, ht);
10545 md5_finish_ctx (&ctx, checksum_before_op1);
10546 htab_empty (ht);
10548 md5_init_ctx (&ctx);
10549 fold_checksum_tree (op2, &ctx, ht);
10550 md5_finish_ctx (&ctx, checksum_before_op2);
10551 htab_empty (ht);
10552 #endif
10554 tem = fold_ternary (code, type, op0, op1, op2);
10555 if (!tem)
10556 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10558 #ifdef ENABLE_FOLD_CHECKING
10559 md5_init_ctx (&ctx);
10560 fold_checksum_tree (op0, &ctx, ht);
10561 md5_finish_ctx (&ctx, checksum_after_op0);
10562 htab_empty (ht);
10564 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10565 fold_check_failed (op0, tem);
10567 md5_init_ctx (&ctx);
10568 fold_checksum_tree (op1, &ctx, ht);
10569 md5_finish_ctx (&ctx, checksum_after_op1);
10570 htab_empty (ht);
10572 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10573 fold_check_failed (op1, tem);
10575 md5_init_ctx (&ctx);
10576 fold_checksum_tree (op2, &ctx, ht);
10577 md5_finish_ctx (&ctx, checksum_after_op2);
10578 htab_delete (ht);
10580 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10581 fold_check_failed (op2, tem);
10582 #endif
10583 return tem;
10586 /* Perform constant folding and related simplification of initializer
10587 expression EXPR. These behave identically to "fold_buildN" but ignore
10588 potential run-time traps and exceptions that fold must preserve. */
10590 #define START_FOLD_INIT \
10591 int saved_signaling_nans = flag_signaling_nans;\
10592 int saved_trapping_math = flag_trapping_math;\
10593 int saved_rounding_math = flag_rounding_math;\
10594 int saved_trapv = flag_trapv;\
10595 flag_signaling_nans = 0;\
10596 flag_trapping_math = 0;\
10597 flag_rounding_math = 0;\
10598 flag_trapv = 0
10600 #define END_FOLD_INIT \
10601 flag_signaling_nans = saved_signaling_nans;\
10602 flag_trapping_math = saved_trapping_math;\
10603 flag_rounding_math = saved_rounding_math;\
10604 flag_trapv = saved_trapv
10606 tree
10607 fold_build1_initializer (enum tree_code code, tree type, tree op)
10609 tree result;
10610 START_FOLD_INIT;
10612 result = fold_build1 (code, type, op);
10614 END_FOLD_INIT;
10615 return result;
10618 tree
10619 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10621 tree result;
10622 START_FOLD_INIT;
10624 result = fold_build2 (code, type, op0, op1);
10626 END_FOLD_INIT;
10627 return result;
10630 tree
10631 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10632 tree op2)
10634 tree result;
10635 START_FOLD_INIT;
10637 result = fold_build3 (code, type, op0, op1, op2);
10639 END_FOLD_INIT;
10640 return result;
10643 #undef START_FOLD_INIT
10644 #undef END_FOLD_INIT
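
/* A minimal illustrative sketch (not part of the original API): a front
   end folding the constant initializer "1.0 / 3.0".  Under
   -ftrapping-math or -frounding-math, plain fold_build2 must keep the
   division, but the *_initializer variant may fold it at translation
   time.  The function name and arguments are hypothetical; ONE and
   THREE are assumed to be REAL_CSTs of double_type_node.  */

static tree
example_fold_initializer_division (tree one, tree three)
{
  return fold_build2_initializer (RDIV_EXPR, double_type_node, one, three);
}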
10646 /* Determine if the first argument is a multiple of the second argument.
10647 Return 0 if it is not, or if we cannot easily determine that it is.
10649 An example of the sort of thing we care about (at this point; this routine
10650 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10651 fold cases do now) is discovering that
10653 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10655 is a multiple of
10657 SAVE_EXPR (J * 8)
10659 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10661 This code also handles discovering that
10663 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10665 is a multiple of 8 so we don't have to worry about dealing with a
10666 possible remainder.
10668 Note that we *look* inside a SAVE_EXPR only to determine how it was
10669 calculated; it is not safe for fold to do much of anything else with the
10670 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10671 at run time. In particular, the latter example above *cannot* be implemented
10672 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10673 evaluation time of the original SAVE_EXPR is not necessarily the same at
10674 the time the new expression is evaluated. The only optimization of this
10675 sort that would be valid is changing
10677 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10679 divided by 8 to
10681 SAVE_EXPR (I) * SAVE_EXPR (J)
10683 (where the same SAVE_EXPR (J) is used in the original and the
10684 transformed version). */
10686 static int
10687 multiple_of_p (tree type, tree top, tree bottom)
10689 if (operand_equal_p (top, bottom, 0))
10690 return 1;
10692 if (TREE_CODE (type) != INTEGER_TYPE)
10693 return 0;
10695 switch (TREE_CODE (top))
10697 case BIT_AND_EXPR:
10698 /* Bitwise and provides a power of two multiple. If the mask is
10699 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10700 if (!integer_pow2p (bottom))
10701 return 0;
10702 /* FALLTHRU */
10704 case MULT_EXPR:
10705 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10706 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10708 case PLUS_EXPR:
10709 case MINUS_EXPR:
10710 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10711 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10713 case LSHIFT_EXPR:
10714 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10716 tree op1, t1;
10718 op1 = TREE_OPERAND (top, 1);
10719 /* const_binop may not detect overflow correctly,
10720 so check for it explicitly here. */
10721 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10722 > TREE_INT_CST_LOW (op1)
10723 && TREE_INT_CST_HIGH (op1) == 0
10724 && 0 != (t1 = fold_convert (type,
10725 const_binop (LSHIFT_EXPR,
10726 size_one_node,
10727 op1, 0)))
10728 && ! TREE_OVERFLOW (t1))
10729 return multiple_of_p (type, t1, bottom);
10731 return 0;
10733 case NOP_EXPR:
10734 /* Can't handle conversions from non-integral or wider integral type. */
10735 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10736 || (TYPE_PRECISION (type)
10737 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10738 return 0;
10740 /* ... fall through ... */
10742 case SAVE_EXPR:
10743 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10745 case INTEGER_CST:
10746 if (TREE_CODE (bottom) != INTEGER_CST
10747 || (TYPE_UNSIGNED (type)
10748 && (tree_int_cst_sgn (top) < 0
10749 || tree_int_cst_sgn (bottom) < 0)))
10750 return 0;
10751 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10752 top, bottom, 0));
10754 default:
10755 return 0;
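
/* Illustrative sketch, assuming J is an expression of an integer type:
   J * 8 is trivially a multiple of 8, so multiple_of_p should return 1
   here, typically via the MULT_EXPR (or LSHIFT_EXPR) case above.  The
   function name is hypothetical.  */

static int
example_multiple_of_eight (tree j)
{
  tree type = TREE_TYPE (j);
  tree eight = build_int_cst (type, 8);
  tree top = fold_build2 (MULT_EXPR, type, j, eight);

  return multiple_of_p (type, top, eight);
}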
10759 /* Return true if `t' is known to be non-negative. */
10761 int
10762 tree_expr_nonnegative_p (tree t)
10764 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10765 return 1;
10767 switch (TREE_CODE (t))
10769 case ABS_EXPR:
10770 /* We can't return 1 if flag_wrapv is set because
10771 ABS_EXPR<INT_MIN> = INT_MIN. */
10772 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10773 return 1;
10774 break;
10776 case INTEGER_CST:
10777 return tree_int_cst_sgn (t) >= 0;
10779 case REAL_CST:
10780 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10782 case PLUS_EXPR:
10783 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10784 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10785 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10787 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10788 both unsigned and at least 2 bits shorter than the result. */
10789 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10790 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10791 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10793 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10794 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10795 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10796 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10798 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10799 TYPE_PRECISION (inner2)) + 1;
10800 return prec < TYPE_PRECISION (TREE_TYPE (t));
10803 break;
10805 case MULT_EXPR:
10806 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10808 /* x * x for floating point x is always non-negative. */
10809 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10810 return 1;
10811 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10812 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10815 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10816 both unsigned and the sum of their precisions is less than that of the result. */
10817 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10818 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10819 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10821 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10822 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10823 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10824 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10825 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10826 < TYPE_PRECISION (TREE_TYPE (t));
10828 return 0;
10830 case BIT_AND_EXPR:
10831 case MAX_EXPR:
10832 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10833 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10835 case BIT_IOR_EXPR:
10836 case BIT_XOR_EXPR:
10837 case MIN_EXPR:
10838 case RDIV_EXPR:
10839 case TRUNC_DIV_EXPR:
10840 case CEIL_DIV_EXPR:
10841 case FLOOR_DIV_EXPR:
10842 case ROUND_DIV_EXPR:
10843 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10844 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10846 case TRUNC_MOD_EXPR:
10847 case CEIL_MOD_EXPR:
10848 case FLOOR_MOD_EXPR:
10849 case ROUND_MOD_EXPR:
10850 case SAVE_EXPR:
10851 case NON_LVALUE_EXPR:
10852 case FLOAT_EXPR:
10853 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10855 case COMPOUND_EXPR:
10856 case MODIFY_EXPR:
10857 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10859 case BIND_EXPR:
10860 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10862 case COND_EXPR:
10863 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10864 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10866 case NOP_EXPR:
10868 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10869 tree outer_type = TREE_TYPE (t);
10871 if (TREE_CODE (outer_type) == REAL_TYPE)
10873 if (TREE_CODE (inner_type) == REAL_TYPE)
10874 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10875 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10877 if (TYPE_UNSIGNED (inner_type))
10878 return 1;
10879 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10882 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10884 if (TREE_CODE (inner_type) == REAL_TYPE)
10885 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10886 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10887 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10888 && TYPE_UNSIGNED (inner_type);
10891 break;
10893 case TARGET_EXPR:
10895 tree temp = TARGET_EXPR_SLOT (t);
10896 t = TARGET_EXPR_INITIAL (t);
10898 /* If the initializer is non-void, then it's a normal expression
10899 that will be assigned to the slot. */
10900 if (!VOID_TYPE_P (t))
10901 return tree_expr_nonnegative_p (t);
10903 /* Otherwise, the initializer sets the slot in some way. One common
10904 way is an assignment statement at the end of the initializer. */
10905 while (1)
10907 if (TREE_CODE (t) == BIND_EXPR)
10908 t = expr_last (BIND_EXPR_BODY (t));
10909 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10910 || TREE_CODE (t) == TRY_CATCH_EXPR)
10911 t = expr_last (TREE_OPERAND (t, 0));
10912 else if (TREE_CODE (t) == STATEMENT_LIST)
10913 t = expr_last (t);
10914 else
10915 break;
10917 if (TREE_CODE (t) == MODIFY_EXPR
10918 && TREE_OPERAND (t, 0) == temp)
10919 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10921 return 0;
10924 case CALL_EXPR:
10926 tree fndecl = get_callee_fndecl (t);
10927 tree arglist = TREE_OPERAND (t, 1);
10928 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10929 switch (DECL_FUNCTION_CODE (fndecl))
10931 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10932 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10933 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10934 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10936 CASE_BUILTIN_F (BUILT_IN_ACOS)
10937 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10938 CASE_BUILTIN_F (BUILT_IN_CABS)
10939 CASE_BUILTIN_F (BUILT_IN_COSH)
10940 CASE_BUILTIN_F (BUILT_IN_ERFC)
10941 CASE_BUILTIN_F (BUILT_IN_EXP)
10942 CASE_BUILTIN_F (BUILT_IN_EXP10)
10943 CASE_BUILTIN_F (BUILT_IN_EXP2)
10944 CASE_BUILTIN_F (BUILT_IN_FABS)
10945 CASE_BUILTIN_F (BUILT_IN_FDIM)
10946 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10947 CASE_BUILTIN_F (BUILT_IN_POW10)
10948 CASE_BUILTIN_I (BUILT_IN_FFS)
10949 CASE_BUILTIN_I (BUILT_IN_PARITY)
10950 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10951 /* Always true. */
10952 return 1;
10954 CASE_BUILTIN_F (BUILT_IN_SQRT)
10955 /* sqrt(-0.0) is -0.0. */
10956 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10957 return 1;
10958 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10960 CASE_BUILTIN_F (BUILT_IN_ASINH)
10961 CASE_BUILTIN_F (BUILT_IN_ATAN)
10962 CASE_BUILTIN_F (BUILT_IN_ATANH)
10963 CASE_BUILTIN_F (BUILT_IN_CBRT)
10964 CASE_BUILTIN_F (BUILT_IN_CEIL)
10965 CASE_BUILTIN_F (BUILT_IN_ERF)
10966 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10967 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10968 CASE_BUILTIN_F (BUILT_IN_FMOD)
10969 CASE_BUILTIN_F (BUILT_IN_FREXP)
10970 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10971 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10972 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10973 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10974 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10975 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10976 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10977 CASE_BUILTIN_F (BUILT_IN_LRINT)
10978 CASE_BUILTIN_F (BUILT_IN_LROUND)
10979 CASE_BUILTIN_F (BUILT_IN_MODF)
10980 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10981 CASE_BUILTIN_F (BUILT_IN_POW)
10982 CASE_BUILTIN_F (BUILT_IN_RINT)
10983 CASE_BUILTIN_F (BUILT_IN_ROUND)
10984 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10985 CASE_BUILTIN_F (BUILT_IN_SINH)
10986 CASE_BUILTIN_F (BUILT_IN_TANH)
10987 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10988 /* True if the 1st argument is nonnegative. */
10989 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10991 CASE_BUILTIN_F (BUILT_IN_FMAX)
10992 /* True if the 1st OR the 2nd argument is nonnegative. */
10993 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10994 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10996 CASE_BUILTIN_F (BUILT_IN_FMIN)
10997 /* True if the 1st AND 2nd arguments are nonnegative. */
10998 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10999 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11001 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11002 /* True if the 2nd argument is nonnegative. */
11003 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11005 default:
11006 break;
11007 #undef CASE_BUILTIN_F
11008 #undef CASE_BUILTIN_I
11012 /* ... fall through ... */
11014 default:
11015 if (truth_value_p (TREE_CODE (t)))
11016 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11017 return 1;
11020 /* We don't know the sign of `t', so be conservative and return false. */
11021 return 0;
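
/* Illustrative sketch of the zero-extension reasoning above: with X and
   Y assumed to be unsigned char expressions widened to int, the sum
   fits in MAX (8, 8) + 1 = 9 bits and so cannot set the sign bit of
   the wider int result; the call below should therefore return 1.
   The function name is hypothetical.  */

static int
example_widened_sum_is_nonnegative (tree x, tree y)
{
  tree sum = fold_build2 (PLUS_EXPR, integer_type_node,
			  fold_convert (integer_type_node, x),
			  fold_convert (integer_type_node, y));
  return tree_expr_nonnegative_p (sum);
}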
11024 /* Return true when T is an address and is known to be nonzero.
11025 For floating point we further ensure that T is not denormal.
11026 Similar logic is present in nonzero_address_p in rtlanal.c. */
11028 bool
11029 tree_expr_nonzero_p (tree t)
11031 tree type = TREE_TYPE (t);
11033 /* Doing something useful for floating point would need more work. */
11034 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11035 return false;
11037 switch (TREE_CODE (t))
11039 case ABS_EXPR:
11040 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11042 case INTEGER_CST:
11043 /* We used to test for !integer_zerop here. This does not work correctly
11044 if TREE_CONSTANT_OVERFLOW (t). */
11045 return (TREE_INT_CST_LOW (t) != 0
11046 || TREE_INT_CST_HIGH (t) != 0);
11048 case PLUS_EXPR:
11049 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11051 /* In the presence of negative values it is hard
11052 to say anything definite. */
11053 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11054 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11055 return false;
11056 /* One of the operands must be positive and the other non-negative. */
11057 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11058 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11060 break;
11062 case MULT_EXPR:
11063 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11065 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11066 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11068 break;
11070 case NOP_EXPR:
11072 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11073 tree outer_type = TREE_TYPE (t);
11075 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11076 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11078 break;
11080 case ADDR_EXPR:
11082 tree base = get_base_address (TREE_OPERAND (t, 0));
11084 if (!base)
11085 return false;
11087 /* Weak declarations may link to NULL. */
11088 if (VAR_OR_FUNCTION_DECL_P (base))
11089 return !DECL_WEAK (base);
11091 /* Constants are never weak. */
11092 if (CONSTANT_CLASS_P (base))
11093 return true;
11095 return false;
11098 case COND_EXPR:
11099 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11100 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11102 case MIN_EXPR:
11103 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11104 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11106 case MAX_EXPR:
11107 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11109 /* When both operands are nonzero, then MAX must be too. */
11110 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11111 return true;
11113 /* MAX where operand 0 is positive is positive. */
11114 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11116 /* MAX where operand 1 is positive is positive. */
11117 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11118 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11119 return true;
11120 break;
11122 case COMPOUND_EXPR:
11123 case MODIFY_EXPR:
11124 case BIND_EXPR:
11125 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11127 case SAVE_EXPR:
11128 case NON_LVALUE_EXPR:
11129 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11131 case BIT_IOR_EXPR:
11132 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11133 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11135 case CALL_EXPR:
11136 return alloca_call_p (t);
11138 default:
11139 break;
11141 return false;
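
/* Illustrative sketch of the ADDR_EXPR case above: the address of a
   declaration is known to be nonzero unless the declaration is weak.
   DECL is assumed to be a non-weak VAR_DECL; the function name is
   hypothetical.  */

static bool
example_decl_address_nonzero (tree decl)
{
  tree addr = build_fold_addr_expr (decl);
  return tree_expr_nonzero_p (addr);
}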
11144 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11145 attempt to fold the expression to a constant without modifying TYPE,
11146 OP0 or OP1.
11148 If the expression could be simplified to a constant, then return
11149 the constant. If the expression would not be simplified to a
11150 constant, then return NULL_TREE. */
11152 tree
11153 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11155 tree tem = fold_binary (code, type, op0, op1);
11156 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11159 /* Given the components of a unary expression CODE, TYPE and OP0,
11160 attempt to fold the expression to a constant without modifying
11161 TYPE or OP0.
11163 If the expression could be simplified to a constant, then return
11164 the constant. If the expression would not be simplified to a
11165 constant, then return NULL_TREE. */
11167 tree
11168 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11170 tree tem = fold_unary (code, type, op0);
11171 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
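
/* Illustrative sketch: 2 + 3 simplifies to the INTEGER_CST 5, so a
   constant is returned; replacing either operand with a non-constant
   would make fold_binary_to_constant return NULL_TREE instead.  The
   function name is hypothetical.  */

static tree
example_fold_constant_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}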
11174 /* If EXP represents referencing an element in a constant string
11175 (either via pointer arithmetic or array indexing), return the
11176 tree representing the value accessed, otherwise return NULL. */
11178 tree
11179 fold_read_from_constant_string (tree exp)
11181 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11183 tree exp1 = TREE_OPERAND (exp, 0);
11184 tree index;
11185 tree string;
11187 if (TREE_CODE (exp) == INDIRECT_REF)
11188 string = string_constant (exp1, &index);
11189 else
11191 tree low_bound = array_ref_low_bound (exp);
11192 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11194 /* Optimize the special-case of a zero lower bound.
11196 We convert the low_bound to sizetype to avoid some problems
11197 with constant folding. (E.g. suppose the lower bound is 1,
11198 and its mode is QI. Without the conversion, (ARRAY
11199 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11200 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11201 if (! integer_zerop (low_bound))
11202 index = size_diffop (index, fold_convert (sizetype, low_bound));
11204 string = exp1;
11207 if (string
11208 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11209 && TREE_CODE (string) == STRING_CST
11210 && TREE_CODE (index) == INTEGER_CST
11211 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11212 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11213 == MODE_INT)
11214 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11215 return fold_convert (TREE_TYPE (exp),
11216 build_int_cst (NULL_TREE,
11217 (TREE_STRING_POINTER (string)
11218 [TREE_INT_CST_LOW (index)])));
11220 return NULL;
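
/* Illustrative sketch: if EXP represents "abc"[1], i.e. an ARRAY_REF of
   a STRING_CST with constant index 1, the call below yields the
   character constant 'b'; for a non-constant string or index it yields
   NULL and the original tree is kept.  The function name is
   hypothetical.  */

static tree
example_fold_string_element (tree exp)
{
  tree elt = fold_read_from_constant_string (exp);
  return elt ? elt : exp;
}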
11223 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11224 an integer constant or real constant.
11226 TYPE is the type of the result. */
11228 static tree
11229 fold_negate_const (tree arg0, tree type)
11231 tree t = NULL_TREE;
11233 switch (TREE_CODE (arg0))
11235 case INTEGER_CST:
11237 unsigned HOST_WIDE_INT low;
11238 HOST_WIDE_INT high;
11239 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11240 TREE_INT_CST_HIGH (arg0),
11241 &low, &high);
11242 t = build_int_cst_wide (type, low, high);
11243 t = force_fit_type (t, 1,
11244 (overflow | TREE_OVERFLOW (arg0))
11245 && !TYPE_UNSIGNED (type),
11246 TREE_CONSTANT_OVERFLOW (arg0));
11247 break;
11250 case REAL_CST:
11251 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11252 break;
11254 default:
11255 gcc_unreachable ();
11258 return t;
11261 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11262 an integer constant or real constant.
11264 TYPE is the type of the result. */
11266 tree
11267 fold_abs_const (tree arg0, tree type)
11269 tree t = NULL_TREE;
11271 switch (TREE_CODE (arg0))
11273 case INTEGER_CST:
11274 /* If the value is unsigned, then the absolute value is
11275 the same as the ordinary value. */
11276 if (TYPE_UNSIGNED (type))
11277 t = arg0;
11278 /* Similarly, if the value is non-negative. */
11279 else if (INT_CST_LT (integer_minus_one_node, arg0))
11280 t = arg0;
11281 /* If the value is negative, then the absolute value is
11282 its negation. */
11283 else
11285 unsigned HOST_WIDE_INT low;
11286 HOST_WIDE_INT high;
11287 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11288 TREE_INT_CST_HIGH (arg0),
11289 &low, &high);
11290 t = build_int_cst_wide (type, low, high);
11291 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11292 TREE_CONSTANT_OVERFLOW (arg0));
11294 break;
11296 case REAL_CST:
11297 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11298 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11299 else
11300 t = arg0;
11301 break;
11303 default:
11304 gcc_unreachable ();
11307 return t;
11310 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11311 constant. TYPE is the type of the result. */
11313 static tree
11314 fold_not_const (tree arg0, tree type)
11316 tree t = NULL_TREE;
11318 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11320 t = build_int_cst_wide (type,
11321 ~ TREE_INT_CST_LOW (arg0),
11322 ~ TREE_INT_CST_HIGH (arg0));
11323 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11324 TREE_CONSTANT_OVERFLOW (arg0));
11326 return t;
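
/* Illustrative sketch of the two's complement identity behind the
   constant folders above: ~X equals -X - 1, so fold_not_const and
   fold_negate_const must agree in that way on any INTEGER_CST X
   (bitwise, ignoring overflow flags).  The function name is
   hypothetical; X is assumed to be an INTEGER_CST.  */

static bool
example_not_is_neg_minus_one (tree x)
{
  tree type = TREE_TYPE (x);
  tree not_x = fold_not_const (x, type);
  tree neg_x_minus_1 = fold_build2 (MINUS_EXPR, type,
				    fold_negate_const (x, type),
				    build_int_cst (type, 1));
  return tree_int_cst_equal (not_x, neg_x_minus_1) != 0;
}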
11329 /* Given CODE, a relational operator, the target type, TYPE and two
11330 constant operands OP0 and OP1, return the result of the
11331 relational operation. If the result is not a compile time
11332 constant, then return NULL_TREE. */
11334 static tree
11335 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11337 int result, invert;
11339 /* From here on, the only cases we handle are when the result is
11340 known to be a constant. */
11342 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11344 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11345 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11347 /* Handle the cases where either operand is a NaN. */
11348 if (real_isnan (c0) || real_isnan (c1))
11350 switch (code)
11352 case EQ_EXPR:
11353 case ORDERED_EXPR:
11354 result = 0;
11355 break;
11357 case NE_EXPR:
11358 case UNORDERED_EXPR:
11359 case UNLT_EXPR:
11360 case UNLE_EXPR:
11361 case UNGT_EXPR:
11362 case UNGE_EXPR:
11363 case UNEQ_EXPR:
11364 result = 1;
11365 break;
11367 case LT_EXPR:
11368 case LE_EXPR:
11369 case GT_EXPR:
11370 case GE_EXPR:
11371 case LTGT_EXPR:
11372 if (flag_trapping_math)
11373 return NULL_TREE;
11374 result = 0;
11375 break;
11377 default:
11378 gcc_unreachable ();
11381 return constant_boolean_node (result, type);
11384 return constant_boolean_node (real_compare (code, c0, c1), type);
11387 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11389 To compute GT, swap the arguments and do LT.
11390 To compute GE, do LT and invert the result.
11391 To compute LE, swap the arguments, do LT and invert the result.
11392 To compute NE, do EQ and invert the result.
11394 Therefore, the code below must handle only EQ and LT. */
11396 if (code == LE_EXPR || code == GT_EXPR)
11398 tree tem = op0;
11399 op0 = op1;
11400 op1 = tem;
11401 code = swap_tree_comparison (code);
11404 /* Note that it is safe to invert for real values here because we
11405 have already handled the one case where it matters. */
11407 invert = 0;
11408 if (code == NE_EXPR || code == GE_EXPR)
11410 invert = 1;
11411 code = invert_tree_comparison (code, false);
11414 /* Compute a result for LT or EQ if the arguments permit;
11415 otherwise return NULL_TREE. */
11416 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11418 if (code == EQ_EXPR)
11419 result = tree_int_cst_equal (op0, op1);
11420 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11421 result = INT_CST_LT_UNSIGNED (op0, op1);
11422 else
11423 result = INT_CST_LT (op0, op1);
11425 else
11426 return NULL_TREE;
11428 if (invert)
11429 result ^= 1;
11430 return constant_boolean_node (result, type);
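
/* Illustrative sketch of the reduction above: 2 >= 3 is computed as
   ! (2 < 3).  invert_tree_comparison turns GE_EXPR into LT_EXPR,
   INT_CST_LT (2, 3) yields 1, and the final inversion produces the
   false boolean node.  The function name is hypothetical.  */

static tree
example_fold_const_ge (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_relational_const (GE_EXPR, boolean_type_node, two, three);
}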
11433 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11434 Don't build a cleanup point expression for EXPR if it doesn't have side
11435 effects. */
11437 tree
11438 fold_build_cleanup_point_expr (tree type, tree expr)
11440 /* If the expression does not have side effects then we don't have to wrap
11441 it with a cleanup point expression. */
11442 if (!TREE_SIDE_EFFECTS (expr))
11443 return expr;
11445 /* If the expression is a return, check the expression inside the
11446 return and the right-hand side of the modify expression it
11447 contains. If either has no side effects, we don't need to
11448 wrap the expression in a cleanup point expression. Note we don't check the
11449 left-hand side of the modify because it should always be the return decl. */
11450 if (TREE_CODE (expr) == RETURN_EXPR)
11452 tree op = TREE_OPERAND (expr, 0);
11453 if (!op || !TREE_SIDE_EFFECTS (op))
11454 return expr;
11455 op = TREE_OPERAND (op, 1);
11456 if (!TREE_SIDE_EFFECTS (op))
11457 return expr;
11460 return build1 (CLEANUP_POINT_EXPR, type, expr);
11463 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11464 avoid confusing the gimplify process. */
11466 tree
11467 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11469 /* The size of the object is not relevant when talking about its address. */
11470 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11471 t = TREE_OPERAND (t, 0);
11473 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11474 if (TREE_CODE (t) == INDIRECT_REF
11475 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11477 t = TREE_OPERAND (t, 0);
11478 if (TREE_TYPE (t) != ptrtype)
11479 t = build1 (NOP_EXPR, ptrtype, t);
11481 else
11483 tree base = t;
11485 while (handled_component_p (base))
11486 base = TREE_OPERAND (base, 0);
11487 if (DECL_P (base))
11488 TREE_ADDRESSABLE (base) = 1;
11490 t = build1 (ADDR_EXPR, ptrtype, t);
11493 return t;
11496 tree
11497 build_fold_addr_expr (tree t)
11499 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11502 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11503 of an indirection through OP0, or NULL_TREE if no simplification is
11504 possible. */
11506 tree
11507 fold_indirect_ref_1 (tree type, tree op0)
11509 tree sub = op0;
11510 tree subtype;
11512 STRIP_NOPS (sub);
11513 subtype = TREE_TYPE (sub);
11514 if (!POINTER_TYPE_P (subtype))
11515 return NULL_TREE;
11517 if (TREE_CODE (sub) == ADDR_EXPR)
11519 tree op = TREE_OPERAND (sub, 0);
11520 tree optype = TREE_TYPE (op);
11521 /* *&p => p */
11522 if (type == optype)
11523 return op;
11524 /* *(foo *)&fooarray => fooarray[0] */
11525 else if (TREE_CODE (optype) == ARRAY_TYPE
11526 && type == TREE_TYPE (optype))
11528 tree type_domain = TYPE_DOMAIN (optype);
11529 tree min_val = size_zero_node;
11530 if (type_domain && TYPE_MIN_VALUE (type_domain))
11531 min_val = TYPE_MIN_VALUE (type_domain);
11532 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11536 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11537 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11538 && type == TREE_TYPE (TREE_TYPE (subtype)))
11540 tree type_domain;
11541 tree min_val = size_zero_node;
11542 sub = build_fold_indirect_ref (sub);
11543 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11544 if (type_domain && TYPE_MIN_VALUE (type_domain))
11545 min_val = TYPE_MIN_VALUE (type_domain);
11546 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11549 return NULL_TREE;
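
/* Illustrative sketch of the "*&p => p" case above: taking the address
   of DECL and immediately dereferencing it folds back to DECL itself.
   DECL is assumed to be a VAR_DECL; the function name is
   hypothetical.  */

static tree
example_fold_deref_of_address (tree decl)
{
  tree addr = build_fold_addr_expr (decl);
  return fold_indirect_ref_1 (TREE_TYPE (decl), addr);
}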
11552 /* Builds an expression for an indirection through T, simplifying some
11553 cases. */
11555 tree
11556 build_fold_indirect_ref (tree t)
11558 tree type = TREE_TYPE (TREE_TYPE (t));
11559 tree sub = fold_indirect_ref_1 (type, t);
11561 if (sub)
11562 return sub;
11563 else
11564 return build1 (INDIRECT_REF, type, t);
11567 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11569 tree
11570 fold_indirect_ref (tree t)
11572 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11574 if (sub)
11575 return sub;
11576 else
11577 return t;
11580 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11581 whose result is ignored. The type of the returned tree need not be
11582 the same as the original expression. */
11584 tree
11585 fold_ignored_result (tree t)
11587 if (!TREE_SIDE_EFFECTS (t))
11588 return integer_zero_node;
11590 for (;;)
11591 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11593 case tcc_unary:
11594 t = TREE_OPERAND (t, 0);
11595 break;
11597 case tcc_binary:
11598 case tcc_comparison:
11599 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11600 t = TREE_OPERAND (t, 0);
11601 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11602 t = TREE_OPERAND (t, 1);
11603 else
11604 return t;
11605 break;
11607 case tcc_expression:
11608 switch (TREE_CODE (t))
11610 case COMPOUND_EXPR:
11611 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11612 return t;
11613 t = TREE_OPERAND (t, 0);
11614 break;
11616 case COND_EXPR:
11617 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11618 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11619 return t;
11620 t = TREE_OPERAND (t, 0);
11621 break;
11623 default:
11624 return t;
11626 break;
11628 default:
11629 return t;
11633 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11634 This can only be applied to objects of a sizetype. */
11636 tree
11637 round_up (tree value, int divisor)
11639 tree div = NULL_TREE;
11641 gcc_assert (divisor > 0);
11642 if (divisor == 1)
11643 return value;
11645 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11646 have to do anything. Only do this when we are not given a const,
11647 because in that case, this check is more expensive than just
11648 doing the rounding. */
11649 if (TREE_CODE (value) != INTEGER_CST)
11651 div = build_int_cst (TREE_TYPE (value), divisor);
11653 if (multiple_of_p (TREE_TYPE (value), value, div))
11654 return value;
11657 /* If divisor is a power of two, simplify this to bit manipulation. */
11658 if (divisor == (divisor & -divisor))
11660 tree t;
11662 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11663 value = size_binop (PLUS_EXPR, value, t);
11664 t = build_int_cst (TREE_TYPE (value), -divisor);
11665 value = size_binop (BIT_AND_EXPR, value, t);
11667 else
11669 if (!div)
11670 div = build_int_cst (TREE_TYPE (value), divisor);
11671 value = size_binop (CEIL_DIV_EXPR, value, div);
11672 value = size_binop (MULT_EXPR, value, div);
11675 return value;
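
/* Illustrative sketch of the power-of-two path above: rounding 37 up to
   a multiple of 8 computes (37 + 7) & -8, i.e. 44 & ~7 = 40, using
   only an addition and a mask instead of a division.  The function
   name is hypothetical; SIZE is assumed to be an expression of a
   sizetype.  */

static tree
example_round_up_to_eight (tree size)
{
  return round_up (size, 8);
}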
11678 /* Likewise, but round down. */
11680 tree
11681 round_down (tree value, int divisor)
11683 tree div = NULL_TREE;
11685 gcc_assert (divisor > 0);
11686 if (divisor == 1)
11687 return value;
11689 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11690 have to do anything. Only do this when we are not given a const,
11691 because in that case, this check is more expensive than just
11692 doing the rounding. */
11693 if (TREE_CODE (value) != INTEGER_CST)
11695 div = build_int_cst (TREE_TYPE (value), divisor);
11697 if (multiple_of_p (TREE_TYPE (value), value, div))
11698 return value;
11701 /* If divisor is a power of two, simplify this to bit manipulation. */
11702 if (divisor == (divisor & -divisor))
11704 tree t;
11706 t = build_int_cst (TREE_TYPE (value), -divisor);
11707 value = size_binop (BIT_AND_EXPR, value, t);
11709 else
11711 if (!div)
11712 div = build_int_cst (TREE_TYPE (value), divisor);
11713 value = size_binop (FLOOR_DIV_EXPR, value, div);
11714 value = size_binop (MULT_EXPR, value, div);
11717 return value;
11720 /* Returns the pointer to the base of the object addressed by EXP and
11721 extracts the information about the offset of the access, storing it
11722 in *PBITPOS and *POFFSET. */
11724 static tree
11725 split_address_to_core_and_offset (tree exp,
11726 HOST_WIDE_INT *pbitpos, tree *poffset)
11728 tree core;
11729 enum machine_mode mode;
11730 int unsignedp, volatilep;
11731 HOST_WIDE_INT bitsize;
11733 if (TREE_CODE (exp) == ADDR_EXPR)
11735 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11736 poffset, &mode, &unsignedp, &volatilep,
11737 false);
11738 core = build_fold_addr_expr (core);
11740 else
11742 core = exp;
11743 *pbitpos = 0;
11744 *poffset = NULL_TREE;
11747 return core;
11750 /* Returns true if addresses of E1 and E2 differ by a constant, false
11751 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11753 bool
11754 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11756 tree core1, core2;
11757 HOST_WIDE_INT bitpos1, bitpos2;
11758 tree toffset1, toffset2, tdiff, type;
11760 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11761 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11763 if (bitpos1 % BITS_PER_UNIT != 0
11764 || bitpos2 % BITS_PER_UNIT != 0
11765 || !operand_equal_p (core1, core2, 0))
11766 return false;
11768 if (toffset1 && toffset2)
11770 type = TREE_TYPE (toffset1);
11771 if (type != TREE_TYPE (toffset2))
11772 toffset2 = fold_convert (type, toffset2);
11774 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11775 if (!cst_and_fits_in_hwi (tdiff))
11776 return false;
11778 *diff = int_cst_value (tdiff);
11780 else if (toffset1 || toffset2)
11782 /* If only one of the offsets is non-constant, the difference cannot
11783 be a constant. */
11784 return false;
11786 else
11787 *diff = 0;
11789 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11790 return true;
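
/* Illustrative sketch, assuming E1 and E2 are ADDR_EXPRs such as &a[3]
   and &a[1] for a char array: both decompose to the same core with
   constant offsets, so the byte distance 2 is stored in DIFF.  A
   variable index in either address makes the call return false.  The
   function name is hypothetical.  */

static bool
example_addresses_two_bytes_apart (tree e1, tree e2)
{
  HOST_WIDE_INT diff;
  return ptr_difference_const (e1, e2, &diff) && diff == 2;
}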
11793 /* Simplify the floating point expression EXP when the sign of the
11794 result is not significant. Return NULL_TREE if no simplification
11795 is possible. */
11797 tree
11798 fold_strip_sign_ops (tree exp)
11800 tree arg0, arg1;
11802 switch (TREE_CODE (exp))
11804 case ABS_EXPR:
11805 case NEGATE_EXPR:
11806 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11807 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11809 case MULT_EXPR:
11810 case RDIV_EXPR:
11811 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11812 return NULL_TREE;
11813 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11814 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11815 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11816 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11817 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11818 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11819 break;
11821 default:
11822 break;
11824 return NULL_TREE;
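
/* Illustrative sketch: in a sign-insensitive context such as the
   argument of fabs, -X * Y can be simplified to X * Y by stripping the
   NEGATE_EXPR, matching the MULT_EXPR case above.  The caller must
   itself guarantee that the sign of the result is irrelevant.  The
   function name is hypothetical.  */

static tree
example_strip_signs (tree arg)
{
  tree stripped = fold_strip_sign_ops (arg);
  return stripped ? stripped : arg;
}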