/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
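/* Worked example, for illustration: with 8-bit two's complement values,
   100 + 100 wraps to -56.  Both operands are positive, so ~(a ^ b) has
   the sign bit set, and a ^ sum also has the sign bit set because a and
   sum differ in sign; their AND is negative, so the macro reports
   overflow.  For -1 + 1 = 0 the operands differ in sign, ~(a ^ b) has a
   clear sign bit, and no overflow is reported.  */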
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
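/* Worked example, for illustration, assuming HOST_BITS_PER_WIDE_INT == 32:
   BASE is 0x10000 and each word holds 16 bits, so for x == 0x12345678,
   LOWPART (x) == 0x5678 and HIGHPART (x) == 0x1234, i.e.
   x == 0x5678 + 0x1234 * BASE.  Using only half the bits per word leaves
   headroom so that digit products and carries below never overflow a
   HOST_WIDE_INT.  */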
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
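/* Illustrative round trip, again assuming HOST_BITS_PER_WIDE_INT == 32:
   encode of low == 0xDEADBEEF, hi == 0x1 yields
   words[] == { 0xBEEF, 0xDEAD, 0x1, 0x0 }, and decode recombines each
   pair as words[0] + words[1] * BASE == 0xDEADBEEF and
   words[2] + words[3] * BASE == 0x1, recovering the original value.  */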
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value: when >0 we are only interested in signed
   overflow, when <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
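/* Illustrative example: fitting the value 0x1FF into a signed 8-bit type
   first masks away the bits above bit 7, leaving 0xFF; bit 7 is set, so
   the value is sign extended (low becomes all one bits, high == -1),
   representing -1.  Because the value changed, a fresh INTEGER_CST is
   built, and the overflow flags are then set according to OVERFLOWABLE,
   OVERFLOWED and OVERFLOWED_CONST.  */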
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
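/* Illustrative note: the carry out of the low word is detected by the
   unsigned comparison l < l1, since wraparound in l1 + l2 happens
   exactly when the sum is smaller than an operand.  E.g., with 32-bit
   words, l1 == 0xFFFFFFFF and l2 == 2 give l == 1 < l1, carrying 1 into
   the high word; signed overflow of the full doubleword sum is then
   reported by OVERFLOW_SUM_SIGN on the high halves.  */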
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
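/* Illustrative note: when l1 == 0, *hv & h1 is (-h1) & h1, which
   isolates the lowest set bit of h1; it is the sign bit only when h1 is
   the minimum HOST_WIDE_INT.  The overflow test therefore fires exactly
   for the most negative doubleword value, the only value whose negation
   is unrepresentable.  */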
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
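/* Illustrative note: the loop above is schoolbook multiplication in base
   2^(HOST_BITS_PER_WIDE_INT / 2); the bounds quoted in its comments
   (0xFFFE0001, 0xFFFF0000, 0xFFFFFFFF) assume 16-bit digits in a 32-bit
   HOST_WIDE_INT and show that CARRY can never wrap.  The overflow test
   compares the sign-corrected upper doubleword of the 4-word product
   against the sign of the lower doubleword: it must be all zero bits for
   a nonnegative result and all one bits for a negative one.  */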
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
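/* Illustrative note: the double shift
   (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1)) >> 1 moves the high bits
   of the low word into the high word while remaining well defined for
   count == 0, where a single shift by HOST_BITS_PER_WIDE_INT would be
   undefined behavior in C.  E.g., with 32-bit words, shifting
   l1 == 0x80000000, h1 == 0 left by 1 gives *hv == 1, *lv == 0.  */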
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
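/* Illustrative note: both rotates rely on the identity
   rot (x, n) == (x << n) | (x >> (prec - n)), built from the two
   logical double-word shifts above.  E.g., rotating the 8-bit value
   0xB1 left by 4 with prec == 8 gives 0x10 | 0x0B == 0x1B.  */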
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra element for scaling */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }
      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */
      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1; */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
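/* Illustrative example of the rounding modes: dividing -7 by 2 gives a
   truncated quotient of -3 with remainder -1.  FLOOR_DIV_EXPR adjusts
   the quotient to -4, CEIL_DIV_EXPR leaves it at -3, and ROUND_DIV_EXPR
   bumps it away from zero to -4 because 2 * |rem| >= |den| (2 >= 2).
   The remainder is recomputed at the end as num - quo * den so that it
   always matches the adjusted quotient.  */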
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
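/* Illustrative example: for a signed 8-bit type the only constant whose
   negation overflows is -128, whose low-order 8 bits are exactly
   1 << 7; any other bit pattern compares unequal to that sign-bit mask
   and the function returns true.  */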
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
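/* Illustrative summary of the rewrites this predicate anticipates:
   -(-x) -> x, -(~x) -> x + 1 for integers, -(a - b) -> b - a,
   -(a + b) -> (-b) - a or (-a) - b, -(a * b) -> a * -b when a factor
   is cheaply negatable, and -f(x) -> f(-x) for odd math builtins such
   as sin.  The actual transformations are performed by negate_expr
   below.  */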
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
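/* Illustrative example: splitting IN == x + 5 with CODE == PLUS_EXPR
   stores 5 in *LITP, leaves *CONP null, and returns x as the variable
   part; splitting x - 5 stores 5 in *MINUS_LITP instead, because the
   literal was subtracted.  The pieces can then be recombined with
   associate_trees below.  */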
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
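/* Illustrative note on the MIN/MAX case above: LOW is first set to the
   result of the doubleword comparison ARG1 < ARG2 (high words compared
   first, with the low words as an unsigned tie-break); ARG1 is then
   selected exactly when that result matches the code, i.e. when
   ARG1 < ARG2 for MIN_EXPR or ARG1 >= ARG2 for MAX_EXPR.  */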
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);
      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math is set.  */

      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;
      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
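/* Illustrative note: the complex cases use the textbook formulas
   (a + bi)(c + di) == (ac - bd) + (ad + bc)i and
   (a + bi)/(c + di) == ((ac + bd) + (bc - ad)i) / (c*c + d*d),
   computing each component by recursive calls on the part constants.
   E.g., (1 + 2i) / (3 + 4i) folds via magsquared == 25 to
   11/25 + (2/25)i in a real type.  */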
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */
static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
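/* Illustrative example: folding (int) 3.0e10 for a 32-bit int truncates
   the real value, finds it above TYPE_MAX_VALUE, and produces
   INT_MAX == 2147483647 with the overflow flags set; converting a NaN
   folds to 0, likewise flagged as an overflow.  */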
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1953 /* Convert expression ARG to type TYPE. Used by the middle-end for
1954 simple conversions in preference to calling the front-end's convert. */
1956 tree
1957 fold_convert (tree type, tree arg)
1959 tree orig = TREE_TYPE (arg);
1960 tree tem;
1962 if (type == orig)
1963 return arg;
1965 if (TREE_CODE (arg) == ERROR_MARK
1966 || TREE_CODE (type) == ERROR_MARK
1967 || TREE_CODE (orig) == ERROR_MARK)
1968 return error_mark_node;
1970 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1971 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1972 TYPE_MAIN_VARIANT (orig)))
1973 return fold_build1 (NOP_EXPR, type, arg);
1975 switch (TREE_CODE (type))
1977 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1978 case POINTER_TYPE: case REFERENCE_TYPE:
1979 case OFFSET_TYPE:
1980 if (TREE_CODE (arg) == INTEGER_CST)
1982 tem = fold_convert_const (NOP_EXPR, type, arg);
1983 if (tem != NULL_TREE)
1984 return tem;
1986 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1987 || TREE_CODE (orig) == OFFSET_TYPE)
1988 return fold_build1 (NOP_EXPR, type, arg);
1989 if (TREE_CODE (orig) == COMPLEX_TYPE)
1991 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1992 return fold_convert (type, tem);
1994 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1995 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1996 return fold_build1 (NOP_EXPR, type, arg);
1998 case REAL_TYPE:
1999 if (TREE_CODE (arg) == INTEGER_CST)
2001 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2002 if (tem != NULL_TREE)
2003 return tem;
2005 else if (TREE_CODE (arg) == REAL_CST)
2007 tem = fold_convert_const (NOP_EXPR, type, arg);
2008 if (tem != NULL_TREE)
2009 return tem;
2012 switch (TREE_CODE (orig))
2014 case INTEGER_TYPE: case CHAR_TYPE:
2015 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2016 case POINTER_TYPE: case REFERENCE_TYPE:
2017 return fold_build1 (FLOAT_EXPR, type, arg);
2019 case REAL_TYPE:
2020 return fold_build1 (NOP_EXPR, type, arg);
2022 case COMPLEX_TYPE:
2023 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2024 return fold_convert (type, tem);
2026 default:
2027 gcc_unreachable ();
2030 case COMPLEX_TYPE:
2031 switch (TREE_CODE (orig))
2033 case INTEGER_TYPE: case CHAR_TYPE:
2034 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2035 case POINTER_TYPE: case REFERENCE_TYPE:
2036 case REAL_TYPE:
2037 return build2 (COMPLEX_EXPR, type,
2038 fold_convert (TREE_TYPE (type), arg),
2039 fold_convert (TREE_TYPE (type), integer_zero_node));
2040 case COMPLEX_TYPE:
2042 tree rpart, ipart;
2044 if (TREE_CODE (arg) == COMPLEX_EXPR)
2046 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2047 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2048 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2051 arg = save_expr (arg);
2052 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2053 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2054 rpart = fold_convert (TREE_TYPE (type), rpart);
2055 ipart = fold_convert (TREE_TYPE (type), ipart);
2056 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2059 default:
2060 gcc_unreachable ();
2063 case VECTOR_TYPE:
2064 if (integer_zerop (arg))
2065 return build_zero_vector (type);
2066 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2067 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2068 || TREE_CODE (orig) == VECTOR_TYPE);
2069 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2071 case VOID_TYPE:
2072 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2074 default:
2075 gcc_unreachable ();
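/* For example, middle-end code can write (an illustrative sketch using
   these interfaces)

     tree t = fold_convert (double_type_node,
                            build_int_cst (integer_type_node, 3));

   and receive the REAL_CST 3.0 directly, via fold_convert_const and
   build_real_from_int_cst, rather than an unfolded conversion node.  */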
2079 /* Return false if expr can be assumed not to be an lvalue, true
2080 otherwise. */
2082 static bool
2083 maybe_lvalue_p (tree x)
2085 /* We only need to wrap lvalue tree codes. */
2086 switch (TREE_CODE (x))
2088 case VAR_DECL:
2089 case PARM_DECL:
2090 case RESULT_DECL:
2091 case LABEL_DECL:
2092 case FUNCTION_DECL:
2093 case SSA_NAME:
2095 case COMPONENT_REF:
2096 case INDIRECT_REF:
2097 case ALIGN_INDIRECT_REF:
2098 case MISALIGNED_INDIRECT_REF:
2099 case ARRAY_REF:
2100 case ARRAY_RANGE_REF:
2101 case BIT_FIELD_REF:
2102 case OBJ_TYPE_REF:
2104 case REALPART_EXPR:
2105 case IMAGPART_EXPR:
2106 case PREINCREMENT_EXPR:
2107 case PREDECREMENT_EXPR:
2108 case SAVE_EXPR:
2109 case TRY_CATCH_EXPR:
2110 case WITH_CLEANUP_EXPR:
2111 case COMPOUND_EXPR:
2112 case MODIFY_EXPR:
2113 case TARGET_EXPR:
2114 case COND_EXPR:
2115 case BIND_EXPR:
2116 case MIN_EXPR:
2117 case MAX_EXPR:
2118 break;
2120 default:
2121 /* Assume the worst for front-end tree codes. */
2122 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2123 break;
2124 return false;
2127 return true;
2130 /* Return an expr equal to X but certainly not valid as an lvalue. */
2132 tree
2133 non_lvalue (tree x)
2135 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2136 us. */
2137 if (in_gimple_form)
2138 return x;
2140 if (! maybe_lvalue_p (x))
2141 return x;
2142 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2145 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2146 Zero means allow extended lvalues. */
2148 int pedantic_lvalues;
2150 /* When pedantic, return an expr equal to X but certainly not valid as a
2151 pedantic lvalue. Otherwise, return X. */
2153 static tree
2154 pedantic_non_lvalue (tree x)
2156 if (pedantic_lvalues)
2157 return non_lvalue (x);
2158 else
2159 return x;
2162 /* Given a tree comparison code, return the code that is the logical inverse
2163 of the given code. It is not safe to do this for floating-point
2164 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a
2165 HONOR_NANS flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2167 enum tree_code
2168 invert_tree_comparison (enum tree_code code, bool honor_nans)
2170 if (honor_nans && flag_trapping_math)
2171 return ERROR_MARK;
2173 switch (code)
2175 case EQ_EXPR:
2176 return NE_EXPR;
2177 case NE_EXPR:
2178 return EQ_EXPR;
2179 case GT_EXPR:
2180 return honor_nans ? UNLE_EXPR : LE_EXPR;
2181 case GE_EXPR:
2182 return honor_nans ? UNLT_EXPR : LT_EXPR;
2183 case LT_EXPR:
2184 return honor_nans ? UNGE_EXPR : GE_EXPR;
2185 case LE_EXPR:
2186 return honor_nans ? UNGT_EXPR : GT_EXPR;
2187 case LTGT_EXPR:
2188 return UNEQ_EXPR;
2189 case UNEQ_EXPR:
2190 return LTGT_EXPR;
2191 case UNGT_EXPR:
2192 return LE_EXPR;
2193 case UNGE_EXPR:
2194 return LT_EXPR;
2195 case UNLT_EXPR:
2196 return GE_EXPR;
2197 case UNLE_EXPR:
2198 return GT_EXPR;
2199 case ORDERED_EXPR:
2200 return UNORDERED_EXPR;
2201 case UNORDERED_EXPR:
2202 return ORDERED_EXPR;
2203 default:
2204 gcc_unreachable ();
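/* Why NaNs matter when inverting: on unordered operands every ordered
   comparison is false, so the logical inverse of `<' is "unordered or
   greater or equal", not `>='.  A self-contained C illustration:  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  assert (!(x < y));            /* LT is false on unordered operands...  */
  assert (!(x >= y));           /* ...but so is GE, so GE is not the
                                   inverse of LT; UNGE is.  */
  assert ((x < y) == (y > x));  /* Swapping operands, by contrast,
                                   is always safe, even on NaNs.  */
  return 0;
}
#endif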
2208 /* Similar, but return the comparison that results if the operands are
2209 swapped. This is safe for floating-point. */
2211 enum tree_code
2212 swap_tree_comparison (enum tree_code code)
2214 switch (code)
2216 case EQ_EXPR:
2217 case NE_EXPR:
2218 case ORDERED_EXPR:
2219 case UNORDERED_EXPR:
2220 case LTGT_EXPR:
2221 case UNEQ_EXPR:
2222 return code;
2223 case GT_EXPR:
2224 return LT_EXPR;
2225 case GE_EXPR:
2226 return LE_EXPR;
2227 case LT_EXPR:
2228 return GT_EXPR;
2229 case LE_EXPR:
2230 return GE_EXPR;
2231 case UNGT_EXPR:
2232 return UNLT_EXPR;
2233 case UNGE_EXPR:
2234 return UNLE_EXPR;
2235 case UNLT_EXPR:
2236 return UNGT_EXPR;
2237 case UNLE_EXPR:
2238 return UNGE_EXPR;
2239 default:
2240 gcc_unreachable ();
2245 /* Convert a comparison tree code from an enum tree_code representation
2246 into a compcode bit-based encoding. This function is the inverse of
2247 compcode_to_comparison. */
2249 static enum comparison_code
2250 comparison_to_compcode (enum tree_code code)
2252 switch (code)
2254 case LT_EXPR:
2255 return COMPCODE_LT;
2256 case EQ_EXPR:
2257 return COMPCODE_EQ;
2258 case LE_EXPR:
2259 return COMPCODE_LE;
2260 case GT_EXPR:
2261 return COMPCODE_GT;
2262 case NE_EXPR:
2263 return COMPCODE_NE;
2264 case GE_EXPR:
2265 return COMPCODE_GE;
2266 case ORDERED_EXPR:
2267 return COMPCODE_ORD;
2268 case UNORDERED_EXPR:
2269 return COMPCODE_UNORD;
2270 case UNLT_EXPR:
2271 return COMPCODE_UNLT;
2272 case UNEQ_EXPR:
2273 return COMPCODE_UNEQ;
2274 case UNLE_EXPR:
2275 return COMPCODE_UNLE;
2276 case UNGT_EXPR:
2277 return COMPCODE_UNGT;
2278 case LTGT_EXPR:
2279 return COMPCODE_LTGT;
2280 case UNGE_EXPR:
2281 return COMPCODE_UNGE;
2282 default:
2283 gcc_unreachable ();
2287 /* Convert a compcode bit-based encoding of a comparison operator back
2288 to GCC's enum tree_code representation. This function is the
2289 inverse of comparison_to_compcode. */
2291 static enum tree_code
2292 compcode_to_comparison (enum comparison_code code)
2294 switch (code)
2296 case COMPCODE_LT:
2297 return LT_EXPR;
2298 case COMPCODE_EQ:
2299 return EQ_EXPR;
2300 case COMPCODE_LE:
2301 return LE_EXPR;
2302 case COMPCODE_GT:
2303 return GT_EXPR;
2304 case COMPCODE_NE:
2305 return NE_EXPR;
2306 case COMPCODE_GE:
2307 return GE_EXPR;
2308 case COMPCODE_ORD:
2309 return ORDERED_EXPR;
2310 case COMPCODE_UNORD:
2311 return UNORDERED_EXPR;
2312 case COMPCODE_UNLT:
2313 return UNLT_EXPR;
2314 case COMPCODE_UNEQ:
2315 return UNEQ_EXPR;
2316 case COMPCODE_UNLE:
2317 return UNLE_EXPR;
2318 case COMPCODE_UNGT:
2319 return UNGT_EXPR;
2320 case COMPCODE_LTGT:
2321 return LTGT_EXPR;
2322 case COMPCODE_UNGE:
2323 return UNGE_EXPR;
2324 default:
2325 gcc_unreachable ();
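/* The encoding turns the four elementary outcomes into bits
   (LT = 1, EQ = 2, GT = 4, UNORD = 8), so every predicate is the OR of
   the outcomes it accepts, and combining two tests on the same operands
   becomes plain bit arithmetic.  A self-contained check:  */
#if 0
#include <assert.h>

int
main (void)
{
  enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };
  assert ((LT | EQ) == 3);                /* x < y || x == y  is  x <= y
                                             (COMPCODE_LE).  */
  assert ((LT & GT) == 0);                /* x < y && x > y   is  false
                                             (COMPCODE_FALSE).  */
  assert ((LT | EQ | GT | UNORD) == 15);  /* Accepting every outcome is
                                             COMPCODE_TRUE.  */
  return 0;
}
#endif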
2329 /* Return a tree for the comparison which is the combination of
2330 doing the AND or OR (depending on CODE) of the two operations LCODE
2331 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2332 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2333 if this makes the transformation invalid. */
2335 tree
2336 combine_comparisons (enum tree_code code, enum tree_code lcode,
2337 enum tree_code rcode, tree truth_type,
2338 tree ll_arg, tree lr_arg)
2340 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2341 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2342 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2343 enum comparison_code compcode;
2345 switch (code)
2347 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2348 compcode = lcompcode & rcompcode;
2349 break;
2351 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2352 compcode = lcompcode | rcompcode;
2353 break;
2355 default:
2356 return NULL_TREE;
2359 if (!honor_nans)
2361 /* Eliminate unordered comparisons, as well as LTGT and ORD
2362 which are not used unless the mode has NaNs. */
2363 compcode &= ~COMPCODE_UNORD;
2364 if (compcode == COMPCODE_LTGT)
2365 compcode = COMPCODE_NE;
2366 else if (compcode == COMPCODE_ORD)
2367 compcode = COMPCODE_TRUE;
2369 else if (flag_trapping_math)
2371 /* Check that the original operation and the optimized ones will trap
2372 under the same condition. */
2373 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2374 && (lcompcode != COMPCODE_EQ)
2375 && (lcompcode != COMPCODE_ORD);
2376 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2377 && (rcompcode != COMPCODE_EQ)
2378 && (rcompcode != COMPCODE_ORD);
2379 bool trap = (compcode & COMPCODE_UNORD) == 0
2380 && (compcode != COMPCODE_EQ)
2381 && (compcode != COMPCODE_ORD);
2383 /* In a short-circuited boolean expression the LHS might be
2384 such that the RHS, if evaluated, will never trap. For
2385 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2386 if neither x nor y is NaN. (This is a mixed blessing: for
2387 example, the expression above will never trap, hence
2388 optimizing it to x < y would be invalid). */
2389 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2390 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2391 rtrap = false;
2393 /* If the comparison was short-circuited, and only the RHS
2394 trapped, we may now generate a spurious trap. */
2395 if (rtrap && !ltrap
2396 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2397 return NULL_TREE;
2399 /* If we changed the conditions that cause a trap, we lose. */
2400 if ((ltrap || rtrap) != trap)
2401 return NULL_TREE;
2404 if (compcode == COMPCODE_TRUE)
2405 return constant_boolean_node (true, truth_type);
2406 else if (compcode == COMPCODE_FALSE)
2407 return constant_boolean_node (false, truth_type);
2408 else
2409 return fold_build2 (compcode_to_comparison (compcode),
2410 truth_type, ll_arg, lr_arg);
2413 /* Return nonzero if CODE is a tree code that represents a truth value. */
2415 static int
2416 truth_value_p (enum tree_code code)
2418 return (TREE_CODE_CLASS (code) == tcc_comparison
2419 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2420 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2421 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2424 /* Return nonzero if two operands (typically of the same tree node)
2425 are necessarily equal. If either argument has side-effects this
2426 function returns zero. FLAGS modifies behavior as follows:
2428 If OEP_ONLY_CONST is set, only return nonzero for constants.
2429 This function tests whether the operands are indistinguishable;
2430 it does not test whether they are equal using C's == operation.
2431 The distinction is important for IEEE floating point, because
2432 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2433 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2435 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2436 even though it may hold multiple values during a function.
2437 This is because a GCC tree node guarantees that nothing else is
2438 executed between the evaluation of its "operands" (which may often
2439 be evaluated in arbitrary order). Hence if the operands themselves
2440 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2441 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2442 unset means assuming isochronic (or instantaneous) tree equivalence.
2443 Unless comparing arbitrary expression trees, such as from different
2444 statements, this flag can usually be left unset.
2446 If OEP_PURE_SAME is set, then pure functions with identical arguments
2447 are considered the same. It is used when the caller has other ways
2448 to ensure that global memory is unchanged in between. */
2450 int
2451 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2453 /* If either is ERROR_MARK, they aren't equal. */
2454 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2455 return 0;
2457 /* If both types don't have the same signedness, then we can't consider
2458 them equal. We must check this before the STRIP_NOPS calls
2459 because they may change the signedness of the arguments. */
2460 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2461 return 0;
2463 STRIP_NOPS (arg0);
2464 STRIP_NOPS (arg1);
2466 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2467 /* This is needed for conversions and for COMPONENT_REF.
2468 Might as well play it safe and always test this. */
2469 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2470 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2471 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2472 return 0;
2474 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2475 We don't care about side effects in that case because the SAVE_EXPR
2476 takes care of that for us. In all other cases, two expressions are
2477 equal if they have no side effects. If we have two identical
2478 expressions with side effects that should be treated the same due
2479 to the only side effects being identical SAVE_EXPR's, that will
2480 be detected in the recursive calls below. */
2481 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2482 && (TREE_CODE (arg0) == SAVE_EXPR
2483 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2484 return 1;
2486 /* Next handle constant cases, those for which we can return 1 even
2487 if ONLY_CONST is set. */
2488 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2489 switch (TREE_CODE (arg0))
2491 case INTEGER_CST:
2492 return (! TREE_CONSTANT_OVERFLOW (arg0)
2493 && ! TREE_CONSTANT_OVERFLOW (arg1)
2494 && tree_int_cst_equal (arg0, arg1));
2496 case REAL_CST:
2497 return (! TREE_CONSTANT_OVERFLOW (arg0)
2498 && ! TREE_CONSTANT_OVERFLOW (arg1)
2499 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2500 TREE_REAL_CST (arg1)));
2502 case VECTOR_CST:
2504 tree v1, v2;
2506 if (TREE_CONSTANT_OVERFLOW (arg0)
2507 || TREE_CONSTANT_OVERFLOW (arg1))
2508 return 0;
2510 v1 = TREE_VECTOR_CST_ELTS (arg0);
2511 v2 = TREE_VECTOR_CST_ELTS (arg1);
2512 while (v1 && v2)
2514 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2515 flags))
2516 return 0;
2517 v1 = TREE_CHAIN (v1);
2518 v2 = TREE_CHAIN (v2);
2521 return v1 == v2;
2524 case COMPLEX_CST:
2525 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2526 flags)
2527 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2528 flags));
2530 case STRING_CST:
2531 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2532 && ! memcmp (TREE_STRING_POINTER (arg0),
2533 TREE_STRING_POINTER (arg1),
2534 TREE_STRING_LENGTH (arg0)));
2536 case ADDR_EXPR:
2537 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2538 0);
2539 default:
2540 break;
2543 if (flags & OEP_ONLY_CONST)
2544 return 0;
2546 /* Define macros to test an operand from arg0 and arg1 for equality and a
2547 variant that allows null and views null as being different from any
2548 non-null value. In the latter case, if either is null, then both
2549 must be; otherwise, do the normal comparison. */
2550 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2551 TREE_OPERAND (arg1, N), flags)
2553 #define OP_SAME_WITH_NULL(N) \
2554 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2555 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2557 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2559 case tcc_unary:
2560 /* Two conversions are equal only if signedness and modes match. */
2561 switch (TREE_CODE (arg0))
2563 case NOP_EXPR:
2564 case CONVERT_EXPR:
2565 case FIX_CEIL_EXPR:
2566 case FIX_TRUNC_EXPR:
2567 case FIX_FLOOR_EXPR:
2568 case FIX_ROUND_EXPR:
2569 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2570 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2571 return 0;
2572 break;
2573 default:
2574 break;
2577 return OP_SAME (0);
2580 case tcc_comparison:
2581 case tcc_binary:
2582 if (OP_SAME (0) && OP_SAME (1))
2583 return 1;
2585 /* For commutative ops, allow the other order. */
2586 return (commutative_tree_code (TREE_CODE (arg0))
2587 && operand_equal_p (TREE_OPERAND (arg0, 0),
2588 TREE_OPERAND (arg1, 1), flags)
2589 && operand_equal_p (TREE_OPERAND (arg0, 1),
2590 TREE_OPERAND (arg1, 0), flags));
2592 case tcc_reference:
2593 /* If either of the pointer (or reference) expressions we are
2594 dereferencing contain a side effect, these cannot be equal. */
2595 if (TREE_SIDE_EFFECTS (arg0)
2596 || TREE_SIDE_EFFECTS (arg1))
2597 return 0;
2599 switch (TREE_CODE (arg0))
2601 case INDIRECT_REF:
2602 case ALIGN_INDIRECT_REF:
2603 case MISALIGNED_INDIRECT_REF:
2604 case REALPART_EXPR:
2605 case IMAGPART_EXPR:
2606 return OP_SAME (0);
2608 case ARRAY_REF:
2609 case ARRAY_RANGE_REF:
2610 /* Operands 2 and 3 may be null. */
2611 return (OP_SAME (0)
2612 && OP_SAME (1)
2613 && OP_SAME_WITH_NULL (2)
2614 && OP_SAME_WITH_NULL (3));
2616 case COMPONENT_REF:
2617 /* Handle operand 2 the same as for ARRAY_REF. */
2618 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2620 case BIT_FIELD_REF:
2621 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2623 default:
2624 return 0;
2627 case tcc_expression:
2628 switch (TREE_CODE (arg0))
2630 case ADDR_EXPR:
2631 case TRUTH_NOT_EXPR:
2632 return OP_SAME (0);
2634 case TRUTH_ANDIF_EXPR:
2635 case TRUTH_ORIF_EXPR:
2636 return OP_SAME (0) && OP_SAME (1);
2638 case TRUTH_AND_EXPR:
2639 case TRUTH_OR_EXPR:
2640 case TRUTH_XOR_EXPR:
2641 if (OP_SAME (0) && OP_SAME (1))
2642 return 1;
2644 /* Otherwise take into account this is a commutative operation. */
2645 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2646 TREE_OPERAND (arg1, 1), flags)
2647 && operand_equal_p (TREE_OPERAND (arg0, 1),
2648 TREE_OPERAND (arg1, 0), flags));
2650 case CALL_EXPR:
2651 /* If the CALL_EXPRs call different functions, then they
2652 clearly cannot be equal. */
2653 if (!OP_SAME (0))
2654 return 0;
2657 unsigned int cef = call_expr_flags (arg0);
2658 if (flags & OEP_PURE_SAME)
2659 cef &= ECF_CONST | ECF_PURE;
2660 else
2661 cef &= ECF_CONST;
2662 if (!cef)
2663 return 0;
2666 /* Now see if all the arguments are the same. operand_equal_p
2667 does not handle TREE_LIST, so we walk the operands here
2668 feeding them to operand_equal_p. */
2669 arg0 = TREE_OPERAND (arg0, 1);
2670 arg1 = TREE_OPERAND (arg1, 1);
2671 while (arg0 && arg1)
2673 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2674 flags))
2675 return 0;
2677 arg0 = TREE_CHAIN (arg0);
2678 arg1 = TREE_CHAIN (arg1);
2681 /* If we get here and both argument lists are exhausted
2682 then the CALL_EXPRs are equal. */
2683 return ! (arg0 || arg1);
2685 default:
2686 return 0;
2689 case tcc_declaration:
2690 /* Consider __builtin_sqrt equal to sqrt. */
2691 return (TREE_CODE (arg0) == FUNCTION_DECL
2692 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2693 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2694 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2696 default:
2697 return 0;
2700 #undef OP_SAME
2701 #undef OP_SAME_WITH_NULL
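/* A self-contained illustration of the -0.0/0.0 point made in the
   commentary above: the two zeros compare equal under C's `==' yet are
   distinguishable values, which is why REAL_CSTs are compared with
   REAL_VALUES_IDENTICAL rather than `=='.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  assert (pz == nz);                      /* Equal under `=='...  */
  assert (signbit (pz) != signbit (nz));  /* ...yet distinguishable.  */
  return 0;
}
#endif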
2704 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2705 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2707 When in doubt, return 0. */
2709 static int
2710 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2712 int unsignedp1, unsignedpo;
2713 tree primarg0, primarg1, primother;
2714 unsigned int correct_width;
2716 if (operand_equal_p (arg0, arg1, 0))
2717 return 1;
2719 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2720 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2721 return 0;
2723 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2724 and see if the inner values are the same. This removes any
2725 signedness comparison, which doesn't matter here. */
2726 primarg0 = arg0, primarg1 = arg1;
2727 STRIP_NOPS (primarg0);
2728 STRIP_NOPS (primarg1);
2729 if (operand_equal_p (primarg0, primarg1, 0))
2730 return 1;
2732 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2733 actual comparison operand, ARG0.
2735 First throw away any conversions to wider types
2736 already present in the operands. */
2738 primarg1 = get_narrower (arg1, &unsignedp1);
2739 primother = get_narrower (other, &unsignedpo);
2741 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2742 if (unsignedp1 == unsignedpo
2743 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2744 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2746 tree type = TREE_TYPE (arg0);
2748 /* Make sure the shorter operand is extended the right way
2749 to match the longer operand. */
2750 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2751 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2753 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2754 return 1;
2757 return 0;
2760 /* See if ARG is an expression that is either a comparison or is performing
2761 arithmetic on comparisons. The comparisons must only be comparing
2762 two different values, which will be stored in *CVAL1 and *CVAL2; if
2763 they are nonzero it means that some operands have already been found.
2764 No variables may be used anywhere else in the expression except in the
2765 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2766 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2768 If this is true, return 1. Otherwise, return zero. */
2770 static int
2771 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2773 enum tree_code code = TREE_CODE (arg);
2774 enum tree_code_class class = TREE_CODE_CLASS (code);
2776 /* We can handle some of the tcc_expression cases here. */
2777 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2778 class = tcc_unary;
2779 else if (class == tcc_expression
2780 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2781 || code == COMPOUND_EXPR))
2782 class = tcc_binary;
2784 else if (class == tcc_expression && code == SAVE_EXPR
2785 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2787 /* If we've already found a CVAL1 or CVAL2, this expression is
2788 too complex to handle. */
2789 if (*cval1 || *cval2)
2790 return 0;
2792 class = tcc_unary;
2793 *save_p = 1;
2796 switch (class)
2798 case tcc_unary:
2799 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2801 case tcc_binary:
2802 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2803 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2804 cval1, cval2, save_p));
2806 case tcc_constant:
2807 return 1;
2809 case tcc_expression:
2810 if (code == COND_EXPR)
2811 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2812 cval1, cval2, save_p)
2813 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2814 cval1, cval2, save_p)
2815 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2816 cval1, cval2, save_p));
2817 return 0;
2819 case tcc_comparison:
2820 /* First see if we can handle the first operand, then the second. For
2821 the second operand, we know *CVAL1 can't be zero. It must be that
2822 one side of the comparison is each of the values; test for the
2823 case where this isn't true by failing if the two operands
2824 are the same. */
2826 if (operand_equal_p (TREE_OPERAND (arg, 0),
2827 TREE_OPERAND (arg, 1), 0))
2828 return 0;
2830 if (*cval1 == 0)
2831 *cval1 = TREE_OPERAND (arg, 0);
2832 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2834 else if (*cval2 == 0)
2835 *cval2 = TREE_OPERAND (arg, 0);
2836 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2838 else
2839 return 0;
2841 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2843 else if (*cval2 == 0)
2844 *cval2 = TREE_OPERAND (arg, 1);
2845 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2847 else
2848 return 0;
2850 return 1;
2852 default:
2853 return 0;
2857 /* ARG is a tree that is known to contain just arithmetic operations and
2858 comparisons. Evaluate the operations in the tree substituting NEW0 for
2859 any occurrence of OLD0 as an operand of a comparison and likewise for
2860 NEW1 and OLD1. */
2862 static tree
2863 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2865 tree type = TREE_TYPE (arg);
2866 enum tree_code code = TREE_CODE (arg);
2867 enum tree_code_class class = TREE_CODE_CLASS (code);
2869 /* We can handle some of the tcc_expression cases here. */
2870 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2871 class = tcc_unary;
2872 else if (class == tcc_expression
2873 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2874 class = tcc_binary;
2876 switch (class)
2878 case tcc_unary:
2879 return fold_build1 (code, type,
2880 eval_subst (TREE_OPERAND (arg, 0),
2881 old0, new0, old1, new1));
2883 case tcc_binary:
2884 return fold_build2 (code, type,
2885 eval_subst (TREE_OPERAND (arg, 0),
2886 old0, new0, old1, new1),
2887 eval_subst (TREE_OPERAND (arg, 1),
2888 old0, new0, old1, new1));
2890 case tcc_expression:
2891 switch (code)
2893 case SAVE_EXPR:
2894 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2896 case COMPOUND_EXPR:
2897 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2899 case COND_EXPR:
2900 return fold_build3 (code, type,
2901 eval_subst (TREE_OPERAND (arg, 0),
2902 old0, new0, old1, new1),
2903 eval_subst (TREE_OPERAND (arg, 1),
2904 old0, new0, old1, new1),
2905 eval_subst (TREE_OPERAND (arg, 2),
2906 old0, new0, old1, new1));
2907 default:
2908 break;
2910 /* Fall through - ??? */
2912 case tcc_comparison:
2914 tree arg0 = TREE_OPERAND (arg, 0);
2915 tree arg1 = TREE_OPERAND (arg, 1);
2917 /* We need to check both for exact equality and tree equality. The
2918 former will be true if the operand has a side-effect. In that
2919 case, we know the operand occurred exactly once. */
2921 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2922 arg0 = new0;
2923 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2924 arg0 = new1;
2926 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2927 arg1 = new0;
2928 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2929 arg1 = new1;
2931 return fold_build2 (code, type, arg0, arg1);
2934 default:
2935 return arg;
2939 /* Return a tree for the case when the result of an expression is RESULT
2940 converted to TYPE and OMITTED was previously an operand of the expression
2941 but is now not needed (e.g., we folded OMITTED * 0).
2943 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2944 the conversion of RESULT to TYPE. */
2946 tree
2947 omit_one_operand (tree type, tree result, tree omitted)
2949 tree t = fold_convert (type, result);
2951 if (TREE_SIDE_EFFECTS (omitted))
2952 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2954 return non_lvalue (t);
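/* At the source level: when folding f () * 0, a call with side effects
   must still be evaluated, so the result is the equivalent of (f (), 0).
   A self-contained sketch of the required behavior (`f' and `calls' are
   illustrative names):  */
#if 0
#include <assert.h>

static int calls;
static int f (void) { calls++; return 7; }

int
main (void)
{
  int r = f () * 0;   /* Folds to a COMPOUND_EXPR: call kept, value 0.  */
  assert (r == 0 && calls == 1);
  return 0;
}
#endif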
2957 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2959 static tree
2960 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2962 tree t = fold_convert (type, result);
2964 if (TREE_SIDE_EFFECTS (omitted))
2965 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2967 return pedantic_non_lvalue (t);
2970 /* Return a tree for the case when the result of an expression is RESULT
2971 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2972 of the expression but are now not needed.
2974 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2975 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2976 evaluated before OMITTED2. Otherwise, if neither has side effects,
2977 just do the conversion of RESULT to TYPE. */
2979 tree
2980 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2982 tree t = fold_convert (type, result);
2984 if (TREE_SIDE_EFFECTS (omitted2))
2985 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2986 if (TREE_SIDE_EFFECTS (omitted1))
2987 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2989 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2993 /* Return a simplified tree node for the truth-negation of ARG. This
2994 never alters ARG itself. We assume that ARG is an operation that
2995 returns a truth value (0 or 1).
2997 FIXME: one would think we would fold the result, but it causes
2998 problems with the dominator optimizer. */
2999 tree
3000 invert_truthvalue (tree arg)
3002 tree type = TREE_TYPE (arg);
3003 enum tree_code code = TREE_CODE (arg);
3005 if (code == ERROR_MARK)
3006 return arg;
3008 /* If this is a comparison, we can simply invert it, except for
3009 floating-point non-equality comparisons, in which case we just
3010 enclose a TRUTH_NOT_EXPR around what we have. */
3012 if (TREE_CODE_CLASS (code) == tcc_comparison)
3014 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3015 if (FLOAT_TYPE_P (op_type)
3016 && flag_trapping_math
3017 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3018 && code != NE_EXPR && code != EQ_EXPR)
3019 return build1 (TRUTH_NOT_EXPR, type, arg);
3020 else
3022 code = invert_tree_comparison (code,
3023 HONOR_NANS (TYPE_MODE (op_type)));
3024 if (code == ERROR_MARK)
3025 return build1 (TRUTH_NOT_EXPR, type, arg);
3026 else
3027 return build2 (code, type,
3028 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3032 switch (code)
3034 case INTEGER_CST:
3035 return constant_boolean_node (integer_zerop (arg), type);
3037 case TRUTH_AND_EXPR:
3038 return build2 (TRUTH_OR_EXPR, type,
3039 invert_truthvalue (TREE_OPERAND (arg, 0)),
3040 invert_truthvalue (TREE_OPERAND (arg, 1)));
3042 case TRUTH_OR_EXPR:
3043 return build2 (TRUTH_AND_EXPR, type,
3044 invert_truthvalue (TREE_OPERAND (arg, 0)),
3045 invert_truthvalue (TREE_OPERAND (arg, 1)));
3047 case TRUTH_XOR_EXPR:
3048 /* Here we can invert either operand. We invert the first operand
3049 unless the second operand is a TRUTH_NOT_EXPR in which case our
3050 result is the XOR of the first operand with the inside of the
3051 negation of the second operand. */
3053 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3054 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3055 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3056 else
3057 return build2 (TRUTH_XOR_EXPR, type,
3058 invert_truthvalue (TREE_OPERAND (arg, 0)),
3059 TREE_OPERAND (arg, 1));
3061 case TRUTH_ANDIF_EXPR:
3062 return build2 (TRUTH_ORIF_EXPR, type,
3063 invert_truthvalue (TREE_OPERAND (arg, 0)),
3064 invert_truthvalue (TREE_OPERAND (arg, 1)));
3066 case TRUTH_ORIF_EXPR:
3067 return build2 (TRUTH_ANDIF_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)),
3069 invert_truthvalue (TREE_OPERAND (arg, 1)));
3071 case TRUTH_NOT_EXPR:
3072 return TREE_OPERAND (arg, 0);
3074 case COND_EXPR:
3076 tree arg1 = TREE_OPERAND (arg, 1);
3077 tree arg2 = TREE_OPERAND (arg, 2);
3078 /* A COND_EXPR may have a throw as one operand, which
3079 then has void type. Just leave void operands
3080 as they are. */
3081 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3082 VOID_TYPE_P (TREE_TYPE (arg1))
3083 ? arg1 : invert_truthvalue (arg1),
3084 VOID_TYPE_P (TREE_TYPE (arg2))
3085 ? arg2 : invert_truthvalue (arg2));
3088 case COMPOUND_EXPR:
3089 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3090 invert_truthvalue (TREE_OPERAND (arg, 1)));
3092 case NON_LVALUE_EXPR:
3093 return invert_truthvalue (TREE_OPERAND (arg, 0));
3095 case NOP_EXPR:
3096 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3097 break;
3099 case CONVERT_EXPR:
3100 case FLOAT_EXPR:
3101 return build1 (TREE_CODE (arg), type,
3102 invert_truthvalue (TREE_OPERAND (arg, 0)));
3104 case BIT_AND_EXPR:
3105 if (!integer_onep (TREE_OPERAND (arg, 1)))
3106 break;
3107 return build2 (EQ_EXPR, type, arg,
3108 fold_convert (type, integer_zero_node));
3110 case SAVE_EXPR:
3111 return build1 (TRUTH_NOT_EXPR, type, arg);
3113 case CLEANUP_POINT_EXPR:
3114 return build1 (CLEANUP_POINT_EXPR, type,
3115 invert_truthvalue (TREE_OPERAND (arg, 0)));
3117 default:
3118 break;
3120 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3121 return build1 (TRUTH_NOT_EXPR, type, arg);
3124 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3125 operands are another bit-wise operation with a common input. If so,
3126 distribute the bit operations to save an operation and possibly two if
3127 constants are involved. For example, convert
3128 (A | B) & (A | C) into A | (B & C)
3129 Further simplification will occur if B and C are constants.
3131 If this optimization cannot be done, 0 will be returned. */
3133 static tree
3134 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3136 tree common;
3137 tree left, right;
3139 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3140 || TREE_CODE (arg0) == code
3141 || (TREE_CODE (arg0) != BIT_AND_EXPR
3142 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3143 return 0;
3145 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3147 common = TREE_OPERAND (arg0, 0);
3148 left = TREE_OPERAND (arg0, 1);
3149 right = TREE_OPERAND (arg1, 1);
3151 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3153 common = TREE_OPERAND (arg0, 0);
3154 left = TREE_OPERAND (arg0, 1);
3155 right = TREE_OPERAND (arg1, 0);
3157 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3159 common = TREE_OPERAND (arg0, 1);
3160 left = TREE_OPERAND (arg0, 0);
3161 right = TREE_OPERAND (arg1, 1);
3163 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3165 common = TREE_OPERAND (arg0, 1);
3166 left = TREE_OPERAND (arg0, 0);
3167 right = TREE_OPERAND (arg1, 0);
3169 else
3170 return 0;
3172 return fold_build2 (TREE_CODE (arg0), type, common,
3173 fold_build2 (code, type, left, right));
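/* A self-contained exhaustive check (the identities hold bitwise, so
   small operand ranges suffice) of the distributions used above:  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 4; a++)
    for (b = 0; b < 4; b++)
      for (c = 0; c < 4; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}
#endif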
3176 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3177 with code CODE. This optimization is unsafe. */
3178 static tree
3179 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3181 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3182 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3184 /* (A / C) +- (B / C) -> (A +- B) / C. */
3185 if (mul0 == mul1
3186 && operand_equal_p (TREE_OPERAND (arg0, 1),
3187 TREE_OPERAND (arg1, 1), 0))
3188 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3189 fold_build2 (code, type,
3190 TREE_OPERAND (arg0, 0),
3191 TREE_OPERAND (arg1, 0)),
3192 TREE_OPERAND (arg0, 1));
3194 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3195 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3196 TREE_OPERAND (arg1, 0), 0)
3197 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3198 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3200 REAL_VALUE_TYPE r0, r1;
3201 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3202 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3203 if (!mul0)
3204 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3205 if (!mul1)
3206 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3207 real_arithmetic (&r0, code, &r0, &r1);
3208 return fold_build2 (MULT_EXPR, type,
3209 TREE_OPERAND (arg0, 0),
3210 build_real (type, r0));
3213 return NULL_TREE;
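/* Why the transformation is unsafe for IEEE arithmetic: the two forms
   can round, or even overflow, differently.  A self-contained
   demonstration:  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double a = DBL_MAX, b = DBL_MAX, c = 2.0;
  double split = a / c + b / c;   /* Exactly DBL_MAX: finite.  */
  double fused = (a + b) / c;     /* a + b overflows to +Inf first.  */
  assert (isfinite (split) && isinf (fused));
  return 0;
}
#endif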
3216 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3217 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3219 static tree
3220 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3221 int unsignedp)
3223 tree result;
3225 if (bitpos == 0)
3227 tree size = TYPE_SIZE (TREE_TYPE (inner));
3228 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3229 || POINTER_TYPE_P (TREE_TYPE (inner)))
3230 && host_integerp (size, 0)
3231 && tree_low_cst (size, 0) == bitsize)
3232 return fold_convert (type, inner);
3235 result = build3 (BIT_FIELD_REF, type, inner,
3236 size_int (bitsize), bitsize_int (bitpos));
3238 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3240 return result;
3243 /* Optimize a bit-field compare.
3245 There are two cases: First is a compare against a constant and the
3246 second is a comparison of two items where the fields are at the same
3247 bit position relative to the start of a chunk (byte, halfword, word)
3248 large enough to contain it. In these cases we can avoid the shift
3249 implicit in bitfield extractions.
3251 For constants, we emit a compare of the shifted constant with the
3252 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3253 compared. For two fields at the same position, we do the ANDs with the
3254 similar mask and compare the result of the ANDs.
3256 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3257 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3258 are the left and right operands of the comparison, respectively.
3260 If the optimization described above can be done, we return the resulting
3261 tree. Otherwise we return zero. */
3263 static tree
3264 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3265 tree lhs, tree rhs)
3267 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3268 tree type = TREE_TYPE (lhs);
3269 tree signed_type, unsigned_type;
3270 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3271 enum machine_mode lmode, rmode, nmode;
3272 int lunsignedp, runsignedp;
3273 int lvolatilep = 0, rvolatilep = 0;
3274 tree linner, rinner = NULL_TREE;
3275 tree mask;
3276 tree offset;
3278 /* Get all the information about the extractions being done. If the bit size
3279 is the same as the size of the underlying object, we aren't doing an
3280 extraction at all and so can do nothing. We also don't want to
3281 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3282 then will no longer be able to replace it. */
3283 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3284 &lunsignedp, &lvolatilep, false);
3285 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3286 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3287 return 0;
3289 if (!const_p)
3291 /* If this is not a constant, we can only do something if bit positions,
3292 sizes, and signedness are the same. */
3293 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3294 &runsignedp, &rvolatilep, false);
3296 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3297 || lunsignedp != runsignedp || offset != 0
3298 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3299 return 0;
3302 /* See if we can find a mode to refer to this field. We should be able to,
3303 but fail if we can't. */
3304 nmode = get_best_mode (lbitsize, lbitpos,
3305 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3306 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3307 TYPE_ALIGN (TREE_TYPE (rinner))),
3308 word_mode, lvolatilep || rvolatilep);
3309 if (nmode == VOIDmode)
3310 return 0;
3312 /* Set signed and unsigned types of the precision of this mode for the
3313 shifts below. */
3314 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3315 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3317 /* Compute the bit position and size for the new reference and our offset
3318 within it. If the new reference is the same size as the original, we
3319 won't optimize anything, so return zero. */
3320 nbitsize = GET_MODE_BITSIZE (nmode);
3321 nbitpos = lbitpos & ~ (nbitsize - 1);
3322 lbitpos -= nbitpos;
3323 if (nbitsize == lbitsize)
3324 return 0;
3326 if (BYTES_BIG_ENDIAN)
3327 lbitpos = nbitsize - lbitsize - lbitpos;
3329 /* Make the mask to be used against the extracted field. */
3330 mask = build_int_cst (unsigned_type, -1);
3331 mask = force_fit_type (mask, 0, false, false);
3332 mask = fold_convert (unsigned_type, mask);
3333 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3334 mask = const_binop (RSHIFT_EXPR, mask,
3335 size_int (nbitsize - lbitsize - lbitpos), 0);
3337 if (! const_p)
3338 /* If not comparing with constant, just rework the comparison
3339 and return. */
3340 return build2 (code, compare_type,
3341 build2 (BIT_AND_EXPR, unsigned_type,
3342 make_bit_field_ref (linner, unsigned_type,
3343 nbitsize, nbitpos, 1),
3344 mask),
3345 build2 (BIT_AND_EXPR, unsigned_type,
3346 make_bit_field_ref (rinner, unsigned_type,
3347 nbitsize, nbitpos, 1),
3348 mask));
3350 /* Otherwise, we are handling the constant case. See if the constant is too
3351 big for the field. Warn and return a tree for 0 (false) if so. We do
3352 this not only for its own sake, but to avoid having to test for this
3353 error case below. If we didn't, we might generate wrong code.
3355 For unsigned fields, the constant shifted right by the field length should
3356 be all zero. For signed fields, the high-order bits should agree with
3357 the sign bit. */
3359 if (lunsignedp)
3361 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3362 fold_convert (unsigned_type, rhs),
3363 size_int (lbitsize), 0)))
3365 warning (0, "comparison is always %d due to width of bit-field",
3366 code == NE_EXPR);
3367 return constant_boolean_node (code == NE_EXPR, compare_type);
3370 else
3372 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3373 size_int (lbitsize - 1), 0);
3374 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3376 warning (0, "comparison is always %d due to width of bit-field",
3377 code == NE_EXPR);
3378 return constant_boolean_node (code == NE_EXPR, compare_type);
3382 /* Single-bit compares should always be against zero. */
3383 if (lbitsize == 1 && ! integer_zerop (rhs))
3385 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3386 rhs = fold_convert (type, integer_zero_node);
3389 /* Make a new bitfield reference, shift the constant over the
3390 appropriate number of bits and mask it with the computed mask
3391 (in case this was a signed field). If we changed it, make a new one. */
3392 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3393 if (lvolatilep)
3395 TREE_SIDE_EFFECTS (lhs) = 1;
3396 TREE_THIS_VOLATILE (lhs) = 1;
3399 rhs = const_binop (BIT_AND_EXPR,
3400 const_binop (LSHIFT_EXPR,
3401 fold_convert (unsigned_type, rhs),
3402 size_int (lbitpos), 0),
3403 mask, 0);
3405 return build2 (code, compare_type,
3406 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3407 rhs);
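/* The mask computed above is lbitsize one-bits positioned at lbitpos
   within the nbitsize-bit word, built by the same left-then-right shift
   of all-ones performed with const_binop.  A self-contained 8-bit
   example (the & 0xffu emulates truncation to the unsigned type's
   precision):  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned nbitsize = 8, lbitsize = 3, lbitpos = 2;
  unsigned mask = 0xffu;                            /* 1111 1111 */
  mask = (mask << (nbitsize - lbitsize)) & 0xffu;   /* 1110 0000 */
  mask >>= nbitsize - lbitsize - lbitpos;           /* 0001 1100 */
  assert (mask == 0x1cu);
  return 0;
}
#endif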
3410 /* Subroutine for fold_truthop: decode a field reference.
3412 If EXP is a comparison reference, we return the innermost reference.
3414 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3415 set to the starting bit number.
3417 If the innermost field can be completely contained in a mode-sized
3418 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3420 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3421 otherwise it is not changed.
3423 *PUNSIGNEDP is set to the signedness of the field.
3425 *PMASK is set to the mask used. This is either contained in a
3426 BIT_AND_EXPR or derived from the width of the field.
3428 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3430 Return 0 if this is not a component reference or is one that we can't
3431 do anything with. */
3433 static tree
3434 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3435 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3436 int *punsignedp, int *pvolatilep,
3437 tree *pmask, tree *pand_mask)
3439 tree outer_type = 0;
3440 tree and_mask = 0;
3441 tree mask, inner, offset;
3442 tree unsigned_type;
3443 unsigned int precision;
3445 /* All the optimizations using this function assume integer fields.
3446 There are problems with FP fields since the type_for_size call
3447 below can fail for, e.g., XFmode. */
3448 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3449 return 0;
3451 /* We are interested in the bare arrangement of bits, so strip everything
3452 that doesn't affect the machine mode. However, record the type of the
3453 outermost expression if it may matter below. */
3454 if (TREE_CODE (exp) == NOP_EXPR
3455 || TREE_CODE (exp) == CONVERT_EXPR
3456 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3457 outer_type = TREE_TYPE (exp);
3458 STRIP_NOPS (exp);
3460 if (TREE_CODE (exp) == BIT_AND_EXPR)
3462 and_mask = TREE_OPERAND (exp, 1);
3463 exp = TREE_OPERAND (exp, 0);
3464 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3465 if (TREE_CODE (and_mask) != INTEGER_CST)
3466 return 0;
3469 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3470 punsignedp, pvolatilep, false);
3471 if ((inner == exp && and_mask == 0)
3472 || *pbitsize < 0 || offset != 0
3473 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3474 return 0;
3476 /* If the number of bits in the reference is the same as the bitsize of
3477 the outer type, then the outer type gives the signedness. Otherwise
3478 (in case of a small bitfield) the signedness is unchanged. */
3479 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3480 *punsignedp = TYPE_UNSIGNED (outer_type);
3482 /* Compute the mask to access the bitfield. */
3483 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3484 precision = TYPE_PRECISION (unsigned_type);
3486 mask = build_int_cst (unsigned_type, -1);
3487 mask = force_fit_type (mask, 0, false, false);
3489 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3490 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3492 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3493 if (and_mask != 0)
3494 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3495 fold_convert (unsigned_type, and_mask), mask);
3497 *pmask = mask;
3498 *pand_mask = and_mask;
3499 return inner;
3502 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3503 bit positions. */
3505 static int
3506 all_ones_mask_p (tree mask, int size)
3508 tree type = TREE_TYPE (mask);
3509 unsigned int precision = TYPE_PRECISION (type);
3510 tree tmask;
3512 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3513 tmask = force_fit_type (tmask, 0, false, false);
3515 return
3516 tree_int_cst_equal (mask,
3517 const_binop (RSHIFT_EXPR,
3518 const_binop (LSHIFT_EXPR, tmask,
3519 size_int (precision - size),
3520 0),
3521 size_int (precision - size), 0));
3524 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3525 represents the sign bit of EXP's type. If EXP represents a sign
3526 or zero extension, also test VAL against the unextended type.
3527 The return value is the (sub)expression whose sign bit is VAL,
3528 or NULL_TREE otherwise. */
3530 static tree
3531 sign_bit_p (tree exp, tree val)
3533 unsigned HOST_WIDE_INT mask_lo, lo;
3534 HOST_WIDE_INT mask_hi, hi;
3535 int width;
3536 tree t;
3538 /* Tree EXP must have an integral type. */
3539 t = TREE_TYPE (exp);
3540 if (! INTEGRAL_TYPE_P (t))
3541 return NULL_TREE;
3543 /* Tree VAL must be an integer constant. */
3544 if (TREE_CODE (val) != INTEGER_CST
3545 || TREE_CONSTANT_OVERFLOW (val))
3546 return NULL_TREE;
3548 width = TYPE_PRECISION (t);
3549 if (width > HOST_BITS_PER_WIDE_INT)
3551 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3552 lo = 0;
3554 mask_hi = ((unsigned HOST_WIDE_INT) -1
3555 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3556 mask_lo = -1;
3558 else
3560 hi = 0;
3561 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3563 mask_hi = 0;
3564 mask_lo = ((unsigned HOST_WIDE_INT) -1
3565 >> (HOST_BITS_PER_WIDE_INT - width));
3568 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3569 treat VAL as if it were unsigned. */
3570 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3571 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3572 return exp;
3574 /* Handle extension from a narrower type. */
3575 if (TREE_CODE (exp) == NOP_EXPR
3576 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3577 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3579 return NULL_TREE;
3582 /* Subroutine for fold_truthop: determine if an operand is simple enough
3583 to be evaluated unconditionally. */
3585 static int
3586 simple_operand_p (tree exp)
3588 /* Strip any conversions that don't change the machine mode. */
3589 STRIP_NOPS (exp);
3591 return (CONSTANT_CLASS_P (exp)
3592 || TREE_CODE (exp) == SSA_NAME
3593 || (DECL_P (exp)
3594 && ! TREE_ADDRESSABLE (exp)
3595 && ! TREE_THIS_VOLATILE (exp)
3596 && ! DECL_NONLOCAL (exp)
3597 /* Don't regard global variables as simple. They may be
3598 allocated in ways unknown to the compiler (shared memory,
3599 #pragma weak, etc). */
3600 && ! TREE_PUBLIC (exp)
3601 && ! DECL_EXTERNAL (exp)
3602 /* Loading a static variable is unduly expensive, but global
3603 registers aren't expensive. */
3604 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3607 /* The following functions are subroutines to fold_range_test and allow it to
3608 try to change a logical combination of comparisons into a range test.
3610 For example, both
3611 X == 2 || X == 3 || X == 4 || X == 5
3612 and
3613 X >= 2 && X <= 5
3614 are converted to
3615 (unsigned) (X - 2) <= 3
3617 We describe each set of comparisons as being either inside or outside
3618 a range, using a variable named like IN_P, and then describe the
3619 range with a lower and upper bound. If one of the bounds is omitted,
3620 it represents either the highest or lowest value of the type.
3622 In the comments below, we represent a range by two numbers in brackets
3623 preceded by a "+" to designate being inside that range, or a "-" to
3624 designate being outside that range, so the condition can be inverted by
3625 flipping the prefix. An omitted bound is represented by a "-". For
3626 example, "- [-, 10]" means being outside the range starting at the lowest
3627 possible value and ending at 10, in other words, being greater than 10.
3628 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3629 always false.
3631 We set up things so that the missing bounds are handled in a consistent
3632 manner so neither a missing bound nor "true" and "false" need to be
3633 handled using a special case. */
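/* A self-contained check of the example transformation above,
   X == 2 || X == 3 || X == 4 || X == 5  into  (unsigned) (X - 2) <= 3
   (the subtraction rebases the range at zero, and the unsigned compare
   rejects everything below it by wraparound):  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      int in_range = (x == 2 || x == 3 || x == 4 || x == 5);
      assert (in_range == ((unsigned) (x - 2) <= 3));
    }
  return 0;
}
#endif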
3635 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3636 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3637 and UPPER1_P are nonzero if the respective argument is an upper bound
3638 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3639 must be specified for a comparison. ARG1 will be converted to ARG0's
3640 type if both are specified. */
3642 static tree
3643 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3644 tree arg1, int upper1_p)
3646 tree tem;
3647 int result;
3648 int sgn0, sgn1;
3650 /* If neither arg represents infinity, do the normal operation.
3651 Else, if not a comparison, return infinity. Else handle the special
3652 comparison rules. Note that most of the cases below won't occur, but
3653 are handled for consistency. */
3655 if (arg0 != 0 && arg1 != 0)
3657 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3658 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3659 STRIP_NOPS (tem);
3660 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3663 if (TREE_CODE_CLASS (code) != tcc_comparison)
3664 return 0;
3666 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3667 for neither. In real maths, we cannot assume open ended ranges are
3668 the same. But, this is computer arithmetic, where numbers are finite.
3669 We can therefore stand in for any unbounded end of a range with
3670 a value Z, Z being greater than any representable number. This permits
3671 us to treat unbounded ranges as equal. */
3672 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3673 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3674 switch (code)
3676 case EQ_EXPR:
3677 result = sgn0 == sgn1;
3678 break;
3679 case NE_EXPR:
3680 result = sgn0 != sgn1;
3681 break;
3682 case LT_EXPR:
3683 result = sgn0 < sgn1;
3684 break;
3685 case LE_EXPR:
3686 result = sgn0 <= sgn1;
3687 break;
3688 case GT_EXPR:
3689 result = sgn0 > sgn1;
3690 break;
3691 case GE_EXPR:
3692 result = sgn0 >= sgn1;
3693 break;
3694 default:
3695 gcc_unreachable ();
3698 return constant_boolean_node (result, type);
3701 /* Given EXP, a logical expression, set the range it is testing into
3702 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3703 actually being tested. *PLOW and *PHIGH will be made of the same type
3704 as the returned expression. If EXP is not a comparison, we will most
3705 likely not be returning a useful value and range. */
3707 static tree
3708 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3710 enum tree_code code;
3711 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3712 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3713 int in_p, n_in_p;
3714 tree low, high, n_low, n_high;
3716 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3717 and see if we can refine the range. Some of the cases below may not
3718 happen, but it doesn't seem worth worrying about this. We "continue"
3719 the outer loop when we've changed something; otherwise we "break"
3720 the switch, which will "break" the while. */
3722 in_p = 0;
3723 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3725 while (1)
3727 code = TREE_CODE (exp);
3728 exp_type = TREE_TYPE (exp);
3730 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3732 if (TREE_CODE_LENGTH (code) > 0)
3733 arg0 = TREE_OPERAND (exp, 0);
3734 if (TREE_CODE_CLASS (code) == tcc_comparison
3735 || TREE_CODE_CLASS (code) == tcc_unary
3736 || TREE_CODE_CLASS (code) == tcc_binary)
3737 arg0_type = TREE_TYPE (arg0);
3738 if (TREE_CODE_CLASS (code) == tcc_binary
3739 || TREE_CODE_CLASS (code) == tcc_comparison
3740 || (TREE_CODE_CLASS (code) == tcc_expression
3741 && TREE_CODE_LENGTH (code) > 1))
3742 arg1 = TREE_OPERAND (exp, 1);
3745 switch (code)
3747 case TRUTH_NOT_EXPR:
3748 in_p = ! in_p, exp = arg0;
3749 continue;
3751 case EQ_EXPR: case NE_EXPR:
3752 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3753 /* We can only do something if the range is testing for zero
3754 and if the second operand is an integer constant. Note that
3755 saying something is "in" the range we make is done by
3756 complementing IN_P since it is set in the initial case of
3757 being not equal to zero; "out" is leaving it alone. */
3758 if (low == 0 || high == 0
3759 || ! integer_zerop (low) || ! integer_zerop (high)
3760 || TREE_CODE (arg1) != INTEGER_CST)
3761 break;
3763 switch (code)
3765 case NE_EXPR: /* - [c, c] */
3766 low = high = arg1;
3767 break;
3768 case EQ_EXPR: /* + [c, c] */
3769 in_p = ! in_p, low = high = arg1;
3770 break;
3771 case GT_EXPR: /* - [-, c] */
3772 low = 0, high = arg1;
3773 break;
3774 case GE_EXPR: /* + [c, -] */
3775 in_p = ! in_p, low = arg1, high = 0;
3776 break;
3777 case LT_EXPR: /* - [c, -] */
3778 low = arg1, high = 0;
3779 break;
3780 case LE_EXPR: /* + [-, c] */
3781 in_p = ! in_p, low = 0, high = arg1;
3782 break;
3783 default:
3784 gcc_unreachable ();
3787 /* If this is an unsigned comparison, we also know that EXP is
3788 greater than or equal to zero. We base the range tests we make
3789 on that fact, so we record it here so we can parse existing
3790 range tests. We test arg0_type since often the return type
3791 of, e.g. EQ_EXPR, is boolean. */
3792 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3794 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3795 in_p, low, high, 1,
3796 fold_convert (arg0_type, integer_zero_node),
3797 NULL_TREE))
3798 break;
3800 in_p = n_in_p, low = n_low, high = n_high;
3802 /* If the high bound is missing, but we have a nonzero low
3803 bound, reverse the range so it goes from zero to the low bound
3804 minus 1. */
3805 if (high == 0 && low && ! integer_zerop (low))
3807 in_p = ! in_p;
3808 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3809 integer_one_node, 0);
3810 low = fold_convert (arg0_type, integer_zero_node);
3814 exp = arg0;
3815 continue;
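/* E.g. for unsigned U, the test U >= 5 first becomes + [5, -]; the
   reversal just above then rewrites it as - [0, 4]. */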
3817 case NEGATE_EXPR:
3818 /* (-x) IN [a,b] -> x in [-b, -a] */
3819 n_low = range_binop (MINUS_EXPR, exp_type,
3820 fold_convert (exp_type, integer_zero_node),
3821 0, high, 1);
3822 n_high = range_binop (MINUS_EXPR, exp_type,
3823 fold_convert (exp_type, integer_zero_node),
3824 0, low, 0);
3825 low = n_low, high = n_high;
3826 exp = arg0;
3827 continue;
3829 case BIT_NOT_EXPR:
3830 /* ~ X -> -X - 1 */
3831 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3832 fold_convert (exp_type, integer_one_node));
3833 continue;
3835 case PLUS_EXPR: case MINUS_EXPR:
3836 if (TREE_CODE (arg1) != INTEGER_CST)
3837 break;
3839 /* If EXP is signed, any overflow in the computation is undefined,
3840 so we don't worry about it so long as our computations on
3841 the bounds don't overflow. For unsigned, overflow is defined
3842 and this is exactly the right thing. */
3843 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3844 arg0_type, low, 0, arg1, 0);
3845 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3846 arg0_type, high, 1, arg1, 0);
3847 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3848 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3849 break;
3851 /* Check for an unsigned range which has wrapped around the maximum
3852 value, thus making n_high < n_low, and normalize it. */
3853 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3855 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3856 integer_one_node, 0);
3857 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3858 integer_one_node, 0);
3860 /* If the range is of the form +/- [ x+1, x ], we won't
3861 be able to normalize it. But then, it represents the
3862 whole range or the empty set, so make it
3863 +/- [ -, - ]. */
3864 if (tree_int_cst_equal (n_low, low)
3865 && tree_int_cst_equal (n_high, high))
3866 low = high = 0;
3867 else
3868 in_p = ! in_p;
3870 else
3871 low = n_low, high = n_high;
3873 exp = arg0;
3874 continue;
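/* E.g. for unsigned char X, a range + [0, 19] for X + 10 becomes
   + [246, 9] for X, which the code above normalizes to - [10, 245]. */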
3876 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3877 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3878 break;
3880 if (! INTEGRAL_TYPE_P (arg0_type)
3881 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3882 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3883 break;
3885 n_low = low, n_high = high;
3887 if (n_low != 0)
3888 n_low = fold_convert (arg0_type, n_low);
3890 if (n_high != 0)
3891 n_high = fold_convert (arg0_type, n_high);
3894 /* If we're converting arg0 from an unsigned type to exp,
3895 a signed type, we will be doing the comparison as unsigned.
3896 The tests above have already verified that LOW and HIGH
3897 are both positive.
3899 So we have to ensure that we will handle large unsigned
3900 values the same way that the current signed bounds treat
3901 negative values. */
3903 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3905 tree high_positive;
3906 tree equiv_type = lang_hooks.types.type_for_mode
3907 (TYPE_MODE (arg0_type), 1);
3909 /* A range without an upper bound is, naturally, unbounded.
3910 Since convert would have cropped a very large value, use
3911 the max value for the destination type. */
3912 high_positive
3913 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3914 : TYPE_MAX_VALUE (arg0_type);
3916 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3917 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3918 fold_convert (arg0_type,
3919 high_positive),
3920 fold_convert (arg0_type,
3921 integer_one_node));
3923 /* If the low bound is specified, "and" the range with the
3924 range for which the original unsigned value will be
3925 positive. */
3926 if (low != 0)
3928 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3929 1, n_low, n_high, 1,
3930 fold_convert (arg0_type,
3931 integer_zero_node),
3932 high_positive))
3933 break;
3935 in_p = (n_in_p == in_p);
3937 else
3939 /* Otherwise, "or" the range with the range of the input
3940 that will be interpreted as negative. */
3941 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3942 0, n_low, n_high, 1,
3943 fold_convert (arg0_type,
3944 integer_zero_node),
3945 high_positive))
3946 break;
3948 in_p = (in_p != n_in_p);
3952 exp = arg0;
3953 low = n_low, high = n_high;
3954 continue;
3956 default:
3957 break;
3960 break;
3963 /* If EXP is a constant, we can evaluate whether this is true or false. */
3964 if (TREE_CODE (exp) == INTEGER_CST)
3966 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3967 exp, 0, low, 0))
3968 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3969 exp, 1, high, 1)));
3970 low = high = 0;
3971 exp = 0;
3974 *pin_p = in_p, *plow = low, *phigh = high;
3975 return exp;
3978 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3979 type, TYPE, return an expression to test if EXP is in (or out of, depending
3980 on IN_P) the range. Return 0 if the test couldn't be created. */
3982 static tree
3983 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3985 tree etype = TREE_TYPE (exp);
3986 tree value;
3988 #ifdef HAVE_canonicalize_funcptr_for_compare
3989 /* Disable this optimization for function pointer expressions
3990 on targets that require function pointer canonicalization. */
3991 if (HAVE_canonicalize_funcptr_for_compare
3992 && TREE_CODE (etype) == POINTER_TYPE
3993 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3994 return NULL_TREE;
3995 #endif
3997 if (! in_p)
3999 value = build_range_check (type, exp, 1, low, high);
4000 if (value != 0)
4001 return invert_truthvalue (value);
4003 return 0;
4006 if (low == 0 && high == 0)
4007 return fold_convert (type, integer_one_node);
4009 if (low == 0)
4010 return fold_build2 (LE_EXPR, type, exp,
4011 fold_convert (etype, high));
4013 if (high == 0)
4014 return fold_build2 (GE_EXPR, type, exp,
4015 fold_convert (etype, low));
4017 if (operand_equal_p (low, high, 0))
4018 return fold_build2 (EQ_EXPR, type, exp,
4019 fold_convert (etype, low));
4021 if (integer_zerop (low))
4023 if (! TYPE_UNSIGNED (etype))
4025 etype = lang_hooks.types.unsigned_type (etype);
4026 high = fold_convert (etype, high);
4027 exp = fold_convert (etype, exp);
4029 return build_range_check (type, exp, 1, 0, high);
4032 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4033 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4035 unsigned HOST_WIDE_INT lo;
4036 HOST_WIDE_INT hi;
4037 int prec;
4039 prec = TYPE_PRECISION (etype);
4040 if (prec <= HOST_BITS_PER_WIDE_INT)
4042 hi = 0;
4043 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4045 else
4047 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4048 lo = (unsigned HOST_WIDE_INT) -1;
4051 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4053 if (TYPE_UNSIGNED (etype))
4055 etype = lang_hooks.types.signed_type (etype);
4056 exp = fold_convert (etype, exp);
4058 return fold_build2 (GT_EXPR, type, exp,
4059 fold_convert (etype, integer_zero_node));
4063 value = const_binop (MINUS_EXPR, high, low, 0);
4064 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4065 && ! TYPE_UNSIGNED (etype))
4067 tree utype, minv, maxv;
4069 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4070 for the type in question, as we rely on this here. */
4071 switch (TREE_CODE (etype))
4073 case INTEGER_TYPE:
4074 case ENUMERAL_TYPE:
4075 case CHAR_TYPE:
4076 /* There is no requirement that LOW be within the range of ETYPE
4077 if the latter is a subtype. It must, however, be within the base
4078 type of ETYPE. So be sure we do the subtraction in that type. */
4079 if (TREE_TYPE (etype))
4080 etype = TREE_TYPE (etype);
4081 utype = lang_hooks.types.unsigned_type (etype);
4082 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4083 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4084 integer_one_node, 1);
4085 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4086 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4087 minv, 1, maxv, 1)))
4089 etype = utype;
4090 high = fold_convert (etype, high);
4091 low = fold_convert (etype, low);
4092 exp = fold_convert (etype, exp);
4093 value = const_binop (MINUS_EXPR, high, low, 0);
4095 break;
4096 default:
4097 break;
4101 if (value != 0 && ! TREE_OVERFLOW (value))
4103 /* There is no requirement that LOW be within the range of ETYPE
4104 if the latter is a subtype. It must, however, be within the base
4105 type of ETYPE. So be sure we do the subtraction in that type. */
4106 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4108 etype = TREE_TYPE (etype);
4109 exp = fold_convert (etype, exp);
4110 low = fold_convert (etype, low);
4111 value = fold_convert (etype, value);
4114 return build_range_check (type,
4115 fold_build2 (MINUS_EXPR, etype, exp, low),
4116 1, build_int_cst (etype, 0), value);
4119 return 0;
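/* For example, the test 5 <= X && X <= 10 on a signed X is built here as
   the single unsigned check (unsigned) X - 5 <= 5. */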
4122 /* Given two ranges, see if we can merge them into one. Return 1 if we
4123 can, 0 if we can't. Set the output range into the specified parameters. */
4125 static int
4126 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4127 tree high0, int in1_p, tree low1, tree high1)
4129 int no_overlap;
4130 int subset;
4131 int temp;
4132 tree tem;
4133 int in_p;
4134 tree low, high;
4135 int lowequal = ((low0 == 0 && low1 == 0)
4136 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4137 low0, 0, low1, 0)));
4138 int highequal = ((high0 == 0 && high1 == 0)
4139 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4140 high0, 1, high1, 1)));
4142 /* Make range 0 be the range that starts first, or ends last if they
4143 start at the same value. Swap them if that is not the case. */
4144 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4145 low0, 0, low1, 0))
4146 || (lowequal
4147 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4148 high1, 1, high0, 1))))
4150 temp = in0_p, in0_p = in1_p, in1_p = temp;
4151 tem = low0, low0 = low1, low1 = tem;
4152 tem = high0, high0 = high1, high1 = tem;
4155 /* Now flag two cases, whether the ranges are disjoint or whether the
4156 second range is totally subsumed in the first. Note that the tests
4157 below are simplified by the ones above. */
4158 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4159 high0, 1, low1, 0));
4160 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4161 high1, 1, high0, 1));
4163 /* We now have four cases, depending on whether we are including or
4164 excluding the two ranges. */
4165 if (in0_p && in1_p)
4167 /* If they don't overlap, the result is false. If the second range
4168 is a subset it is the result. Otherwise, the range is from the start
4169 of the second to the end of the first. */
4170 if (no_overlap)
4171 in_p = 0, low = high = 0;
4172 else if (subset)
4173 in_p = 1, low = low1, high = high1;
4174 else
4175 in_p = 1, low = low1, high = high0;
4178 else if (in0_p && ! in1_p)
4180 /* If they don't overlap, the result is the first range. If they are
4181 equal, the result is false. If the second range is a subset of the
4182 first, and the ranges begin at the same place, we go from just after
4183 the end of the second range to the end of the first. If the second
4184 range is not a subset of the first, or if it is a subset and both
4185 ranges end at the same place, the range starts at the start of the
4186 first range and ends just before the second range.
4187 Otherwise, we can't describe this as a single range. */
4188 if (no_overlap)
4189 in_p = 1, low = low0, high = high0;
4190 else if (lowequal && highequal)
4191 in_p = 0, low = high = 0;
4192 else if (subset && lowequal)
4194 in_p = 1, high = high0;
4195 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4196 integer_one_node, 0);
4198 else if (! subset || highequal)
4200 in_p = 1, low = low0;
4201 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4202 integer_one_node, 0);
4204 else
4205 return 0;
4208 else if (! in0_p && in1_p)
4210 /* If they don't overlap, the result is the second range. If the second
4211 is a subset of the first, the result is false. Otherwise,
4212 the range starts just after the first range and ends at the
4213 end of the second. */
4214 if (no_overlap)
4215 in_p = 1, low = low1, high = high1;
4216 else if (subset || highequal)
4217 in_p = 0, low = high = 0;
4218 else
4220 in_p = 1, high = high1;
4221 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4222 integer_one_node, 0);
4226 else
4228 /* The case where we are excluding both ranges. Here the complex case
4229 is if they don't overlap. In that case, the only time we have a
4230 range is if they are adjacent. If the second is a subset of the
4231 first, the result is the first. Otherwise, the range to exclude
4232 starts at the beginning of the first range and ends at the end of the
4233 second. */
4234 if (no_overlap)
4236 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4237 range_binop (PLUS_EXPR, NULL_TREE,
4238 high0, 1,
4239 integer_one_node, 1),
4240 1, low1, 0)))
4241 in_p = 0, low = low0, high = high1;
4242 else
4244 /* Canonicalize - [min, x] into - [-, x]. */
4245 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4246 switch (TREE_CODE (TREE_TYPE (low0)))
4248 case ENUMERAL_TYPE:
4249 if (TYPE_PRECISION (TREE_TYPE (low0))
4250 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4251 break;
4252 /* FALLTHROUGH */
4253 case INTEGER_TYPE:
4254 case CHAR_TYPE:
4255 if (tree_int_cst_equal (low0,
4256 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4257 low0 = 0;
4258 break;
4259 case POINTER_TYPE:
4260 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4261 && integer_zerop (low0))
4262 low0 = 0;
4263 break;
4264 default:
4265 break;
4268 /* Canonicalize - [x, max] into - [x, -]. */
4269 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4270 switch (TREE_CODE (TREE_TYPE (high1)))
4272 case ENUMERAL_TYPE:
4273 if (TYPE_PRECISION (TREE_TYPE (high1))
4274 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4275 break;
4276 /* FALLTHROUGH */
4277 case INTEGER_TYPE:
4278 case CHAR_TYPE:
4279 if (tree_int_cst_equal (high1,
4280 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4281 high1 = 0;
4282 break;
4283 case POINTER_TYPE:
4284 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4285 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4286 high1, 1,
4287 integer_one_node, 1)))
4288 high1 = 0;
4289 break;
4290 default:
4291 break;
4294 /* The ranges might be also adjacent between the maximum and
4295 minimum values of the given type. For
4296 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4297 return + [x + 1, y - 1]. */
4298 if (low0 == 0 && high1 == 0)
4300 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4301 integer_one_node, 1);
4302 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4303 integer_one_node, 0);
4304 if (low == 0 || high == 0)
4305 return 0;
4307 in_p = 1;
4309 else
4310 return 0;
4313 else if (subset)
4314 in_p = 0, low = low0, high = high0;
4315 else
4316 in_p = 0, low = low0, high = high1;
4319 *pin_p = in_p, *plow = low, *phigh = high;
4320 return 1;
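/* For example, merging + [2, 10] with + [5, 20] (both "in") yields the
   intersection + [5, 10], while merging - [2, 10] with - [5, 20] yields
   the union - [2, 20]. */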
4324 /* Subroutine of fold, looking inside expressions of the form
4325 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4326 of the COND_EXPR. This function is being used also to optimize
4327 A op B ? C : A, by reversing the comparison first.
4329 Return a folded expression whose code is not a COND_EXPR
4330 anymore, or NULL_TREE if no folding opportunity is found. */
4332 static tree
4333 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4335 enum tree_code comp_code = TREE_CODE (arg0);
4336 tree arg00 = TREE_OPERAND (arg0, 0);
4337 tree arg01 = TREE_OPERAND (arg0, 1);
4338 tree arg1_type = TREE_TYPE (arg1);
4339 tree tem;
4341 STRIP_NOPS (arg1);
4342 STRIP_NOPS (arg2);
4344 /* If we have A op 0 ? A : -A, consider applying the following
4345 transformations:
4347 A == 0? A : -A same as -A
4348 A != 0? A : -A same as A
4349 A >= 0? A : -A same as abs (A)
4350 A > 0? A : -A same as abs (A)
4351 A <= 0? A : -A same as -abs (A)
4352 A < 0? A : -A same as -abs (A)
4354 None of these transformations work for modes with signed
4355 zeros. If A is +/-0, the first two transformations will
4356 change the sign of the result (from +0 to -0, or vice
4357 versa). The last four will fix the sign of the result,
4358 even though the original expressions could be positive or
4359 negative, depending on the sign of A.
4361 Note that all these transformations are correct if A is
4362 NaN, since the two alternatives (A and -A) are also NaNs. */
4363 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4364 ? real_zerop (arg01)
4365 : integer_zerop (arg01))
4366 && ((TREE_CODE (arg2) == NEGATE_EXPR
4367 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4368 /* In the case that A is of the form X-Y, '-A' (arg2) may
4369 have already been folded to Y-X, check for that. */
4370 || (TREE_CODE (arg1) == MINUS_EXPR
4371 && TREE_CODE (arg2) == MINUS_EXPR
4372 && operand_equal_p (TREE_OPERAND (arg1, 0),
4373 TREE_OPERAND (arg2, 1), 0)
4374 && operand_equal_p (TREE_OPERAND (arg1, 1),
4375 TREE_OPERAND (arg2, 0), 0))))
4376 switch (comp_code)
4378 case EQ_EXPR:
4379 case UNEQ_EXPR:
4380 tem = fold_convert (arg1_type, arg1);
4381 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4382 case NE_EXPR:
4383 case LTGT_EXPR:
4384 return pedantic_non_lvalue (fold_convert (type, arg1));
4385 case UNGE_EXPR:
4386 case UNGT_EXPR:
4387 if (flag_trapping_math)
4388 break;
4389 /* Fall through. */
4390 case GE_EXPR:
4391 case GT_EXPR:
4392 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4393 arg1 = fold_convert (lang_hooks.types.signed_type
4394 (TREE_TYPE (arg1)), arg1);
4395 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4396 return pedantic_non_lvalue (fold_convert (type, tem));
4397 case UNLE_EXPR:
4398 case UNLT_EXPR:
4399 if (flag_trapping_math)
4400 break;
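/* Fall through. */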
4401 case LE_EXPR:
4402 case LT_EXPR:
4403 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4404 arg1 = fold_convert (lang_hooks.types.signed_type
4405 (TREE_TYPE (arg1)), arg1);
4406 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4407 return negate_expr (fold_convert (type, tem));
4408 default:
4409 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4410 break;
4413 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4414 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4415 both transformations are correct when A is NaN: A != 0
4416 is then true, and A == 0 is false. */
4418 if (integer_zerop (arg01) && integer_zerop (arg2))
4420 if (comp_code == NE_EXPR)
4421 return pedantic_non_lvalue (fold_convert (type, arg1));
4422 else if (comp_code == EQ_EXPR)
4423 return fold_convert (type, integer_zero_node);
4426 /* Try some transformations of A op B ? A : B.
4428 A == B? A : B same as B
4429 A != B? A : B same as A
4430 A >= B? A : B same as max (A, B)
4431 A > B? A : B same as max (B, A)
4432 A <= B? A : B same as min (A, B)
4433 A < B? A : B same as min (B, A)
4435 As above, these transformations don't work in the presence
4436 of signed zeros. For example, if A and B are zeros of
4437 opposite sign, the first two transformations will change
4438 the sign of the result. In the last four, the original
4439 expressions give different results for (A=+0, B=-0) and
4440 (A=-0, B=+0), but the transformed expressions do not.
4442 The first two transformations are correct if either A or B
4443 is a NaN. In the first transformation, the condition will
4444 be false, and B will indeed be chosen. In the case of the
4445 second transformation, the condition A != B will be true,
4446 and A will be chosen.
4448 The conversions to max() and min() are not correct if B is
4449 a number and A is not. The conditions in the original
4450 expressions will be false, so all four give B. The min()
4451 and max() versions would give a NaN instead. */
4452 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4453 /* Avoid these transformations if the COND_EXPR may be used
4454 as an lvalue in the C++ front-end. PR c++/19199. */
4455 && (in_gimple_form
4456 || strcmp (lang_hooks.name, "GNU C++") != 0
4457 || ! maybe_lvalue_p (arg1)
4458 || ! maybe_lvalue_p (arg2)))
4460 tree comp_op0 = arg00;
4461 tree comp_op1 = arg01;
4462 tree comp_type = TREE_TYPE (comp_op0);
4464 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4465 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4467 comp_type = type;
4468 comp_op0 = arg1;
4469 comp_op1 = arg2;
4472 switch (comp_code)
4474 case EQ_EXPR:
4475 return pedantic_non_lvalue (fold_convert (type, arg2));
4476 case NE_EXPR:
4477 return pedantic_non_lvalue (fold_convert (type, arg1));
4478 case LE_EXPR:
4479 case LT_EXPR:
4480 case UNLE_EXPR:
4481 case UNLT_EXPR:
4482 /* In C++ a ?: expression can be an lvalue, so put the
4483 operand which will be used if they are equal first
4484 so that we can convert this back to the
4485 corresponding COND_EXPR. */
4486 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4488 comp_op0 = fold_convert (comp_type, comp_op0);
4489 comp_op1 = fold_convert (comp_type, comp_op1);
4490 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4491 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4492 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4493 return pedantic_non_lvalue (fold_convert (type, tem));
4495 break;
4496 case GE_EXPR:
4497 case GT_EXPR:
4498 case UNGE_EXPR:
4499 case UNGT_EXPR:
4500 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4502 comp_op0 = fold_convert (comp_type, comp_op0);
4503 comp_op1 = fold_convert (comp_type, comp_op1);
4504 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4505 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4506 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4507 return pedantic_non_lvalue (fold_convert (type, tem));
4509 break;
4510 case UNEQ_EXPR:
4511 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4512 return pedantic_non_lvalue (fold_convert (type, arg2));
4513 break;
4514 case LTGT_EXPR:
4515 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4516 return pedantic_non_lvalue (fold_convert (type, arg1));
4517 break;
4518 default:
4519 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4520 break;
4524 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4525 we might still be able to simplify this. For example,
4526 if C1 is one less or one more than C2, this might have started
4527 out as a MIN or MAX and been transformed by this function.
4528 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4530 if (INTEGRAL_TYPE_P (type)
4531 && TREE_CODE (arg01) == INTEGER_CST
4532 && TREE_CODE (arg2) == INTEGER_CST)
4533 switch (comp_code)
4535 case EQ_EXPR:
4536 /* We can replace A with C1 in this case. */
4537 arg1 = fold_convert (type, arg01);
4538 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4540 case LT_EXPR:
4541 /* If C1 is C2 + 1, this is min(A, C2). */
4542 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4543 OEP_ONLY_CONST)
4544 && operand_equal_p (arg01,
4545 const_binop (PLUS_EXPR, arg2,
4546 integer_one_node, 0),
4547 OEP_ONLY_CONST))
4548 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4549 type, arg1, arg2));
4550 break;
4552 case LE_EXPR:
4553 /* If C1 is C2 - 1, this is min(A, C2). */
4554 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4555 OEP_ONLY_CONST)
4556 && operand_equal_p (arg01,
4557 const_binop (MINUS_EXPR, arg2,
4558 integer_one_node, 0),
4559 OEP_ONLY_CONST))
4560 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4561 type, arg1, arg2));
4562 break;
4564 case GT_EXPR:
4565 /* If C1 is C2 - 1, this is max(A, C2). */
4566 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4567 OEP_ONLY_CONST)
4568 && operand_equal_p (arg01,
4569 const_binop (MINUS_EXPR, arg2,
4570 integer_one_node, 0),
4571 OEP_ONLY_CONST))
4572 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4573 type, arg1, arg2));
4574 break;
4576 case GE_EXPR:
4577 /* If C1 is C2 + 1, this is max(A, C2). */
4578 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4579 OEP_ONLY_CONST)
4580 && operand_equal_p (arg01,
4581 const_binop (PLUS_EXPR, arg2,
4582 integer_one_node, 0),
4583 OEP_ONLY_CONST))
4584 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4585 type, arg1, arg2));
4586 break;
4587 case NE_EXPR:
4588 break;
4589 default:
4590 gcc_unreachable ();
4593 return NULL_TREE;
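/* E.g. under the rules above, X < Y ? X : Y can fold to MIN_EXPR <X, Y>
   and X > Y ? X : Y to MAX_EXPR <Y, X>, provided NaNs need not be
   honored for the operands' mode. */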
4598 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4599 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4600 #endif
4602 /* EXP is some logical combination of boolean tests. See if we can
4603 merge it into some range test. Return the new tree if so. */
4605 static tree
4606 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4608 int or_op = (code == TRUTH_ORIF_EXPR
4609 || code == TRUTH_OR_EXPR);
4610 int in0_p, in1_p, in_p;
4611 tree low0, low1, low, high0, high1, high;
4612 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4613 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4614 tree tem;
4616 /* If this is an OR operation, invert both sides; we will invert
4617 again at the end. */
4618 if (or_op)
4619 in0_p = ! in0_p, in1_p = ! in1_p;
4621 /* If both expressions are the same, if we can merge the ranges, and we
4622 can build the range test, return it or its inversion. If one of the
4623 ranges is always true or always false, consider it to be the same
4624 expression as the other. */
4625 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4626 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4627 in1_p, low1, high1)
4628 && 0 != (tem = (build_range_check (type,
4629 lhs != 0 ? lhs
4630 : rhs != 0 ? rhs : integer_zero_node,
4631 in_p, low, high))))
4632 return or_op ? invert_truthvalue (tem) : tem;
4634 /* On machines where the branch cost is expensive, if this is a
4635 short-circuited branch and the underlying object on both sides
4636 is the same, make a non-short-circuit operation. */
4637 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4638 && lhs != 0 && rhs != 0
4639 && (code == TRUTH_ANDIF_EXPR
4640 || code == TRUTH_ORIF_EXPR)
4641 && operand_equal_p (lhs, rhs, 0))
4643 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4644 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4645 which cases we can't do this. */
4646 if (simple_operand_p (lhs))
4647 return build2 (code == TRUTH_ANDIF_EXPR
4648 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4649 type, op0, op1);
4651 else if (lang_hooks.decls.global_bindings_p () == 0
4652 && ! CONTAINS_PLACEHOLDER_P (lhs))
4654 tree common = save_expr (lhs);
4656 if (0 != (lhs = build_range_check (type, common,
4657 or_op ? ! in0_p : in0_p,
4658 low0, high0))
4659 && (0 != (rhs = build_range_check (type, common,
4660 or_op ? ! in1_p : in1_p,
4661 low1, high1))))
4662 return build2 (code == TRUTH_ANDIF_EXPR
4663 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4664 type, lhs, rhs);
4668 return 0;
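/* For example, in ch >= '0' && ch <= '9' both sides are made into ranges
   over CH, which merge to + ['0', '9'] and build as one range check. */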
4671 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4672 bit value. Arrange things so the extra bits will be set to zero if and
4673 only if C is sign-extended to its full width. If MASK is nonzero,
4674 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4676 static tree
4677 unextend (tree c, int p, int unsignedp, tree mask)
4679 tree type = TREE_TYPE (c);
4680 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4681 tree temp;
4683 if (p == modesize || unsignedp)
4684 return c;
4686 /* We work by getting just the sign bit into the low-order bit, then
4687 into the high-order bit, then sign-extend. We then XOR that value
4688 with C. */
4689 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4690 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4692 /* We must use a signed type in order to get an arithmetic right shift.
4693 However, we must also avoid introducing accidental overflows, so that
4694 a subsequent call to integer_zerop will work. Hence we must
4695 do the type conversion here. At this point, the constant is either
4696 zero or one, and the conversion to a signed type can never overflow.
4697 We could get an overflow if this conversion is done anywhere else. */
4698 if (TYPE_UNSIGNED (type))
4699 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4701 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4702 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4703 if (mask != 0)
4704 temp = const_binop (BIT_AND_EXPR, temp,
4705 fold_convert (TREE_TYPE (c), mask), 0);
4706 /* If necessary, convert the type back to match the type of C. */
4707 if (TYPE_UNSIGNED (type))
4708 temp = fold_convert (type, temp);
4710 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
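/* E.g. with P = 8 and C = 0x80 in a 32-bit mode, TEMP ends up as
   0xffffff00, so the result C ^ TEMP = 0xffffff80 has the extra bits set
   exactly as sign-extending the 8-bit value would set them. */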
4713 /* Find ways of folding logical expressions of LHS and RHS:
4714 Try to merge two comparisons to the same innermost item.
4715 Look for range tests like "ch >= '0' && ch <= '9'".
4716 Look for combinations of simple terms on machines with expensive branches
4717 and evaluate the RHS unconditionally.
4719 For example, if we have p->a == 2 && p->b == 4 and we can make an
4720 object large enough to span both A and B, we can do this with a comparison
4721 against the object ANDed with the a mask.
4723 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4724 operations to do this with one comparison.
4726 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4727 function and the one above.
4729 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4730 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4732 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4733 two operands.
4735 We return the simplified tree or 0 if no optimization is possible. */
4737 static tree
4738 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4740 /* If this is the "or" of two comparisons, we can do something if
4741 the comparisons are NE_EXPR. If this is the "and", we can do something
4742 if the comparisons are EQ_EXPR. I.e.,
4743 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4745 WANTED_CODE is this operation code. For single bit fields, we can
4746 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4747 comparison for one-bit fields. */
4749 enum tree_code wanted_code;
4750 enum tree_code lcode, rcode;
4751 tree ll_arg, lr_arg, rl_arg, rr_arg;
4752 tree ll_inner, lr_inner, rl_inner, rr_inner;
4753 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4754 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4755 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4756 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4757 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4758 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4759 enum machine_mode lnmode, rnmode;
4760 tree ll_mask, lr_mask, rl_mask, rr_mask;
4761 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4762 tree l_const, r_const;
4763 tree lntype, rntype, result;
4764 int first_bit, end_bit;
4765 int volatilep;
4767 /* Start by getting the comparison codes. Fail if anything is volatile.
4768 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4769 it were surrounded with a NE_EXPR. */
4771 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4772 return 0;
4774 lcode = TREE_CODE (lhs);
4775 rcode = TREE_CODE (rhs);
4777 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4779 lhs = build2 (NE_EXPR, truth_type, lhs,
4780 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4781 lcode = NE_EXPR;
4784 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4786 rhs = build2 (NE_EXPR, truth_type, rhs,
4787 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4788 rcode = NE_EXPR;
4791 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4792 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4793 return 0;
4795 ll_arg = TREE_OPERAND (lhs, 0);
4796 lr_arg = TREE_OPERAND (lhs, 1);
4797 rl_arg = TREE_OPERAND (rhs, 0);
4798 rr_arg = TREE_OPERAND (rhs, 1);
4800 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4801 if (simple_operand_p (ll_arg)
4802 && simple_operand_p (lr_arg))
4804 tree result;
4805 if (operand_equal_p (ll_arg, rl_arg, 0)
4806 && operand_equal_p (lr_arg, rr_arg, 0))
4808 result = combine_comparisons (code, lcode, rcode,
4809 truth_type, ll_arg, lr_arg);
4810 if (result)
4811 return result;
4813 else if (operand_equal_p (ll_arg, rr_arg, 0)
4814 && operand_equal_p (lr_arg, rl_arg, 0))
4816 result = combine_comparisons (code, lcode,
4817 swap_tree_comparison (rcode),
4818 truth_type, ll_arg, lr_arg);
4819 if (result)
4820 return result;
4824 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4825 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4827 /* If the RHS can be evaluated unconditionally and its operands are
4828 simple, it wins to evaluate the RHS unconditionally on machines
4829 with expensive branches. In this case, this isn't a comparison
4830 that can be merged. Avoid doing this if the RHS is a floating-point
4831 comparison since those can trap. */
4833 if (BRANCH_COST >= 2
4834 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4835 && simple_operand_p (rl_arg)
4836 && simple_operand_p (rr_arg))
4838 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4839 if (code == TRUTH_OR_EXPR
4840 && lcode == NE_EXPR && integer_zerop (lr_arg)
4841 && rcode == NE_EXPR && integer_zerop (rr_arg)
4842 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4843 return build2 (NE_EXPR, truth_type,
4844 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4845 ll_arg, rl_arg),
4846 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4848 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4849 if (code == TRUTH_AND_EXPR
4850 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4851 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4852 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4853 return build2 (EQ_EXPR, truth_type,
4854 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4855 ll_arg, rl_arg),
4856 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4858 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4859 return build2 (code, truth_type, lhs, rhs);
4862 /* See if the comparisons can be merged. Then get all the parameters for
4863 each side. */
4865 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4866 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4867 return 0;
4869 volatilep = 0;
4870 ll_inner = decode_field_reference (ll_arg,
4871 &ll_bitsize, &ll_bitpos, &ll_mode,
4872 &ll_unsignedp, &volatilep, &ll_mask,
4873 &ll_and_mask);
4874 lr_inner = decode_field_reference (lr_arg,
4875 &lr_bitsize, &lr_bitpos, &lr_mode,
4876 &lr_unsignedp, &volatilep, &lr_mask,
4877 &lr_and_mask);
4878 rl_inner = decode_field_reference (rl_arg,
4879 &rl_bitsize, &rl_bitpos, &rl_mode,
4880 &rl_unsignedp, &volatilep, &rl_mask,
4881 &rl_and_mask);
4882 rr_inner = decode_field_reference (rr_arg,
4883 &rr_bitsize, &rr_bitpos, &rr_mode,
4884 &rr_unsignedp, &volatilep, &rr_mask,
4885 &rr_and_mask);
4887 /* The inner operation on the lhs of each comparison must be the
4888 same if we are to be able to do anything.
4889 Then see if we have constants. If not, the same must be true for
4890 the rhs's. */
4891 if (volatilep || ll_inner == 0 || rl_inner == 0
4892 || ! operand_equal_p (ll_inner, rl_inner, 0))
4893 return 0;
4895 if (TREE_CODE (lr_arg) == INTEGER_CST
4896 && TREE_CODE (rr_arg) == INTEGER_CST)
4897 l_const = lr_arg, r_const = rr_arg;
4898 else if (lr_inner == 0 || rr_inner == 0
4899 || ! operand_equal_p (lr_inner, rr_inner, 0))
4900 return 0;
4901 else
4902 l_const = r_const = 0;
4904 /* If either comparison code is not correct for our logical operation,
4905 fail. However, we can convert a one-bit comparison against zero into
4906 the opposite comparison against that bit being set in the field. */
4908 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4909 if (lcode != wanted_code)
4911 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4913 /* Make the left operand unsigned, since we are only interested
4914 in the value of one bit. Otherwise we are doing the wrong
4915 thing below. */
4916 ll_unsignedp = 1;
4917 l_const = ll_mask;
4919 else
4920 return 0;
4923 /* This is analogous to the code for l_const above. */
4924 if (rcode != wanted_code)
4926 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4928 rl_unsignedp = 1;
4929 r_const = rl_mask;
4931 else
4932 return 0;
4935 /* After this point all optimizations will generate bit-field
4936 references, which we might not want. */
4937 if (! lang_hooks.can_use_bit_fields_p ())
4938 return 0;
4940 /* See if we can find a mode that contains both fields being compared on
4941 the left. If we can't, fail. Otherwise, update all constants and masks
4942 to be relative to a field of that size. */
4943 first_bit = MIN (ll_bitpos, rl_bitpos);
4944 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4945 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4946 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4947 volatilep);
4948 if (lnmode == VOIDmode)
4949 return 0;
4951 lnbitsize = GET_MODE_BITSIZE (lnmode);
4952 lnbitpos = first_bit & ~ (lnbitsize - 1);
4953 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4954 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4956 if (BYTES_BIG_ENDIAN)
4958 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4959 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4962 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4963 size_int (xll_bitpos), 0);
4964 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4965 size_int (xrl_bitpos), 0);
4967 if (l_const)
4969 l_const = fold_convert (lntype, l_const);
4970 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4971 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4972 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4973 fold_build1 (BIT_NOT_EXPR,
4974 lntype, ll_mask),
4975 0)))
4977 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4979 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4982 if (r_const)
4984 r_const = fold_convert (lntype, r_const);
4985 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4986 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4987 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4988 fold_build1 (BIT_NOT_EXPR,
4989 lntype, rl_mask),
4990 0)))
4992 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4994 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4998 /* If the right sides are not constant, do the same for them. Also,
4999 disallow this optimization if a size or signedness mismatch occurs
5000 between the left and right sides. */
5001 if (l_const == 0)
5003 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5004 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5005 /* Make sure the two fields on the right
5006 correspond to the left without being swapped. */
5007 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5008 return 0;
5010 first_bit = MIN (lr_bitpos, rr_bitpos);
5011 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5012 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5013 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5014 volatilep);
5015 if (rnmode == VOIDmode)
5016 return 0;
5018 rnbitsize = GET_MODE_BITSIZE (rnmode);
5019 rnbitpos = first_bit & ~ (rnbitsize - 1);
5020 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5021 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5023 if (BYTES_BIG_ENDIAN)
5025 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5026 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5029 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5030 size_int (xlr_bitpos), 0);
5031 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5032 size_int (xrr_bitpos), 0);
5034 /* Make a mask that corresponds to both fields being compared.
5035 Do this for both items being compared. If the operands are the
5036 same size and the bits being compared are in the same position
5037 then we can do this by masking both and comparing the masked
5038 results. */
5039 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5040 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5041 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5043 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5044 ll_unsignedp || rl_unsignedp);
5045 if (! all_ones_mask_p (ll_mask, lnbitsize))
5046 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5048 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5049 lr_unsignedp || rr_unsignedp);
5050 if (! all_ones_mask_p (lr_mask, rnbitsize))
5051 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5053 return build2 (wanted_code, truth_type, lhs, rhs);
5056 /* There is still another way we can do something: If both pairs of
5057 fields being compared are adjacent, we may be able to make a wider
5058 field containing them both.
5060 Note that we still must mask the lhs/rhs expressions. Furthermore,
5061 the mask must be shifted to account for the shift done by
5062 make_bit_field_ref. */
5063 if ((ll_bitsize + ll_bitpos == rl_bitpos
5064 && lr_bitsize + lr_bitpos == rr_bitpos)
5065 || (ll_bitpos == rl_bitpos + rl_bitsize
5066 && lr_bitpos == rr_bitpos + rr_bitsize))
5068 tree type;
5070 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5071 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5072 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5073 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5075 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5076 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5077 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5078 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5080 /* Convert to the smaller type before masking out unwanted bits. */
5081 type = lntype;
5082 if (lntype != rntype)
5084 if (lnbitsize > rnbitsize)
5086 lhs = fold_convert (rntype, lhs);
5087 ll_mask = fold_convert (rntype, ll_mask);
5088 type = rntype;
5090 else if (lnbitsize < rnbitsize)
5092 rhs = fold_convert (lntype, rhs);
5093 lr_mask = fold_convert (lntype, lr_mask);
5094 type = lntype;
5098 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5099 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5101 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5102 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5104 return build2 (wanted_code, truth_type, lhs, rhs);
5107 return 0;
5110 /* Handle the case of comparisons with constants. If there is something in
5111 common between the masks, those bits of the constants must be the same.
5112 If not, the condition is always false. Test for this to avoid generating
5113 incorrect code below. */
5114 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5115 if (! integer_zerop (result)
5116 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5117 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5119 if (wanted_code == NE_EXPR)
5121 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5122 return constant_boolean_node (true, truth_type);
5124 else
5126 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5127 return constant_boolean_node (false, truth_type);
5131 /* Construct the expression we will return. First get the component
5132 reference we will make. Unless the mask is all ones the width of
5133 that field, perform the mask operation. Then compare with the
5134 merged constant. */
5135 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5136 ll_unsignedp || rl_unsignedp);
5138 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5139 if (! all_ones_mask_p (ll_mask, lnbitsize))
5140 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5142 return build2 (wanted_code, truth_type, result,
5143 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
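/* For instance, if P->A and P->B are adjacent bit-fields, the test
   p->a == 2 && p->b == 4 can end up here as one load of the containing
   word, masked and compared against the merged constant. */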
5146 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5147 constant. */
5149 static tree
5150 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5152 tree arg0 = op0;
5153 enum tree_code op_code;
5154 tree comp_const = op1;
5155 tree minmax_const;
5156 int consts_equal, consts_lt;
5157 tree inner;
5159 STRIP_SIGN_NOPS (arg0);
5161 op_code = TREE_CODE (arg0);
5162 minmax_const = TREE_OPERAND (arg0, 1);
5163 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5164 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5165 inner = TREE_OPERAND (arg0, 0);
5167 /* If something does not permit us to optimize, return the original tree. */
5168 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5169 || TREE_CODE (comp_const) != INTEGER_CST
5170 || TREE_CONSTANT_OVERFLOW (comp_const)
5171 || TREE_CODE (minmax_const) != INTEGER_CST
5172 || TREE_CONSTANT_OVERFLOW (minmax_const))
5173 return NULL_TREE;
5175 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5176 and GT_EXPR, doing the rest with recursive calls using logical
5177 simplifications. */
5178 switch (code)
5180 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5182 /* FIXME: We should be able to invert code without building a
5183 scratch tree node, but doing so would require us to
5184 duplicate a part of invert_truthvalue here. */
5185 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5186 tem = optimize_minmax_comparison (TREE_CODE (tem),
5187 TREE_TYPE (tem),
5188 TREE_OPERAND (tem, 0),
5189 TREE_OPERAND (tem, 1));
5190 return invert_truthvalue (tem);
5193 case GE_EXPR:
5194 return
5195 fold_build2 (TRUTH_ORIF_EXPR, type,
5196 optimize_minmax_comparison
5197 (EQ_EXPR, type, arg0, comp_const),
5198 optimize_minmax_comparison
5199 (GT_EXPR, type, arg0, comp_const));
5201 case EQ_EXPR:
5202 if (op_code == MAX_EXPR && consts_equal)
5203 /* MAX (X, 0) == 0 -> X <= 0 */
5204 return fold_build2 (LE_EXPR, type, inner, comp_const);
5206 else if (op_code == MAX_EXPR && consts_lt)
5207 /* MAX (X, 0) == 5 -> X == 5 */
5208 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5210 else if (op_code == MAX_EXPR)
5211 /* MAX (X, 0) == -1 -> false */
5212 return omit_one_operand (type, integer_zero_node, inner);
5214 else if (consts_equal)
5215 /* MIN (X, 0) == 0 -> X >= 0 */
5216 return fold_build2 (GE_EXPR, type, inner, comp_const);
5218 else if (consts_lt)
5219 /* MIN (X, 0) == 5 -> false */
5220 return omit_one_operand (type, integer_zero_node, inner);
5222 else
5223 /* MIN (X, 0) == -1 -> X == -1 */
5224 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5226 case GT_EXPR:
5227 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5228 /* MAX (X, 0) > 0 -> X > 0
5229 MAX (X, 0) > 5 -> X > 5 */
5230 return fold_build2 (GT_EXPR, type, inner, comp_const);
5232 else if (op_code == MAX_EXPR)
5233 /* MAX (X, 0) > -1 -> true */
5234 return omit_one_operand (type, integer_one_node, inner);
5236 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5237 /* MIN (X, 0) > 0 -> false
5238 MIN (X, 0) > 5 -> false */
5239 return omit_one_operand (type, integer_zero_node, inner);
5241 else
5242 /* MIN (X, 0) > -1 -> X > -1 */
5243 return fold_build2 (GT_EXPR, type, inner, comp_const);
5245 default:
5246 return NULL_TREE;
5250 /* T is an integer expression that is being multiplied or divided by, or
5251 taken modulo, a constant C (CODE says which operation and what kind of
5252 divide or modulus). See if we can eliminate that operation by folding it with
5253 other operations already in T. WIDE_TYPE, if non-null, is a type that
5254 should be used for the computation if wider than our type.
5256 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5257 (X * 2) + (Y * 4). We must, however, be assured that either the original
5258 expression would not overflow or that overflow is undefined for the type
5259 in the language in question.
5261 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5262 the machine has a multiply-accumulate insn or that this is part of an
5263 addressing calculation.
5265 If we return a non-null expression, it is an equivalent form of the
5266 original computation, but need not be in the original type. */
5268 static tree
5269 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5271 /* To avoid exponential search depth, refuse to allow recursion past
5272 three levels. Beyond that (1) it's highly unlikely that we'll find
5273 something interesting and (2) we've probably processed it before
5274 when we built the inner expression. */
5276 static int depth;
5277 tree ret;
5279 if (depth > 3)
5280 return NULL;
5282 depth++;
5283 ret = extract_muldiv_1 (t, c, code, wide_type);
5284 depth--;
5286 return ret;
5289 static tree
5290 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5292 tree type = TREE_TYPE (t);
5293 enum tree_code tcode = TREE_CODE (t);
5294 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5295 > GET_MODE_SIZE (TYPE_MODE (type)))
5296 ? wide_type : type);
5297 tree t1, t2;
5298 int same_p = tcode == code;
5299 tree op0 = NULL_TREE, op1 = NULL_TREE;
5301 /* Don't deal with constants of zero here; they confuse the code below. */
5302 if (integer_zerop (c))
5303 return NULL_TREE;
5305 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5306 op0 = TREE_OPERAND (t, 0);
5308 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5309 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5311 /* Note that we need not handle conditional operations here since fold
5312 already handles those cases. So just do arithmetic here. */
5313 switch (tcode)
5315 case INTEGER_CST:
5316 /* For a constant, we can always simplify if we are a multiply
5317 or (for divide and modulus) if it is a multiple of our constant. */
5318 if (code == MULT_EXPR
5319 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5320 return const_binop (code, fold_convert (ctype, t),
5321 fold_convert (ctype, c), 0);
5322 break;
5324 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5325 /* If op0 is an expression ... */
5326 if ((COMPARISON_CLASS_P (op0)
5327 || UNARY_CLASS_P (op0)
5328 || BINARY_CLASS_P (op0)
5329 || EXPRESSION_CLASS_P (op0))
5330 /* ... and is unsigned, and its type is smaller than ctype,
5331 then we cannot pass through as widening. */
5332 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5333 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5334 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5335 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5336 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5337 /* ... or this is a truncation (t is narrower than op0),
5338 then we cannot pass through this narrowing. */
5339 || (GET_MODE_SIZE (TYPE_MODE (type))
5340 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5341 /* ... or signedness changes for division or modulus,
5342 then we cannot pass through this conversion. */
5343 || (code != MULT_EXPR
5344 && (TYPE_UNSIGNED (ctype)
5345 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5346 break;
5348 /* Pass the constant down and see if we can make a simplification. If
5349 we can, replace this expression with the inner simplification for
5350 possible later conversion to our or some other type. */
5351 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5352 && TREE_CODE (t2) == INTEGER_CST
5353 && ! TREE_CONSTANT_OVERFLOW (t2)
5354 && (0 != (t1 = extract_muldiv (op0, t2, code,
5355 code == MULT_EXPR
5356 ? ctype : NULL_TREE))))
5357 return t1;
5358 break;
5360 case ABS_EXPR:
5361 /* If widening the type changes it from signed to unsigned, then we
5362 must avoid building ABS_EXPR itself as unsigned. */
5363 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5365 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5366 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5368 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5369 return fold_convert (ctype, t1);
5371 break;
5373 /* FALLTHROUGH */
5374 case NEGATE_EXPR:
5375 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5376 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5377 break;
5379 case MIN_EXPR: case MAX_EXPR:
5380 /* If widening the type changes the signedness, then we can't perform
5381 this optimization as that changes the result. */
5382 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5383 break;
5385 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5386 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5387 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5389 if (tree_int_cst_sgn (c) < 0)
5390 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5392 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5393 fold_convert (ctype, t2));
5395 break;
5397 case LSHIFT_EXPR: case RSHIFT_EXPR:
5398 /* If the second operand is constant, this is a multiplication
5399 or floor division, by a power of two, so we can treat it that
5400 way unless the multiplier or divisor overflows. Signed
5401 left-shift overflow is implementation-defined rather than
5402 undefined in C90, so do not convert signed left shift into
5403 multiplication. */
5404 if (TREE_CODE (op1) == INTEGER_CST
5405 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5406 /* const_binop may not detect overflow correctly,
5407 so check for it explicitly here. */
5408 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5409 && TREE_INT_CST_HIGH (op1) == 0
5410 && 0 != (t1 = fold_convert (ctype,
5411 const_binop (LSHIFT_EXPR,
5412 size_one_node,
5413 op1, 0)))
5414 && ! TREE_OVERFLOW (t1))
5415 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5416 ? MULT_EXPR : FLOOR_DIV_EXPR,
5417 ctype, fold_convert (ctype, op0), t1),
5418 c, code, wide_type);
5419 break;
5421 case PLUS_EXPR: case MINUS_EXPR:
5422 /* See if we can eliminate the operation on both sides. If we can, we
5423 can return a new PLUS or MINUS. If we can't, the only remaining
5424 cases where we can do anything are if the second operand is a
5425 constant. */
5426 t1 = extract_muldiv (op0, c, code, wide_type);
5427 t2 = extract_muldiv (op1, c, code, wide_type);
5428 if (t1 != 0 && t2 != 0
5429 && (code == MULT_EXPR
5430 /* If not multiplication, we can only do this if both operands
5431 are divisible by c. */
5432 || (multiple_of_p (ctype, op0, c)
5433 && multiple_of_p (ctype, op1, c))))
5434 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5435 fold_convert (ctype, t2));
5437 /* If this was a subtraction, negate OP1 and set it to be an addition.
5438 This simplifies the logic below. */
5439 if (tcode == MINUS_EXPR)
5440 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5442 if (TREE_CODE (op1) != INTEGER_CST)
5443 break;
5445 /* If either OP1 or C is negative, this optimization is not safe for
5446 some of the division and remainder types while for others we need
5447 to change the code. */
5448 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5450 if (code == CEIL_DIV_EXPR)
5451 code = FLOOR_DIV_EXPR;
5452 else if (code == FLOOR_DIV_EXPR)
5453 code = CEIL_DIV_EXPR;
5454 else if (code != MULT_EXPR
5455 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5456 break;
5459 /* If it's a multiply or a division/modulus operation of a multiple
5460 of our constant, do the operation and verify it doesn't overflow. */
5461 if (code == MULT_EXPR
5462 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5464 op1 = const_binop (code, fold_convert (ctype, op1),
5465 fold_convert (ctype, c), 0);
5466 /* We allow the constant to overflow with wrapping semantics. */
5467 if (op1 == 0
5468 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5469 break;
5471 else
5472 break;
5474 /* If we have an unsigned type that is not a sizetype, we cannot widen
5475 the operation since it will change the result if the original
5476 computation overflowed. */
5477 if (TYPE_UNSIGNED (ctype)
5478 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5479 && ctype != type)
5480 break;
5482 /* If we were able to eliminate our operation from the first side,
5483 apply our operation to the second side and reform the PLUS. */
5484 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5485 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5487 /* The last case is if we are a multiply. In that case, we can
5488 apply the distributive law to commute the multiply and addition
5489 if the multiplication of the constants doesn't overflow. */
5490 if (code == MULT_EXPR)
5491 return fold_build2 (tcode, ctype,
5492 fold_build2 (code, ctype,
5493 fold_convert (ctype, op0),
5494 fold_convert (ctype, c)),
5495 op1);
5497 break;
5499 case MULT_EXPR:
5500 /* We have a special case here if we are doing something like
5501 (C * 8) % 4 since we know that's zero. */
5502 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5503 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5504 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5505 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5506 return omit_one_operand (type, integer_zero_node, op0);
5508 /* ... fall through ... */
5510 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5511 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5512 /* If we can extract our operation from the LHS, do so and return a
5513 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5514 do something only if the second operand is a constant. */
5515 if (same_p
5516 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5517 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5518 fold_convert (ctype, op1));
5519 else if (tcode == MULT_EXPR && code == MULT_EXPR
5520 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5521 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5522 fold_convert (ctype, t1));
5523 else if (TREE_CODE (op1) != INTEGER_CST)
5524 return 0;
5526 /* If these are the same operation types, we can associate them
5527 assuming no overflow. */
5528 if (tcode == code
5529 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5530 fold_convert (ctype, c), 0))
5531 && ! TREE_OVERFLOW (t1))
5532 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5534 /* If these operations "cancel" each other, we have the main
5535 optimizations of this pass, which occur when either constant is a
5536 multiple of the other, in which case we replace this with an
5537 operation of either CODE or TCODE.
5539 If we have an unsigned type that is not a sizetype, we cannot do
5540 this since it will change the result if the original computation
5541 overflowed. */
5542 if ((! TYPE_UNSIGNED (ctype)
5543 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5544 && ! flag_wrapv
5545 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5546 || (tcode == MULT_EXPR
5547 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5548 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5550 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5551 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5552 fold_convert (ctype,
5553 const_binop (TRUNC_DIV_EXPR,
5554 op1, c, 0)));
5555 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5556 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5557 fold_convert (ctype,
5558 const_binop (TRUNC_DIV_EXPR,
5559 c, op1, 0)));
5561 break;
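/* An illustrative sketch of the cancelling folds above, for signed
   operands where the guards permit it:

     (x * 6) / 2   becomes   x * 3   (OP1 a multiple of C), and
     (x * 2) / 6   becomes   x / 3   (C a multiple of OP1),

   while (x * 8) % 4 was already folded to 0 by the MULT_EXPR case
   before the fall-through.  */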
5563 default:
5564 break;
5567 return 0;
5570 /* Return a node which has the indicated constant VALUE (either 0 or
5571 1), and is of the indicated TYPE. */
5573 tree
5574 constant_boolean_node (int value, tree type)
5576 if (type == integer_type_node)
5577 return value ? integer_one_node : integer_zero_node;
5578 else if (type == boolean_type_node)
5579 return value ? boolean_true_node : boolean_false_node;
5580 else
5581 return build_int_cst (type, value);
5585 /* Return true if expr looks like an ARRAY_REF and set base and
5586 offset to the appropriate trees. If there is no offset,
5587 offset is set to NULL_TREE. Base will be canonicalized to
5588 something you can get the element type from using
5589 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5590 in bytes relative to the base. */
5592 static bool
5593 extract_array_ref (tree expr, tree *base, tree *offset)
5595 /* One canonical form is a PLUS_EXPR with the first
5596 argument being an ADDR_EXPR with a possible NOP_EXPR
5597 attached. */
5598 if (TREE_CODE (expr) == PLUS_EXPR)
5600 tree op0 = TREE_OPERAND (expr, 0);
5601 tree inner_base, dummy1;
5602 /* Strip NOP_EXPRs here because the C frontends and/or
5603 folders may present us with (int *)&x.a + 4B. */
5604 STRIP_NOPS (op0);
5605 if (extract_array_ref (op0, &inner_base, &dummy1))
5607 *base = inner_base;
5608 if (dummy1 == NULL_TREE)
5609 *offset = TREE_OPERAND (expr, 1);
5610 else
5611 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5612 dummy1, TREE_OPERAND (expr, 1));
5613 return true;
5616 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5617 which we transform into an ADDR_EXPR with appropriate
5618 offset. For other arguments to the ADDR_EXPR we assume
5619 zero offset and as such do not care about the ADDR_EXPR
5620 type and strip possible nops from it. */
5621 else if (TREE_CODE (expr) == ADDR_EXPR)
5623 tree op0 = TREE_OPERAND (expr, 0);
5624 if (TREE_CODE (op0) == ARRAY_REF)
5626 tree idx = TREE_OPERAND (op0, 1);
5627 *base = TREE_OPERAND (op0, 0);
5628 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5629 array_ref_element_size (op0));
5631 else
5633 /* Handle array-to-pointer decay as &a. */
5634 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5635 *base = TREE_OPERAND (expr, 0);
5636 else
5637 *base = expr;
5638 *offset = NULL_TREE;
5640 return true;
5642 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5643 else if (SSA_VAR_P (expr)
5644 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5646 *base = expr;
5647 *offset = NULL_TREE;
5648 return true;
5651 return false;
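/* For illustration, with a hypothetical array a and index i: for the
   C expression &a[4] + i the PLUS_EXPR arm above recurses into
   &a[4], yielding base a and offset 4 * sizeof (a[0]), and then adds
   i to that offset; a plain pointer p yields base p and a NULL_TREE
   offset.  */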
5655 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5656 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5657 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5658 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5659 COND is the first argument to CODE; otherwise (as in the example
5660 given here), it is the second argument. TYPE is the type of the
5661 original expression. Return NULL_TREE if no simplification is
5662 possible. */
5664 static tree
5665 fold_binary_op_with_conditional_arg (enum tree_code code,
5666 tree type, tree op0, tree op1,
5667 tree cond, tree arg, int cond_first_p)
5669 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5670 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5671 tree test, true_value, false_value;
5672 tree lhs = NULL_TREE;
5673 tree rhs = NULL_TREE;
5675 /* This transformation is only worthwhile if we don't have to wrap
5676 arg in a SAVE_EXPR, and the operation can be simplified on at least
5677 one of the branches once it's pushed inside the COND_EXPR. */
5678 if (!TREE_CONSTANT (arg))
5679 return NULL_TREE;
5681 if (TREE_CODE (cond) == COND_EXPR)
5683 test = TREE_OPERAND (cond, 0);
5684 true_value = TREE_OPERAND (cond, 1);
5685 false_value = TREE_OPERAND (cond, 2);
5686 /* If this operand throws an exception, then it does not make
5687 sense to try to perform a logical or arithmetic operation
5688 involving it. */
5689 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5690 lhs = true_value;
5691 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5692 rhs = false_value;
5694 else
5696 tree testtype = TREE_TYPE (cond);
5697 test = cond;
5698 true_value = constant_boolean_node (true, testtype);
5699 false_value = constant_boolean_node (false, testtype);
5702 arg = fold_convert (arg_type, arg);
5703 if (lhs == 0)
5705 true_value = fold_convert (cond_type, true_value);
5706 if (cond_first_p)
5707 lhs = fold_build2 (code, type, true_value, arg);
5708 else
5709 lhs = fold_build2 (code, type, arg, true_value);
5711 if (rhs == 0)
5713 false_value = fold_convert (cond_type, false_value);
5714 if (cond_first_p)
5715 rhs = fold_build2 (code, type, false_value, arg);
5716 else
5717 rhs = fold_build2 (code, type, arg, false_value);
5720 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5721 return fold_convert (type, test);
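/* An illustrative example, with a hypothetical constant ARG of 4:

     4 + (b ? x : y)   becomes   b ? (4 + x) : (4 + y)

   so each arm can then be folded on its own; the TREE_CONSTANT check
   above keeps us from duplicating a costly ARG into both arms.  */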
5725 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5727 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5728 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5729 ADDEND is the same as X.
5731 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5732 and finite. The problematic cases are when X is zero, and its mode
5733 has signed zeros. In the case of rounding towards -infinity,
5734 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5735 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5737 static bool
5738 fold_real_zero_addition_p (tree type, tree addend, int negate)
5740 if (!real_zerop (addend))
5741 return false;
5743 /* Don't allow the fold with -fsignaling-nans. */
5744 if (HONOR_SNANS (TYPE_MODE (type)))
5745 return false;
5747 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5748 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5749 return true;
5751 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5752 if (TREE_CODE (addend) == REAL_CST
5753 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5754 negate = !negate;
5756 /* The mode has signed zeros, and we have to honor their sign.
5757 In this situation, there is only one case we can return true for.
5758 X - 0 is the same as X unless rounding towards -infinity is
5759 supported. */
5760 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
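/* A concrete reading of the rules above (illustrative only): when
   signed zeros are honored but sign-dependent rounding is not,
   x - 0.0 still folds to x, because only 0.0 - 0.0 under rounding
   towards -infinity could produce an unexpected -0.0; x + 0.0 must
   not fold, since -0.0 + 0.0 is +0.0 and would lose the sign of a
   zero x.  */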
5763 /* Subroutine of fold() that checks comparisons of built-in math
5764 functions against real constants.
5766 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5767 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5768 is the type of the result and ARG0 and ARG1 are the operands of the
5769 comparison. ARG1 must be a TREE_REAL_CST.
5771 The function returns the constant folded tree if a simplification
5772 can be made, and NULL_TREE otherwise. */
5774 static tree
5775 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5776 tree type, tree arg0, tree arg1)
5778 REAL_VALUE_TYPE c;
5780 if (BUILTIN_SQRT_P (fcode))
5782 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5783 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5785 c = TREE_REAL_CST (arg1);
5786 if (REAL_VALUE_NEGATIVE (c))
5788 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
5789 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5790 return omit_one_operand (type, integer_zero_node, arg);
5792 /* sqrt(x) > y is always true, if y is negative and we
5793 don't care about NaNs, i.e. negative values of x. */
5794 if (code == NE_EXPR || !HONOR_NANS (mode))
5795 return omit_one_operand (type, integer_one_node, arg);
5797 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5798 return fold_build2 (GE_EXPR, type, arg,
5799 build_real (TREE_TYPE (arg), dconst0));
5801 else if (code == GT_EXPR || code == GE_EXPR)
5803 REAL_VALUE_TYPE c2;
5805 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5806 real_convert (&c2, mode, &c2);
5808 if (REAL_VALUE_ISINF (c2))
5810 /* sqrt(x) > y is x == +Inf, when y is very large. */
5811 if (HONOR_INFINITIES (mode))
5812 return fold_build2 (EQ_EXPR, type, arg,
5813 build_real (TREE_TYPE (arg), c2));
5815 /* sqrt(x) > y is always false, when y is very large
5816 and we don't care about infinities. */
5817 return omit_one_operand (type, integer_zero_node, arg);
5820 /* sqrt(x) > c is the same as x > c*c. */
5821 return fold_build2 (code, type, arg,
5822 build_real (TREE_TYPE (arg), c2));
5824 else if (code == LT_EXPR || code == LE_EXPR)
5826 REAL_VALUE_TYPE c2;
5828 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5829 real_convert (&c2, mode, &c2);
5831 if (REAL_VALUE_ISINF (c2))
5833 /* sqrt(x) < y is always true, when y is a very large
5834 value and we don't care about NaNs or Infinities. */
5835 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5836 return omit_one_operand (type, integer_one_node, arg);
5838 /* sqrt(x) < y is x != +Inf when y is very large and we
5839 don't care about NaNs. */
5840 if (! HONOR_NANS (mode))
5841 return fold_build2 (NE_EXPR, type, arg,
5842 build_real (TREE_TYPE (arg), c2));
5844 /* sqrt(x) < y is x >= 0 when y is very large and we
5845 don't care about Infinities. */
5846 if (! HONOR_INFINITIES (mode))
5847 return fold_build2 (GE_EXPR, type, arg,
5848 build_real (TREE_TYPE (arg), dconst0));
5850 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5851 if (lang_hooks.decls.global_bindings_p () != 0
5852 || CONTAINS_PLACEHOLDER_P (arg))
5853 return NULL_TREE;
5855 arg = save_expr (arg);
5856 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5857 fold_build2 (GE_EXPR, type, arg,
5858 build_real (TREE_TYPE (arg),
5859 dconst0)),
5860 fold_build2 (NE_EXPR, type, arg,
5861 build_real (TREE_TYPE (arg),
5862 c2)));
5865 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5866 if (! HONOR_NANS (mode))
5867 return fold_build2 (code, type, arg,
5868 build_real (TREE_TYPE (arg), c2));
5870 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5871 if (lang_hooks.decls.global_bindings_p () == 0
5872 && ! CONTAINS_PLACEHOLDER_P (arg))
5874 arg = save_expr (arg);
5875 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5876 fold_build2 (GE_EXPR, type, arg,
5877 build_real (TREE_TYPE (arg),
5878 dconst0)),
5879 fold_build2 (code, type, arg,
5880 build_real (TREE_TYPE (arg),
5881 c2)));
5886 return NULL_TREE;
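/* Illustrative examples of the sqrt comparisons above, for a
   hypothetical double argument x:

     sqrt (x) < -1.0   becomes   0 (always false)
     sqrt (x) > -1.0   becomes   x >= 0.0 when NaNs are honored
     sqrt (x) >  2.0   becomes   x > 4.0 (c2 = 2.0 * 2.0)
     sqrt (x) <  2.0   becomes   x >= 0.0 && x < 4.0, or simply
                                 x < 4.0 when NaNs are ignored.  */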
5889 /* Subroutine of fold() that optimizes comparisons against Infinities,
5890 either +Inf or -Inf.
5892 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5893 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5894 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5896 The function returns the constant folded tree if a simplification
5897 can be made, and NULL_TREE otherwise. */
5899 static tree
5900 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5902 enum machine_mode mode;
5903 REAL_VALUE_TYPE max;
5904 tree temp;
5905 bool neg;
5907 mode = TYPE_MODE (TREE_TYPE (arg0));
5909 /* For negative infinity swap the sense of the comparison. */
5910 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5911 if (neg)
5912 code = swap_tree_comparison (code);
5914 switch (code)
5916 case GT_EXPR:
5917 /* x > +Inf is always false, if we ignore sNaNs. */
5918 if (HONOR_SNANS (mode))
5919 return NULL_TREE;
5920 return omit_one_operand (type, integer_zero_node, arg0);
5922 case LE_EXPR:
5923 /* x <= +Inf is always true, if we don't care about NaNs. */
5924 if (! HONOR_NANS (mode))
5925 return omit_one_operand (type, integer_one_node, arg0);
5927 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5928 if (lang_hooks.decls.global_bindings_p () == 0
5929 && ! CONTAINS_PLACEHOLDER_P (arg0))
5931 arg0 = save_expr (arg0);
5932 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5934 break;
5936 case EQ_EXPR:
5937 case GE_EXPR:
5938 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5939 real_maxval (&max, neg, mode);
5940 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5941 arg0, build_real (TREE_TYPE (arg0), max));
5943 case LT_EXPR:
5944 /* x < +Inf is always equal to x <= DBL_MAX. */
5945 real_maxval (&max, neg, mode);
5946 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5947 arg0, build_real (TREE_TYPE (arg0), max));
5949 case NE_EXPR:
5950 /* x != +Inf is always equal to !(x > DBL_MAX). */
5951 real_maxval (&max, neg, mode);
5952 if (! HONOR_NANS (mode))
5953 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5954 arg0, build_real (TREE_TYPE (arg0), max));
5956 /* The transformation below creates non-gimple code and thus is
5957 not appropriate if we are in gimple form. */
5958 if (in_gimple_form)
5959 return NULL_TREE;
5961 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5962 arg0, build_real (TREE_TYPE (arg0), max));
5963 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5965 default:
5966 break;
5969 return NULL_TREE;
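/* For illustration, with a hypothetical double x compared against
   +Inf:

     x >  +Inf   becomes   0 (false), unless sNaNs are honored
     x <= +Inf   becomes   1, or x == x when NaNs matter
     x >= +Inf   becomes   x > DBL_MAX
     x <  +Inf   becomes   x <= DBL_MAX

   comparisons against -Inf are handled by first swapping the sense
   of the comparison.  */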
5972 /* Subroutine of fold() that optimizes comparisons of a division by
5973 a nonzero integer constant against an integer constant, i.e.
5974 X/C1 op C2.
5976 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5977 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5978 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5980 The function returns the constant folded tree if a simplification
5981 can be made, and NULL_TREE otherwise. */
5983 static tree
5984 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5986 tree prod, tmp, hi, lo;
5987 tree arg00 = TREE_OPERAND (arg0, 0);
5988 tree arg01 = TREE_OPERAND (arg0, 1);
5989 unsigned HOST_WIDE_INT lpart;
5990 HOST_WIDE_INT hpart;
5991 int overflow;
5993 /* We have to do this the hard way to detect unsigned overflow.
5994 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5995 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5996 TREE_INT_CST_HIGH (arg01),
5997 TREE_INT_CST_LOW (arg1),
5998 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5999 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6000 prod = force_fit_type (prod, -1, overflow, false);
6002 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6004 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6005 lo = prod;
6007 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6008 overflow = add_double (TREE_INT_CST_LOW (prod),
6009 TREE_INT_CST_HIGH (prod),
6010 TREE_INT_CST_LOW (tmp),
6011 TREE_INT_CST_HIGH (tmp),
6012 &lpart, &hpart);
6013 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6014 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6015 TREE_CONSTANT_OVERFLOW (prod));
6017 else if (tree_int_cst_sgn (arg01) >= 0)
6019 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6020 switch (tree_int_cst_sgn (arg1))
6022 case -1:
6023 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6024 hi = prod;
6025 break;
6027 case 0:
6028 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6029 hi = tmp;
6030 break;
6032 case 1:
6033 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6034 lo = prod;
6035 break;
6037 default:
6038 gcc_unreachable ();
6041 else
6043 /* A negative divisor reverses the relational operators. */
6044 code = swap_tree_comparison (code);
6046 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6047 switch (tree_int_cst_sgn (arg1))
6049 case -1:
6050 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6051 lo = prod;
6052 break;
6054 case 0:
6055 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6056 lo = tmp;
6057 break;
6059 case 1:
6060 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6061 hi = prod;
6062 break;
6064 default:
6065 gcc_unreachable ();
6069 switch (code)
6071 case EQ_EXPR:
6072 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6073 return omit_one_operand (type, integer_zero_node, arg00);
6074 if (TREE_OVERFLOW (hi))
6075 return fold_build2 (GE_EXPR, type, arg00, lo);
6076 if (TREE_OVERFLOW (lo))
6077 return fold_build2 (LE_EXPR, type, arg00, hi);
6078 return build_range_check (type, arg00, 1, lo, hi);
6080 case NE_EXPR:
6081 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6082 return omit_one_operand (type, integer_one_node, arg00);
6083 if (TREE_OVERFLOW (hi))
6084 return fold_build2 (LT_EXPR, type, arg00, lo);
6085 if (TREE_OVERFLOW (lo))
6086 return fold_build2 (GT_EXPR, type, arg00, hi);
6087 return build_range_check (type, arg00, 0, lo, hi);
6089 case LT_EXPR:
6090 if (TREE_OVERFLOW (lo))
6091 return omit_one_operand (type, integer_zero_node, arg00);
6092 return fold_build2 (LT_EXPR, type, arg00, lo);
6094 case LE_EXPR:
6095 if (TREE_OVERFLOW (hi))
6096 return omit_one_operand (type, integer_one_node, arg00);
6097 return fold_build2 (LE_EXPR, type, arg00, hi);
6099 case GT_EXPR:
6100 if (TREE_OVERFLOW (hi))
6101 return omit_one_operand (type, integer_zero_node, arg00);
6102 return fold_build2 (GT_EXPR, type, arg00, hi);
6104 case GE_EXPR:
6105 if (TREE_OVERFLOW (lo))
6106 return omit_one_operand (type, integer_one_node, arg00);
6107 return fold_build2 (GE_EXPR, type, arg00, lo);
6109 default:
6110 break;
6113 return NULL_TREE;
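/* A worked example of the range computation above, for hypothetical
   signed x: x / 3 == 2 holds exactly when x is in [6, 8], so
   lo = 6, hi = 6 + (3 - 1) = 8 and the comparison folds to the
   range check 6 <= x && x <= 8, while x / 3 < 2 folds to x < 6.  */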
6117 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6118 equality/inequality test, then return a simplified form of the test
6119 using a sign test. Otherwise return NULL. TYPE is the desired
6120 result type. */
6122 static tree
6123 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6124 tree result_type)
6126 /* If this is testing a single bit, we can optimize the test. */
6127 if ((code == NE_EXPR || code == EQ_EXPR)
6128 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6129 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6131 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6132 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6133 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6135 if (arg00 != NULL_TREE
6136 /* This is only a win if casting to a signed type is cheap,
6137 i.e. when arg00's type is not a partial mode. */
6138 && TYPE_PRECISION (TREE_TYPE (arg00))
6139 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6141 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6142 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6143 result_type, fold_convert (stype, arg00),
6144 fold_convert (stype, integer_zero_node));
6148 return NULL_TREE;
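/* An illustrative example, for a hypothetical 32-bit int x:
   (x & 0x80000000) != 0 tests exactly the sign bit, so it folds to
   x < 0, and the == 0 form folds to x >= 0.  */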
6151 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6152 equality/inequality test, then return a simplified form of
6153 the test using shifts and logical operations. Otherwise return
6154 NULL. TYPE is the desired result type. */
6156 tree
6157 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6158 tree result_type)
6160 /* If this is testing a single bit, we can optimize the test. */
6161 if ((code == NE_EXPR || code == EQ_EXPR)
6162 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6163 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6165 tree inner = TREE_OPERAND (arg0, 0);
6166 tree type = TREE_TYPE (arg0);
6167 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6168 enum machine_mode operand_mode = TYPE_MODE (type);
6169 int ops_unsigned;
6170 tree signed_type, unsigned_type, intermediate_type;
6171 tree tem;
6173 /* First, see if we can fold the single bit test into a sign-bit
6174 test. */
6175 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6176 result_type);
6177 if (tem)
6178 return tem;
6180 /* Otherwise we have (A & C) != 0 where C is a single bit,
6181 convert that into ((A >> C2) & 1), where C2 = log2(C).
6182 Similarly for (A & C) == 0. */
6184 /* If INNER is a right shift of a constant and it plus BITNUM does
6185 not overflow, adjust BITNUM and INNER. */
6186 if (TREE_CODE (inner) == RSHIFT_EXPR
6187 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6188 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6189 && bitnum < TYPE_PRECISION (type)
6190 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6191 bitnum - TYPE_PRECISION (type)))
6193 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6194 inner = TREE_OPERAND (inner, 0);
6197 /* If we are going to be able to omit the AND below, we must do our
6198 operations as unsigned. If we must use the AND, we have a choice.
6199 Normally unsigned is faster, but for some machines signed is. */
6200 #ifdef LOAD_EXTEND_OP
6201 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6202 && !flag_syntax_only) ? 0 : 1;
6203 #else
6204 ops_unsigned = 1;
6205 #endif
6207 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6208 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6209 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6210 inner = fold_convert (intermediate_type, inner);
6212 if (bitnum != 0)
6213 inner = build2 (RSHIFT_EXPR, intermediate_type,
6214 inner, size_int (bitnum));
6216 if (code == EQ_EXPR)
6217 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6218 inner, integer_one_node);
6220 /* Put the AND last so it can combine with more things. */
6221 inner = build2 (BIT_AND_EXPR, intermediate_type,
6222 inner, integer_one_node);
6224 /* Make sure to return the proper type. */
6225 inner = fold_convert (result_type, inner);
6227 return inner;
6229 return NULL_TREE;
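/* For illustration, with C2 = log2 (C) and a hypothetical x:

     (x & 8) != 0   becomes   ((unsigned) x >> 3) & 1
     (x & 8) == 0   becomes   (((unsigned) x >> 3) ^ 1) & 1

   the XOR with 1 inverts the tested bit for the == 0 form, and the
   AND is emitted last so it can combine with surrounding folds.  */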
6232 /* Check whether we are allowed to reorder operands arg0 and arg1,
6233 such that the evaluation of arg1 occurs before arg0. */
6235 static bool
6236 reorder_operands_p (tree arg0, tree arg1)
6238 if (! flag_evaluation_order)
6239 return true;
6240 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6241 return true;
6242 return ! TREE_SIDE_EFFECTS (arg0)
6243 && ! TREE_SIDE_EFFECTS (arg1);
6246 /* Test whether it is preferable to swap two operands, ARG0 and
6247 ARG1, for example because ARG0 is an integer constant and ARG1
6248 isn't. If REORDER is true, only recommend swapping if we can
6249 evaluate the operands in reverse order. */
6251 bool
6252 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6254 STRIP_SIGN_NOPS (arg0);
6255 STRIP_SIGN_NOPS (arg1);
6257 if (TREE_CODE (arg1) == INTEGER_CST)
6258 return 0;
6259 if (TREE_CODE (arg0) == INTEGER_CST)
6260 return 1;
6262 if (TREE_CODE (arg1) == REAL_CST)
6263 return 0;
6264 if (TREE_CODE (arg0) == REAL_CST)
6265 return 1;
6267 if (TREE_CODE (arg1) == COMPLEX_CST)
6268 return 0;
6269 if (TREE_CODE (arg0) == COMPLEX_CST)
6270 return 1;
6272 if (TREE_CONSTANT (arg1))
6273 return 0;
6274 if (TREE_CONSTANT (arg0))
6275 return 1;
6277 if (optimize_size)
6278 return 0;
6280 if (reorder && flag_evaluation_order
6281 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6282 return 0;
6284 if (DECL_P (arg1))
6285 return 0;
6286 if (DECL_P (arg0))
6287 return 1;
6289 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6290 for commutative and comparison operators. Ensuring a canonical
6291 form allows the optimizers to find additional redundancies without
6292 having to explicitly check for both orderings. */
6293 if (TREE_CODE (arg0) == SSA_NAME
6294 && TREE_CODE (arg1) == SSA_NAME
6295 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6296 return 1;
6298 return 0;
6301 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6302 ARG0 is extended to a wider type. */
6304 static tree
6305 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6307 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6308 tree arg1_unw;
6309 tree shorter_type, outer_type;
6310 tree min, max;
6311 bool above, below;
6313 if (arg0_unw == arg0)
6314 return NULL_TREE;
6315 shorter_type = TREE_TYPE (arg0_unw);
6317 #ifdef HAVE_canonicalize_funcptr_for_compare
6318 /* Disable this optimization if we're casting a function pointer
6319 type on targets that require function pointer canonicalization. */
6320 if (HAVE_canonicalize_funcptr_for_compare
6321 && TREE_CODE (shorter_type) == POINTER_TYPE
6322 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6323 return NULL_TREE;
6324 #endif
6326 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6327 return NULL_TREE;
6329 arg1_unw = get_unwidened (arg1, shorter_type);
6331 /* If possible, express the comparison in the shorter mode. */
6332 if ((code == EQ_EXPR || code == NE_EXPR
6333 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6334 && (TREE_TYPE (arg1_unw) == shorter_type
6335 || (TREE_CODE (arg1_unw) == INTEGER_CST
6336 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6337 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6338 && int_fits_type_p (arg1_unw, shorter_type))))
6339 return fold_build2 (code, type, arg0_unw,
6340 fold_convert (shorter_type, arg1_unw));
6342 if (TREE_CODE (arg1_unw) != INTEGER_CST
6343 || TREE_CODE (shorter_type) != INTEGER_TYPE
6344 || !int_fits_type_p (arg1_unw, shorter_type))
6345 return NULL_TREE;
6347 /* If we are comparing with an integer that does not fit into the range
6348 of the shorter type, the result is known. */
6349 outer_type = TREE_TYPE (arg1_unw);
6350 min = lower_bound_in_type (outer_type, shorter_type);
6351 max = upper_bound_in_type (outer_type, shorter_type);
6353 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6354 max, arg1_unw));
6355 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6356 arg1_unw, min));
6358 switch (code)
6360 case EQ_EXPR:
6361 if (above || below)
6362 return omit_one_operand (type, integer_zero_node, arg0);
6363 break;
6365 case NE_EXPR:
6366 if (above || below)
6367 return omit_one_operand (type, integer_one_node, arg0);
6368 break;
6370 case LT_EXPR:
6371 case LE_EXPR:
6372 if (above)
6373 return omit_one_operand (type, integer_one_node, arg0);
6374 else if (below)
6375 return omit_one_operand (type, integer_zero_node, arg0);
6377 case GT_EXPR:
6378 case GE_EXPR:
6379 if (above)
6380 return omit_one_operand (type, integer_zero_node, arg0);
6381 else if (below)
6382 return omit_one_operand (type, integer_one_node, arg0);
6384 default:
6385 break;
6388 return NULL_TREE;
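/* Illustrative examples, with a hypothetical 16-bit short s widened
   to int for the comparison:

     (int) s == 100     becomes   s == 100, done in the short type
     (int) s == 70000   becomes   0, since 70000 lies above the
                                  upper bound of the shorter type.  */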
6391 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6392 ARG0 just the signedness is changed. */
6394 static tree
6395 fold_sign_changed_comparison (enum tree_code code, tree type,
6396 tree arg0, tree arg1)
6398 tree arg0_inner, tmp;
6399 tree inner_type, outer_type;
6401 if (TREE_CODE (arg0) != NOP_EXPR
6402 && TREE_CODE (arg0) != CONVERT_EXPR)
6403 return NULL_TREE;
6405 outer_type = TREE_TYPE (arg0);
6406 arg0_inner = TREE_OPERAND (arg0, 0);
6407 inner_type = TREE_TYPE (arg0_inner);
6409 #ifdef HAVE_canonicalize_funcptr_for_compare
6410 /* Disable this optimization if we're casting a function pointer
6411 type on targets that require function pointer canonicalization. */
6412 if (HAVE_canonicalize_funcptr_for_compare
6413 && TREE_CODE (inner_type) == POINTER_TYPE
6414 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6415 return NULL_TREE;
6416 #endif
6418 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6419 return NULL_TREE;
6421 if (TREE_CODE (arg1) != INTEGER_CST
6422 && !((TREE_CODE (arg1) == NOP_EXPR
6423 || TREE_CODE (arg1) == CONVERT_EXPR)
6424 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6425 return NULL_TREE;
6427 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6428 && code != NE_EXPR
6429 && code != EQ_EXPR)
6430 return NULL_TREE;
6432 if (TREE_CODE (arg1) == INTEGER_CST)
6434 tmp = build_int_cst_wide (inner_type,
6435 TREE_INT_CST_LOW (arg1),
6436 TREE_INT_CST_HIGH (arg1));
6437 arg1 = force_fit_type (tmp, 0,
6438 TREE_OVERFLOW (arg1),
6439 TREE_CONSTANT_OVERFLOW (arg1));
6441 else
6442 arg1 = fold_convert (inner_type, arg1);
6444 return fold_build2 (code, type, arg0_inner, arg1);
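/* An illustrative example, for a hypothetical int x: the equality
   (unsigned) x == 5U changes only the signedness of x, so it folds
   to x == 5 with the constant re-expressed in the inner type; an
   ordered comparison such as (unsigned) x < 5U is left alone, since
   changing its signedness would change its meaning.  */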
6447 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6448 the step of the array. Reconstructs s and delta in the case of s * delta
6449 being an integer constant (and thus already folded).
6450 ADDR is the address and OP1 is the multiplicative expression.
6451 If the function succeeds, the new address expression is returned. Otherwise
6452 NULL_TREE is returned. */
6454 static tree
6455 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6457 tree s, delta, step;
6458 tree ref = TREE_OPERAND (addr, 0), pref;
6459 tree ret, pos;
6460 tree itype;
6462 /* Canonicalize op1 into a possibly non-constant delta
6463 and an INTEGER_CST s. */
6464 if (TREE_CODE (op1) == MULT_EXPR)
6466 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6468 STRIP_NOPS (arg0);
6469 STRIP_NOPS (arg1);
6471 if (TREE_CODE (arg0) == INTEGER_CST)
6473 s = arg0;
6474 delta = arg1;
6476 else if (TREE_CODE (arg1) == INTEGER_CST)
6478 s = arg1;
6479 delta = arg0;
6481 else
6482 return NULL_TREE;
6484 else if (TREE_CODE (op1) == INTEGER_CST)
6486 delta = op1;
6487 s = NULL_TREE;
6489 else
6491 /* Treat op1 as delta * 1. */
6492 delta = op1;
6493 s = integer_one_node;
6496 for (;; ref = TREE_OPERAND (ref, 0))
6498 if (TREE_CODE (ref) == ARRAY_REF)
6500 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6501 if (! itype)
6502 continue;
6504 step = array_ref_element_size (ref);
6505 if (TREE_CODE (step) != INTEGER_CST)
6506 continue;
6508 if (s)
6510 if (! tree_int_cst_equal (step, s))
6511 continue;
6513 else
6515 /* Check whether delta is a multiple of step. */
6516 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6517 if (! tmp)
6518 continue;
6519 delta = tmp;
6522 break;
6525 if (!handled_component_p (ref))
6526 return NULL_TREE;
6529 /* We found the suitable array reference. So copy everything up to it,
6530 and replace the index. */
6532 pref = TREE_OPERAND (addr, 0);
6533 ret = copy_node (pref);
6534 pos = ret;
6536 while (pref != ref)
6538 pref = TREE_OPERAND (pref, 0);
6539 TREE_OPERAND (pos, 0) = copy_node (pref);
6540 pos = TREE_OPERAND (pos, 0);
6543 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6544 fold_convert (itype,
6545 TREE_OPERAND (pos, 1)),
6546 fold_convert (itype, delta));
6548 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
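/* An illustrative sketch, for a hypothetical int array a with a
   4-byte element size: &a[i] + 4 * d is rewritten as &a[i + d], and
   the constant form &a[i] + 8 becomes &a[i + 2], because 8 is an
   exact multiple of the step.  */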
6552 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6553 means A >= Y && A != MAX, but in this case we know that
6554 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6556 static tree
6557 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6559 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6561 if (TREE_CODE (bound) == LT_EXPR)
6562 a = TREE_OPERAND (bound, 0);
6563 else if (TREE_CODE (bound) == GT_EXPR)
6564 a = TREE_OPERAND (bound, 1);
6565 else
6566 return NULL_TREE;
6568 typea = TREE_TYPE (a);
6569 if (!INTEGRAL_TYPE_P (typea)
6570 && !POINTER_TYPE_P (typea))
6571 return NULL_TREE;
6573 if (TREE_CODE (ineq) == LT_EXPR)
6575 a1 = TREE_OPERAND (ineq, 1);
6576 y = TREE_OPERAND (ineq, 0);
6578 else if (TREE_CODE (ineq) == GT_EXPR)
6580 a1 = TREE_OPERAND (ineq, 0);
6581 y = TREE_OPERAND (ineq, 1);
6583 else
6584 return NULL_TREE;
6586 if (TREE_TYPE (a1) != typea)
6587 return NULL_TREE;
6589 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6590 if (!integer_onep (diff))
6591 return NULL_TREE;
6593 return fold_build2 (GE_EXPR, type, a, y);
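/* An illustrative example: given BOUND a < x and INEQ a + 1 > y,
   DIFF is (a + 1) - a, which folds to 1, so the inequality is
   weakened to a >= y; if the difference does not fold to exactly 1,
   NULL_TREE is returned and nothing changes.  */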
6596 /* Fold a sum or difference of at least one multiplication.
6597 Returns the folded tree or NULL if no simplification could be made. */
6599 static tree
6600 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6602 tree arg00, arg01, arg10, arg11;
6603 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6605 /* (A * C) +- (B * C) -> (A+-B) * C.
6606 (A * C) +- A -> A * (C+-1).
6607 We are most concerned about the case where C is a constant,
6608 but other combinations show up during loop reduction. Since
6609 it is not difficult, try all four possibilities. */
6611 if (TREE_CODE (arg0) == MULT_EXPR)
6613 arg00 = TREE_OPERAND (arg0, 0);
6614 arg01 = TREE_OPERAND (arg0, 1);
6616 else
6618 arg00 = arg0;
6619 if (!FLOAT_TYPE_P (type))
6620 arg01 = build_int_cst (type, 1);
6621 else
6622 arg01 = build_real (type, dconst1);
6624 if (TREE_CODE (arg1) == MULT_EXPR)
6626 arg10 = TREE_OPERAND (arg1, 0);
6627 arg11 = TREE_OPERAND (arg1, 1);
6629 else
6631 arg10 = arg1;
6632 if (!FLOAT_TYPE_P (type))
6633 arg11 = build_int_cst (type, 1);
6634 else
6635 arg11 = build_real (type, dconst1);
6637 same = NULL_TREE;
6639 if (operand_equal_p (arg01, arg11, 0))
6640 same = arg01, alt0 = arg00, alt1 = arg10;
6641 else if (operand_equal_p (arg00, arg10, 0))
6642 same = arg00, alt0 = arg01, alt1 = arg11;
6643 else if (operand_equal_p (arg00, arg11, 0))
6644 same = arg00, alt0 = arg01, alt1 = arg10;
6645 else if (operand_equal_p (arg01, arg10, 0))
6646 same = arg01, alt0 = arg00, alt1 = arg11;
6648 /* No identical multiplicands; see if we can find a common
6649 power-of-two factor in non-power-of-two multiplies. This
6650 can help in multi-dimensional array access. */
6651 else if (host_integerp (arg01, 0)
6652 && host_integerp (arg11, 0))
6654 HOST_WIDE_INT int01, int11, tmp;
6655 bool swap = false;
6656 tree maybe_same;
6657 int01 = TREE_INT_CST_LOW (arg01);
6658 int11 = TREE_INT_CST_LOW (arg11);
6660 /* Move min of absolute values to int11. */
6661 if ((int01 >= 0 ? int01 : -int01)
6662 < (int11 >= 0 ? int11 : -int11))
6664 tmp = int01, int01 = int11, int11 = tmp;
6665 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6666 maybe_same = arg01;
6667 swap = true;
6669 else
6670 maybe_same = arg11;
6672 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6674 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6675 build_int_cst (TREE_TYPE (arg00),
6676 int01 / int11));
6677 alt1 = arg10;
6678 same = maybe_same;
6679 if (swap)
6680 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6684 if (same)
6685 return fold_build2 (MULT_EXPR, type,
6686 fold_build2 (code, type,
6687 fold_convert (type, alt0),
6688 fold_convert (type, alt1)),
6689 fold_convert (type, same));
6691 return NULL_TREE;
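/* Illustrative examples of the factoring above, with hypothetical
   operands:

     a * c + b * c    becomes   (a + b) * c
     a * c - a        becomes   (c - 1) * a
     x * 12 + y * 4   becomes   (x * 3 + y) * 4

   the last by pulling the common power-of-two factor 4 out of the
   non-power-of-two multiplier 12.  */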
6694 /* Fold a unary expression of code CODE and type TYPE with operand
6695 OP0. Return the folded expression if folding is successful.
6696 Otherwise, return NULL_TREE. */
6698 tree
6699 fold_unary (enum tree_code code, tree type, tree op0)
6701 tree tem;
6702 tree arg0;
6703 enum tree_code_class kind = TREE_CODE_CLASS (code);
6705 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6706 && TREE_CODE_LENGTH (code) == 1);
6708 arg0 = op0;
6709 if (arg0)
6711 if (code == NOP_EXPR || code == CONVERT_EXPR
6712 || code == FLOAT_EXPR || code == ABS_EXPR)
6714 /* Don't use STRIP_NOPS, because signedness of argument type
6715 matters. */
6716 STRIP_SIGN_NOPS (arg0);
6718 else
6720 /* Strip any conversions that don't change the mode. This
6721 is safe for every expression, except for a comparison
6722 expression because its signedness is derived from its
6723 operands.
6725 Note that this is done as an internal manipulation within
6726 the constant folder, in order to find the simplest
6727 representation of the arguments so that their form can be
6728 studied. In any cases, the appropriate type conversions
6729 should be put back in the tree that will get out of the
6730 constant folder. */
6731 STRIP_NOPS (arg0);
6735 if (TREE_CODE_CLASS (code) == tcc_unary)
6737 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6738 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6739 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6740 else if (TREE_CODE (arg0) == COND_EXPR)
6742 tree arg01 = TREE_OPERAND (arg0, 1);
6743 tree arg02 = TREE_OPERAND (arg0, 2);
6744 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6745 arg01 = fold_build1 (code, type, arg01);
6746 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6747 arg02 = fold_build1 (code, type, arg02);
6748 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6749 arg01, arg02);
6751 /* If this was a conversion, and all we did was to move into
6752 inside the COND_EXPR, bring it back out. But leave it if
6753 it is a conversion from integer to integer and the
6754 result precision is no wider than a word since such a
6755 conversion is cheap and may be optimized away by combine,
6756 while it couldn't if it were outside the COND_EXPR. Then return
6757 so we don't get into an infinite recursion loop taking the
6758 conversion out and then back in. */
6760 if ((code == NOP_EXPR || code == CONVERT_EXPR
6761 || code == NON_LVALUE_EXPR)
6762 && TREE_CODE (tem) == COND_EXPR
6763 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6764 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6765 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6766 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6767 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6768 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6769 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6770 && (INTEGRAL_TYPE_P
6771 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6772 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6773 || flag_syntax_only))
6774 tem = build1 (code, type,
6775 build3 (COND_EXPR,
6776 TREE_TYPE (TREE_OPERAND
6777 (TREE_OPERAND (tem, 1), 0)),
6778 TREE_OPERAND (tem, 0),
6779 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6780 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6781 return tem;
6783 else if (COMPARISON_CLASS_P (arg0))
6785 if (TREE_CODE (type) == BOOLEAN_TYPE)
6787 arg0 = copy_node (arg0);
6788 TREE_TYPE (arg0) = type;
6789 return arg0;
6791 else if (TREE_CODE (type) != INTEGER_TYPE)
6792 return fold_build3 (COND_EXPR, type, arg0,
6793 fold_build1 (code, type,
6794 integer_one_node),
6795 fold_build1 (code, type,
6796 integer_zero_node));
6800 switch (code)
6802 case NOP_EXPR:
6803 case FLOAT_EXPR:
6804 case CONVERT_EXPR:
6805 case FIX_TRUNC_EXPR:
6806 case FIX_CEIL_EXPR:
6807 case FIX_FLOOR_EXPR:
6808 case FIX_ROUND_EXPR:
6809 if (TREE_TYPE (op0) == type)
6810 return op0;
6812 /* If we have (type) (a CMP b) and type is an integral type, return
6813 new expression involving the new type. */
6814 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6815 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6816 TREE_OPERAND (op0, 1));
6818 /* Handle cases of two conversions in a row. */
6819 if (TREE_CODE (op0) == NOP_EXPR
6820 || TREE_CODE (op0) == CONVERT_EXPR)
6822 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6823 tree inter_type = TREE_TYPE (op0);
6824 int inside_int = INTEGRAL_TYPE_P (inside_type);
6825 int inside_ptr = POINTER_TYPE_P (inside_type);
6826 int inside_float = FLOAT_TYPE_P (inside_type);
6827 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6828 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6829 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6830 int inter_int = INTEGRAL_TYPE_P (inter_type);
6831 int inter_ptr = POINTER_TYPE_P (inter_type);
6832 int inter_float = FLOAT_TYPE_P (inter_type);
6833 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6834 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6835 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6836 int final_int = INTEGRAL_TYPE_P (type);
6837 int final_ptr = POINTER_TYPE_P (type);
6838 int final_float = FLOAT_TYPE_P (type);
6839 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6840 unsigned int final_prec = TYPE_PRECISION (type);
6841 int final_unsignedp = TYPE_UNSIGNED (type);
6843 /* In addition to the cases of two conversions in a row
6844 handled below, if we are converting something to its own
6845 type via an object of identical or wider precision, neither
6846 conversion is needed. */
6847 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6848 && ((inter_int && final_int) || (inter_float && final_float))
6849 && inter_prec >= final_prec)
6850 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6852 /* Likewise, if the intermediate and final types are either both
6853 float or both integer, we don't need the middle conversion if
6854 it is wider than the final type and doesn't change the signedness
6855 (for integers). Avoid this if the final type is a pointer
6856 since then we sometimes need the inner conversion. Likewise if
6857 the outer has a precision not equal to the size of its mode. */
6858 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6859 || (inter_float && inside_float)
6860 || (inter_vec && inside_vec))
6861 && inter_prec >= inside_prec
6862 && (inter_float || inter_vec
6863 || inter_unsignedp == inside_unsignedp)
6864 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6865 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6866 && ! final_ptr
6867 && (! final_vec || inter_prec == inside_prec))
6868 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6870 /* If we have a sign-extension of a zero-extended value, we can
6871 replace that by a single zero-extension. */
6872 if (inside_int && inter_int && final_int
6873 && inside_prec < inter_prec && inter_prec < final_prec
6874 && inside_unsignedp && !inter_unsignedp)
6875 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6877 /* Two conversions in a row are not needed unless:
6878 - some conversion is floating-point (overstrict for now), or
6879 - some conversion is a vector (overstrict for now), or
6880 - the intermediate type is narrower than both initial and
6881 final, or
6882 - the intermediate type and innermost type differ in signedness,
6883 and the outermost type is wider than the intermediate, or
6884 - the initial type is a pointer type and the precisions of the
6885 intermediate and final types differ, or
6886 - the final type is a pointer type and the precisions of the
6887 initial and intermediate types differ. */
6888 if (! inside_float && ! inter_float && ! final_float
6889 && ! inside_vec && ! inter_vec && ! final_vec
6890 && (inter_prec > inside_prec || inter_prec > final_prec)
6891 && ! (inside_int && inter_int
6892 && inter_unsignedp != inside_unsignedp
6893 && inter_prec < final_prec)
6894 && ((inter_unsignedp && inter_prec > inside_prec)
6895 == (final_unsignedp && final_prec > inter_prec))
6896 && ! (inside_ptr && inter_prec != final_prec)
6897 && ! (final_ptr && inside_prec != inter_prec)
6898 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6899 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6900 && ! final_ptr)
6901 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
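/* Illustrative examples of the rules above, assuming 16-bit short,
   32-bit int and 64-bit long: (int) (short) x for an int x keeps
   the inner truncation, because the intermediate type is narrower
   than both ends, while (int) (long) s for a short s drops the
   intermediate widening entirely.  */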
6904 /* Handle (T *)&A.B.C for A being of type T and B and C
6905 living at offset zero. This occurs frequently in
6906 C++ upcasting and then accessing the base. */
6907 if (TREE_CODE (op0) == ADDR_EXPR
6908 && POINTER_TYPE_P (type)
6909 && handled_component_p (TREE_OPERAND (op0, 0)))
6911 HOST_WIDE_INT bitsize, bitpos;
6912 tree offset;
6913 enum machine_mode mode;
6914 int unsignedp, volatilep;
6915 tree base = TREE_OPERAND (op0, 0);
6916 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6917 &mode, &unsignedp, &volatilep, false);
6918 /* If the reference was to a (constant) zero offset, we can use
6919 the address of the base if it has the same base type
6920 as the result type. */
6921 if (! offset && bitpos == 0
6922 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6923 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6924 return fold_convert (type, build_fold_addr_expr (base));
6927 if (TREE_CODE (op0) == MODIFY_EXPR
6928 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6929 /* Detect assigning a bitfield. */
6930 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6931 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6933 /* Don't leave an assignment inside a conversion
6934 unless assigning a bitfield. */
6935 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6936 /* First do the assignment, then return converted constant. */
6937 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6938 TREE_NO_WARNING (tem) = 1;
6939 TREE_USED (tem) = 1;
6940 return tem;
6943 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6944 constant (if x has signed type, the sign bit cannot be set
6945 in c). This folds extension into the BIT_AND_EXPR. */
6946 if (INTEGRAL_TYPE_P (type)
6947 && TREE_CODE (type) != BOOLEAN_TYPE
6948 && TREE_CODE (op0) == BIT_AND_EXPR
6949 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6951 tree and = op0;
6952 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6953 int change = 0;
6955 if (TYPE_UNSIGNED (TREE_TYPE (and))
6956 || (TYPE_PRECISION (type)
6957 <= TYPE_PRECISION (TREE_TYPE (and))))
6958 change = 1;
6959 else if (TYPE_PRECISION (TREE_TYPE (and1))
6960 <= HOST_BITS_PER_WIDE_INT
6961 && host_integerp (and1, 1))
6963 unsigned HOST_WIDE_INT cst;
6965 cst = tree_low_cst (and1, 1);
6966 cst &= (HOST_WIDE_INT) -1
6967 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6968 change = (cst == 0);
6969 #ifdef LOAD_EXTEND_OP
6970 if (change
6971 && !flag_syntax_only
6972 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6973 == ZERO_EXTEND))
6975 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6976 and0 = fold_convert (uns, and0);
6977 and1 = fold_convert (uns, and1);
6979 #endif
6981 if (change)
6983 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6984 TREE_INT_CST_HIGH (and1));
6985 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6986 TREE_CONSTANT_OVERFLOW (and1));
6987 return fold_build2 (BIT_AND_EXPR, type,
6988 fold_convert (type, and0), tem);
6992 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6993 T2 being pointers to types of the same size. */
6994 if (POINTER_TYPE_P (type)
6995 && BINARY_CLASS_P (arg0)
6996 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6997 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6999 tree arg00 = TREE_OPERAND (arg0, 0);
7000 tree t0 = type;
7001 tree t1 = TREE_TYPE (arg00);
7002 tree tt0 = TREE_TYPE (t0);
7003 tree tt1 = TREE_TYPE (t1);
7004 tree s0 = TYPE_SIZE (tt0);
7005 tree s1 = TYPE_SIZE (tt1);
7007 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7008 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7009 TREE_OPERAND (arg0, 1));
7012 tem = fold_convert_const (code, type, arg0);
7013 return tem ? tem : NULL_TREE;
7015 case VIEW_CONVERT_EXPR:
7016 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7017 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7018 return NULL_TREE;
7020 case NEGATE_EXPR:
7021 if (negate_expr_p (arg0))
7022 return fold_convert (type, negate_expr (arg0));
7023 return NULL_TREE;
7025 case ABS_EXPR:
7026 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7027 return fold_abs_const (arg0, type);
7028 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7029 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7030 /* Convert fabs((double)float) into (double)fabsf(float). */
7031 else if (TREE_CODE (arg0) == NOP_EXPR
7032 && TREE_CODE (type) == REAL_TYPE)
7034 tree targ0 = strip_float_extensions (arg0);
7035 if (targ0 != arg0)
7036 return fold_convert (type, fold_build1 (ABS_EXPR,
7037 TREE_TYPE (targ0),
7038 targ0));
7040 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7041 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7042 return arg0;
7044 /* Strip sign ops from argument. */
7045 if (TREE_CODE (type) == REAL_TYPE)
7047 tem = fold_strip_sign_ops (arg0);
7048 if (tem)
7049 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7051 return NULL_TREE;
7053 case CONJ_EXPR:
7054 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7055 return fold_convert (type, arg0);
7056 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7057 return build2 (COMPLEX_EXPR, type,
7058 TREE_OPERAND (arg0, 0),
7059 negate_expr (TREE_OPERAND (arg0, 1)));
7060 else if (TREE_CODE (arg0) == COMPLEX_CST)
7061 return build_complex (type, TREE_REALPART (arg0),
7062 negate_expr (TREE_IMAGPART (arg0)));
7063 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7064 return fold_build2 (TREE_CODE (arg0), type,
7065 fold_build1 (CONJ_EXPR, type,
7066 TREE_OPERAND (arg0, 0)),
7067 fold_build1 (CONJ_EXPR, type,
7068 TREE_OPERAND (arg0, 1)));
7069 else if (TREE_CODE (arg0) == CONJ_EXPR)
7070 return TREE_OPERAND (arg0, 0);
7071 return NULL_TREE;
7073 case BIT_NOT_EXPR:
7074 if (TREE_CODE (arg0) == INTEGER_CST)
7075 return fold_not_const (arg0, type);
7076 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7077 return TREE_OPERAND (arg0, 0);
7078 /* Convert ~ (-A) to A - 1. */
7079 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7080 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7081 build_int_cst (type, 1));
7082 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7083 else if (INTEGRAL_TYPE_P (type)
7084 && ((TREE_CODE (arg0) == MINUS_EXPR
7085 && integer_onep (TREE_OPERAND (arg0, 1)))
7086 || (TREE_CODE (arg0) == PLUS_EXPR
7087 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7088 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7089 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7090 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7091 && (tem = fold_unary (BIT_NOT_EXPR, type,
7092 fold_convert (type,
7093 TREE_OPERAND (arg0, 0)))))
7094 return fold_build2 (BIT_XOR_EXPR, type, tem,
7095 fold_convert (type, TREE_OPERAND (arg0, 1)));
7096 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7097 && (tem = fold_unary (BIT_NOT_EXPR, type,
7098 fold_convert (type,
7099 TREE_OPERAND (arg0, 1)))))
7100 return fold_build2 (BIT_XOR_EXPR, type,
7101 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7103 return NULL_TREE;
7105 case TRUTH_NOT_EXPR:
7106 /* The argument to invert_truthvalue must have Boolean type. */
7107 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7108 arg0 = fold_convert (boolean_type_node, arg0);
7110 /* Note that the operand of this must be an int
7111 and its values must be 0 or 1.
7112 ("true" is a fixed value perhaps depending on the language,
7113 but we don't handle values other than 1 correctly yet.) */
7114 tem = invert_truthvalue (arg0);
7115 /* Avoid infinite recursion. */
7116 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7117 return NULL_TREE;
7118 return fold_convert (type, tem);
7120 case REALPART_EXPR:
7121 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7122 return NULL_TREE;
7123 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7124 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7125 TREE_OPERAND (arg0, 1));
7126 else if (TREE_CODE (arg0) == COMPLEX_CST)
7127 return TREE_REALPART (arg0);
7128 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7129 return fold_build2 (TREE_CODE (arg0), type,
7130 fold_build1 (REALPART_EXPR, type,
7131 TREE_OPERAND (arg0, 0)),
7132 fold_build1 (REALPART_EXPR, type,
7133 TREE_OPERAND (arg0, 1)));
7134 return NULL_TREE;
7136 case IMAGPART_EXPR:
7137 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7138 return fold_convert (type, integer_zero_node);
7139 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7140 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7141 TREE_OPERAND (arg0, 0));
7142 else if (TREE_CODE (arg0) == COMPLEX_CST)
7143 return TREE_IMAGPART (arg0);
7144 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7145 return fold_build2 (TREE_CODE (arg0), type,
7146 fold_build1 (IMAGPART_EXPR, type,
7147 TREE_OPERAND (arg0, 0)),
7148 fold_build1 (IMAGPART_EXPR, type,
7149 TREE_OPERAND (arg0, 1)));
7150 return NULL_TREE;
7152 default:
7153 return NULL_TREE;
7154 } /* switch (code) */
7157 /* Fold a binary expression of code CODE and type TYPE with operands
7158 OP0 and OP1. Return the folded expression if folding is
7159 successful. Otherwise, return NULL_TREE. */
7161 tree
7162 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7164 tree t1 = NULL_TREE;
7165 tree tem;
7166 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7167 enum tree_code_class kind = TREE_CODE_CLASS (code);
7169 /* WINS will be nonzero when the switch is done
7170 if all operands are constant. */
7171 int wins = 1;
7173 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7174 && TREE_CODE_LENGTH (code) == 2);
7176 arg0 = op0;
7177 arg1 = op1;
7179 if (arg0)
7181 tree subop;
7183 /* Strip any conversions that don't change the mode. This is
7184 safe for every expression, except for a comparison expression
7185 because its signedness is derived from its operands. So, in
7186 the latter case, only strip conversions that don't change the
7187 signedness.
7189 Note that this is done as an internal manipulation within the
7190 constant folder, in order to find the simplest representation
7191 of the arguments so that their form can be studied. In any
7192 cases, the appropriate type conversions should be put back in
7193 the tree that will get out of the constant folder. */
7194 if (kind == tcc_comparison)
7195 STRIP_SIGN_NOPS (arg0);
7196 else
7197 STRIP_NOPS (arg0);
7199 if (TREE_CODE (arg0) == COMPLEX_CST)
7200 subop = TREE_REALPART (arg0);
7201 else
7202 subop = arg0;
7204 if (TREE_CODE (subop) != INTEGER_CST
7205 && TREE_CODE (subop) != REAL_CST)
7206 /* Note that TREE_CONSTANT isn't enough:
7207 static var addresses are constant but we can't
7208 do arithmetic on them. */
7209 wins = 0;
7212 if (arg1)
7214 tree subop;
7216 /* Strip any conversions that don't change the mode. This is
7217 safe for every expression, except for a comparison expression
7218 because its signedness is derived from its operands. So, in
7219 the latter case, only strip conversions that don't change the
7220 signedness.
7222 Note that this is done as an internal manipulation within the
7223 constant folder, in order to find the simplest representation
7224 of the arguments so that their form can be studied. In any
7225 cases, the appropriate type conversions should be put back in
7226 the tree that will get out of the constant folder. */
7227 if (kind == tcc_comparison)
7228 STRIP_SIGN_NOPS (arg1);
7229 else
7230 STRIP_NOPS (arg1);
7232 if (TREE_CODE (arg1) == COMPLEX_CST)
7233 subop = TREE_REALPART (arg1);
7234 else
7235 subop = arg1;
7237 if (TREE_CODE (subop) != INTEGER_CST
7238 && TREE_CODE (subop) != REAL_CST)
7239 /* Note that TREE_CONSTANT isn't enough:
7240 static var addresses are constant but we can't
7241 do arithmetic on them. */
7242 wins = 0;
7245 /* If this is a commutative operation, and ARG0 is a constant, move it
7246 to ARG1 to reduce the number of tests below. */
7247 if (commutative_tree_code (code)
7248 && tree_swap_operands_p (arg0, arg1, true))
7249 return fold_build2 (code, type, op1, op0);
7251 /* Now WINS is set as described above,
7252 ARG0 is the first operand of EXPR,
7253 and ARG1 is the second operand (if it has more than one operand).
7255 First check for cases where an arithmetic operation is applied to a
7256 compound, conditional, or comparison operation. Push the arithmetic
7257 operation inside the compound or conditional to see if any folding
7258 can then be done. Convert comparison to conditional for this purpose.
7259 This also optimizes non-constant cases that used to be done in
7260 expand_expr.
7262 Before we do that, see if this is a BIT_AND_EXPR, BIT_IOR_EXPR,
7263 EQ_EXPR or NE_EXPR where one operand is a truth value and the other
7264 is a truth value or a BIT_AND_EXPR with the constant 1. In that case, the
7265 code below would make the expression more complex. Change it to a
7266 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7267 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
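      /* For example, (a < b) & (c < d) becomes the TRUTH_AND_EXPR of the
         two comparisons, and (a < b) == (c < d) becomes the inversion of
         their TRUTH_XOR_EXPR, since both operands are known to be 0 or 1.  */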
7269 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7270 || code == EQ_EXPR || code == NE_EXPR)
7271 && ((truth_value_p (TREE_CODE (arg0))
7272 && (truth_value_p (TREE_CODE (arg1))
7273 || (TREE_CODE (arg1) == BIT_AND_EXPR
7274 && integer_onep (TREE_OPERAND (arg1, 1)))))
7275 || (truth_value_p (TREE_CODE (arg1))
7276 && (truth_value_p (TREE_CODE (arg0))
7277 || (TREE_CODE (arg0) == BIT_AND_EXPR
7278 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7280 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7281 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7282 : TRUTH_XOR_EXPR,
7283 boolean_type_node,
7284 fold_convert (boolean_type_node, arg0),
7285 fold_convert (boolean_type_node, arg1));
7287 if (code == EQ_EXPR)
7288 tem = invert_truthvalue (tem);
7290 return fold_convert (type, tem);
7293 if (TREE_CODE_CLASS (code) == tcc_binary
7294 || TREE_CODE_CLASS (code) == tcc_comparison)
7296 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7297 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7298 fold_build2 (code, type,
7299 TREE_OPERAND (arg0, 1), op1));
7300 if (TREE_CODE (arg1) == COMPOUND_EXPR
7301 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7302 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7303 fold_build2 (code, type,
7304 op0, TREE_OPERAND (arg1, 1)));
7306 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7308 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7309 arg0, arg1,
7310 /*cond_first_p=*/1);
7311 if (tem != NULL_TREE)
7312 return tem;
7315 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7317 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7318 arg1, arg0,
7319 /*cond_first_p=*/0);
7320 if (tem != NULL_TREE)
7321 return tem;
7325 switch (code)
7327 case PLUS_EXPR:
7328 /* A + (-B) -> A - B */
7329 if (TREE_CODE (arg1) == NEGATE_EXPR)
7330 return fold_build2 (MINUS_EXPR, type,
7331 fold_convert (type, arg0),
7332 fold_convert (type, TREE_OPERAND (arg1, 0)));
7333 /* (-A) + B -> B - A */
7334 if (TREE_CODE (arg0) == NEGATE_EXPR
7335 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7336 return fold_build2 (MINUS_EXPR, type,
7337 fold_convert (type, arg1),
7338 fold_convert (type, TREE_OPERAND (arg0, 0)));
7339 /* Convert ~A + 1 to -A. */
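      /* This is the two's-complement negation identity, e.g. ~5 + 1 == -5.  */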
7340 if (INTEGRAL_TYPE_P (type)
7341 && TREE_CODE (arg0) == BIT_NOT_EXPR
7342 && integer_onep (arg1))
7343 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7345 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
7346 same or one. */
7347 if ((TREE_CODE (arg0) == MULT_EXPR
7348 || TREE_CODE (arg1) == MULT_EXPR)
7349 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7351 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7352 if (tem)
7353 return tem;
7356 if (! FLOAT_TYPE_P (type))
7358 if (integer_zerop (arg1))
7359 return non_lvalue (fold_convert (type, arg0));
7361 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7362 with a constant, and the two constants have no bits in common,
7363 we should treat this as a BIT_IOR_EXPR since this may produce more
7364 simplifications. */
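          /* For example, (X & 0xF0) + (Y & 0x0F): the masks share no bits,
             so no carries can occur and the addition behaves exactly like
             a bitwise OR.  */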
7365 if (TREE_CODE (arg0) == BIT_AND_EXPR
7366 && TREE_CODE (arg1) == BIT_AND_EXPR
7367 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7368 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7369 && integer_zerop (const_binop (BIT_AND_EXPR,
7370 TREE_OPERAND (arg0, 1),
7371 TREE_OPERAND (arg1, 1), 0)))
7373 code = BIT_IOR_EXPR;
7374 goto bit_ior;
7377 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7378 (plus (plus (mult) (mult)) (foo)) so that we can
7379 take advantage of the factoring cases below. */
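          /* E.g. ((a * b) + c) + (d * e) is rewritten here as
             ((a * b) + (d * e)) + c.  */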
7380 if (((TREE_CODE (arg0) == PLUS_EXPR
7381 || TREE_CODE (arg0) == MINUS_EXPR)
7382 && TREE_CODE (arg1) == MULT_EXPR)
7383 || ((TREE_CODE (arg1) == PLUS_EXPR
7384 || TREE_CODE (arg1) == MINUS_EXPR)
7385 && TREE_CODE (arg0) == MULT_EXPR))
7387 tree parg0, parg1, parg, marg;
7388 enum tree_code pcode;
7390 if (TREE_CODE (arg1) == MULT_EXPR)
7391 parg = arg0, marg = arg1;
7392 else
7393 parg = arg1, marg = arg0;
7394 pcode = TREE_CODE (parg);
7395 parg0 = TREE_OPERAND (parg, 0);
7396 parg1 = TREE_OPERAND (parg, 1);
7397 STRIP_NOPS (parg0);
7398 STRIP_NOPS (parg1);
7400 if (TREE_CODE (parg0) == MULT_EXPR
7401 && TREE_CODE (parg1) != MULT_EXPR)
7402 return fold_build2 (pcode, type,
7403 fold_build2 (PLUS_EXPR, type,
7404 fold_convert (type, parg0),
7405 fold_convert (type, marg)),
7406 fold_convert (type, parg1));
7407 if (TREE_CODE (parg0) != MULT_EXPR
7408 && TREE_CODE (parg1) == MULT_EXPR)
7409 return fold_build2 (PLUS_EXPR, type,
7410 fold_convert (type, parg0),
7411 fold_build2 (pcode, type,
7412 fold_convert (type, marg),
7413 fold_convert (type,
7414 parg1)));
7417 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7418 of the array. The loop optimizer sometimes produces this kind of
7419 expression. */
7420 if (TREE_CODE (arg0) == ADDR_EXPR)
7422 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7423 if (tem)
7424 return fold_convert (type, tem);
7426 else if (TREE_CODE (arg1) == ADDR_EXPR)
7428 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7429 if (tem)
7430 return fold_convert (type, tem);
7433 else
7435 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7436 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7437 return non_lvalue (fold_convert (type, arg0));
7439 /* Likewise if the operands are reversed. */
7440 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7441 return non_lvalue (fold_convert (type, arg1));
7443 /* Convert X + -C into X - C. */
7444 if (TREE_CODE (arg1) == REAL_CST
7445 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7447 tem = fold_negate_const (arg1, type);
7448 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7449 return fold_build2 (MINUS_EXPR, type,
7450 fold_convert (type, arg0),
7451 fold_convert (type, tem));
7454 if (flag_unsafe_math_optimizations
7455 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7456 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7457 && (tem = distribute_real_division (code, type, arg0, arg1)))
7458 return tem;
7460 /* Convert x+x into x*2.0. */
7461 if (operand_equal_p (arg0, arg1, 0)
7462 && SCALAR_FLOAT_TYPE_P (type))
7463 return fold_build2 (MULT_EXPR, type, arg0,
7464 build_real (type, dconst2));
7466 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7467 if (flag_unsafe_math_optimizations
7468 && TREE_CODE (arg1) == PLUS_EXPR
7469 && TREE_CODE (arg0) != MULT_EXPR)
7471 tree tree10 = TREE_OPERAND (arg1, 0);
7472 tree tree11 = TREE_OPERAND (arg1, 1);
7473 if (TREE_CODE (tree11) == MULT_EXPR
7474 && TREE_CODE (tree10) == MULT_EXPR)
7476 tree tree0;
7477 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7478 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7481 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7482 if (flag_unsafe_math_optimizations
7483 && TREE_CODE (arg0) == PLUS_EXPR
7484 && TREE_CODE (arg1) != MULT_EXPR)
7486 tree tree00 = TREE_OPERAND (arg0, 0);
7487 tree tree01 = TREE_OPERAND (arg0, 1);
7488 if (TREE_CODE (tree01) == MULT_EXPR
7489 && TREE_CODE (tree00) == MULT_EXPR)
7491 tree tree0;
7492 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7493 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7498 bit_rotate:
7499 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7500 is a rotate of A by C1 bits. */
7501 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7502 is a rotate of A by B bits. */
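      /* For example, with a 32-bit unsigned A, both (A << 3) + (A >> 29)
         and (A << B) + (A >> (32 - B)) are recognized here as left
         rotates.  */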
7504 enum tree_code code0, code1;
7505 code0 = TREE_CODE (arg0);
7506 code1 = TREE_CODE (arg1);
7507 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7508 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7509 && operand_equal_p (TREE_OPERAND (arg0, 0),
7510 TREE_OPERAND (arg1, 0), 0)
7511 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7513 tree tree01, tree11;
7514 enum tree_code code01, code11;
7516 tree01 = TREE_OPERAND (arg0, 1);
7517 tree11 = TREE_OPERAND (arg1, 1);
7518 STRIP_NOPS (tree01);
7519 STRIP_NOPS (tree11);
7520 code01 = TREE_CODE (tree01);
7521 code11 = TREE_CODE (tree11);
7522 if (code01 == INTEGER_CST
7523 && code11 == INTEGER_CST
7524 && TREE_INT_CST_HIGH (tree01) == 0
7525 && TREE_INT_CST_HIGH (tree11) == 0
7526 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7527 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7528 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7529 code0 == LSHIFT_EXPR ? tree01 : tree11);
7530 else if (code11 == MINUS_EXPR)
7532 tree tree110, tree111;
7533 tree110 = TREE_OPERAND (tree11, 0);
7534 tree111 = TREE_OPERAND (tree11, 1);
7535 STRIP_NOPS (tree110);
7536 STRIP_NOPS (tree111);
7537 if (TREE_CODE (tree110) == INTEGER_CST
7538 && 0 == compare_tree_int (tree110,
7539 TYPE_PRECISION
7540 (TREE_TYPE (TREE_OPERAND
7541 (arg0, 0))))
7542 && operand_equal_p (tree01, tree111, 0))
7543 return build2 ((code0 == LSHIFT_EXPR
7544 ? LROTATE_EXPR
7545 : RROTATE_EXPR),
7546 type, TREE_OPERAND (arg0, 0), tree01);
7548 else if (code01 == MINUS_EXPR)
7550 tree tree010, tree011;
7551 tree010 = TREE_OPERAND (tree01, 0);
7552 tree011 = TREE_OPERAND (tree01, 1);
7553 STRIP_NOPS (tree010);
7554 STRIP_NOPS (tree011);
7555 if (TREE_CODE (tree010) == INTEGER_CST
7556 && 0 == compare_tree_int (tree010,
7557 TYPE_PRECISION
7558 (TREE_TYPE (TREE_OPERAND
7559 (arg0, 0))))
7560 && operand_equal_p (tree11, tree011, 0))
7561 return build2 ((code0 != LSHIFT_EXPR
7562 ? LROTATE_EXPR
7563 : RROTATE_EXPR),
7564 type, TREE_OPERAND (arg0, 0), tree11);
7569 associate:
7570 /* In most languages, we can't reassociate floating-point operations
7571 across parentheses. Rather than remember where the parentheses were,
7572 we don't associate floats at all, unless the user has specified
7573 -funsafe-math-optimizations. */
7575 if (! wins
7576 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7578 tree var0, con0, lit0, minus_lit0;
7579 tree var1, con1, lit1, minus_lit1;
7581 /* Split both trees into variables, constants, and literals. Then
7582 associate each group together, the constants with literals,
7583 then the result with variables. This increases the chances of
7584 literals being recombined later and of generating relocatable
7585 expressions for the sum of a constant and literal. */
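          /* E.g. (x + 1) + (y + 2) splits into the variables x and y and
             the literals 1 and 2, which recombine to (x + y) + 3.  */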
7586 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7587 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7588 code == MINUS_EXPR);
7590 /* Only do something if we found more than two objects. Otherwise,
7591 nothing has changed and we risk infinite recursion. */
7592 if (2 < ((var0 != 0) + (var1 != 0)
7593 + (con0 != 0) + (con1 != 0)
7594 + (lit0 != 0) + (lit1 != 0)
7595 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7597 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7598 if (code == MINUS_EXPR)
7599 code = PLUS_EXPR;
7601 var0 = associate_trees (var0, var1, code, type);
7602 con0 = associate_trees (con0, con1, code, type);
7603 lit0 = associate_trees (lit0, lit1, code, type);
7604 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7606 /* Preserve the MINUS_EXPR if the negative part of the literal is
7607 greater than the positive part. Otherwise, the multiplicative
7608 folding code (i.e. extract_muldiv) may be fooled when
7609 unsigned constants are subtracted, as in the following
7610 example: ((X*2 + 4) - 8U)/2. */
7611 if (minus_lit0 && lit0)
7613 if (TREE_CODE (lit0) == INTEGER_CST
7614 && TREE_CODE (minus_lit0) == INTEGER_CST
7615 && tree_int_cst_lt (lit0, minus_lit0))
7617 minus_lit0 = associate_trees (minus_lit0, lit0,
7618 MINUS_EXPR, type);
7619 lit0 = 0;
7621 else
7623 lit0 = associate_trees (lit0, minus_lit0,
7624 MINUS_EXPR, type);
7625 minus_lit0 = 0;
7628 if (minus_lit0)
7630 if (con0 == 0)
7631 return fold_convert (type,
7632 associate_trees (var0, minus_lit0,
7633 MINUS_EXPR, type));
7634 else
7636 con0 = associate_trees (con0, minus_lit0,
7637 MINUS_EXPR, type);
7638 return fold_convert (type,
7639 associate_trees (var0, con0,
7640 PLUS_EXPR, type));
7644 con0 = associate_trees (con0, lit0, code, type);
7645 return fold_convert (type, associate_trees (var0, con0,
7646 code, type));
7650 binary:
7651 if (wins)
7652 t1 = const_binop (code, arg0, arg1, 0);
7653 if (t1 != NULL_TREE)
7655 /* The return value should always have
7656 the same type as the original expression. */
7657 if (TREE_TYPE (t1) != type)
7658 t1 = fold_convert (type, t1);
7660 return t1;
7662 return NULL_TREE;
7664 case MINUS_EXPR:
7665 /* A - (-B) -> A + B */
7666 if (TREE_CODE (arg1) == NEGATE_EXPR)
7667 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7668 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7669 if (TREE_CODE (arg0) == NEGATE_EXPR
7670 && (FLOAT_TYPE_P (type)
7671 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7672 && negate_expr_p (arg1)
7673 && reorder_operands_p (arg0, arg1))
7674 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7675 TREE_OPERAND (arg0, 0));
7676 /* Convert -A - 1 to ~A. */
7677 if (INTEGRAL_TYPE_P (type)
7678 && TREE_CODE (arg0) == NEGATE_EXPR
7679 && integer_onep (arg1))
7680 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7682 /* Convert -1 - A to ~A. */
7683 if (INTEGRAL_TYPE_P (type)
7684 && integer_all_onesp (arg0))
7685 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7687 if (! FLOAT_TYPE_P (type))
7689 if (! wins && integer_zerop (arg0))
7690 return negate_expr (fold_convert (type, arg1));
7691 if (integer_zerop (arg1))
7692 return non_lvalue (fold_convert (type, arg0));
7694 /* Fold A - (A & B) into ~B & A. */
7695 if (!TREE_SIDE_EFFECTS (arg0)
7696 && TREE_CODE (arg1) == BIT_AND_EXPR)
7698 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7699 return fold_build2 (BIT_AND_EXPR, type,
7700 fold_build1 (BIT_NOT_EXPR, type,
7701 TREE_OPERAND (arg1, 0)),
7702 arg0);
7703 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7704 return fold_build2 (BIT_AND_EXPR, type,
7705 fold_build1 (BIT_NOT_EXPR, type,
7706 TREE_OPERAND (arg1, 1)),
7707 arg0);
7710 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7711 any power of 2 minus 1. */
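          /* E.g. with B == 3 and A == 5: (5 & ~3) - (5 & 3) == 4 - 1 == 3,
             and likewise (5 ^ 3) - 3 == 6 - 3 == 3.  */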
7712 if (TREE_CODE (arg0) == BIT_AND_EXPR
7713 && TREE_CODE (arg1) == BIT_AND_EXPR
7714 && operand_equal_p (TREE_OPERAND (arg0, 0),
7715 TREE_OPERAND (arg1, 0), 0))
7717 tree mask0 = TREE_OPERAND (arg0, 1);
7718 tree mask1 = TREE_OPERAND (arg1, 1);
7719 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7721 if (operand_equal_p (tem, mask1, 0))
7723 tem = fold_build2 (BIT_XOR_EXPR, type,
7724 TREE_OPERAND (arg0, 0), mask1);
7725 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7730 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7731 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7732 return non_lvalue (fold_convert (type, arg0));
7734 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7735 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7736 (-ARG1 + ARG0) reduces to -ARG1. */
7737 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7738 return negate_expr (fold_convert (type, arg1));
7740 /* Fold &x - &x. This can happen from &x.foo - &x.
7741 This is unsafe for certain floats even in non-IEEE formats.
7742 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7743 Also note that operand_equal_p is always false if an operand
7744 is volatile. */
7746 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7747 && operand_equal_p (arg0, arg1, 0))
7748 return fold_convert (type, integer_zero_node);
7750 /* A - B -> A + (-B) if B is easily negatable. */
7751 if (!wins && negate_expr_p (arg1)
7752 && ((FLOAT_TYPE_P (type)
7753 /* Avoid this transformation if B is a positive REAL_CST. */
7754 && (TREE_CODE (arg1) != REAL_CST
7755 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7756 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7757 return fold_build2 (PLUS_EXPR, type,
7758 fold_convert (type, arg0),
7759 fold_convert (type, negate_expr (arg1)));
7761 /* Try folding difference of addresses. */
7763 HOST_WIDE_INT diff;
7765 if ((TREE_CODE (arg0) == ADDR_EXPR
7766 || TREE_CODE (arg1) == ADDR_EXPR)
7767 && ptr_difference_const (arg0, arg1, &diff))
7768 return build_int_cst_type (type, diff);
7771 /* Fold &a[i] - &a[j] to (i-j) scaled by the element size. */
7772 if (TREE_CODE (arg0) == ADDR_EXPR
7773 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7774 && TREE_CODE (arg1) == ADDR_EXPR
7775 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7777 tree aref0 = TREE_OPERAND (arg0, 0);
7778 tree aref1 = TREE_OPERAND (arg1, 0);
7779 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7780 TREE_OPERAND (aref1, 0), 0))
7782 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7783 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7784 tree esz = array_ref_element_size (aref0);
7785 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7786 return fold_build2 (MULT_EXPR, type, diff,
7787 fold_convert (type, esz));
7792 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7793 of the array. The loop optimizer sometimes produces this kind of
7794 expression. */
7795 if (TREE_CODE (arg0) == ADDR_EXPR)
7797 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7798 if (tem)
7799 return fold_convert (type, tem);
7802 if (flag_unsafe_math_optimizations
7803 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7804 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7805 && (tem = distribute_real_division (code, type, arg0, arg1)))
7806 return tem;
7808 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
7809 same or one. */
7810 if ((TREE_CODE (arg0) == MULT_EXPR
7811 || TREE_CODE (arg1) == MULT_EXPR)
7812 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7814 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7815 if (tem)
7816 return tem;
7819 goto associate;
7821 case MULT_EXPR:
7822 /* (-A) * (-B) -> A * B */
7823 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7824 return fold_build2 (MULT_EXPR, type,
7825 TREE_OPERAND (arg0, 0),
7826 negate_expr (arg1));
7827 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7828 return fold_build2 (MULT_EXPR, type,
7829 negate_expr (arg0),
7830 TREE_OPERAND (arg1, 0));
7832 if (! FLOAT_TYPE_P (type))
7834 if (integer_zerop (arg1))
7835 return omit_one_operand (type, arg1, arg0);
7836 if (integer_onep (arg1))
7837 return non_lvalue (fold_convert (type, arg0));
7838 /* Transform x * -1 into -x. */
7839 if (integer_all_onesp (arg1))
7840 return fold_convert (type, negate_expr (arg0));
7842 /* (a * (1 << b)) is (a << b) */
7843 if (TREE_CODE (arg1) == LSHIFT_EXPR
7844 && integer_onep (TREE_OPERAND (arg1, 0)))
7845 return fold_build2 (LSHIFT_EXPR, type, arg0,
7846 TREE_OPERAND (arg1, 1));
7847 if (TREE_CODE (arg0) == LSHIFT_EXPR
7848 && integer_onep (TREE_OPERAND (arg0, 0)))
7849 return fold_build2 (LSHIFT_EXPR, type, arg1,
7850 TREE_OPERAND (arg0, 1));
7852 if (TREE_CODE (arg1) == INTEGER_CST
7853 && 0 != (tem = extract_muldiv (op0,
7854 fold_convert (type, arg1),
7855 code, NULL_TREE)))
7856 return fold_convert (type, tem);
7859 else
7861 /* Maybe fold x * 0 to 0. The expressions aren't the same
7862 when x is NaN, since x * 0 is also NaN. Nor are they the
7863 same in modes with signed zeros, since multiplying a
7864 negative value by 0 gives -0, not +0. */
7865 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7866 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7867 && real_zerop (arg1))
7868 return omit_one_operand (type, arg1, arg0);
7869 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7870 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7871 && real_onep (arg1))
7872 return non_lvalue (fold_convert (type, arg0));
7874 /* Transform x * -1.0 into -x. */
7875 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7876 && real_minus_onep (arg1))
7877 return fold_convert (type, negate_expr (arg0));
7879 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7880 if (flag_unsafe_math_optimizations
7881 && TREE_CODE (arg0) == RDIV_EXPR
7882 && TREE_CODE (arg1) == REAL_CST
7883 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7885 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7886 arg1, 0);
7887 if (tem)
7888 return fold_build2 (RDIV_EXPR, type, tem,
7889 TREE_OPERAND (arg0, 1));
7892 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7893 if (operand_equal_p (arg0, arg1, 0))
7895 tree tem = fold_strip_sign_ops (arg0);
7896 if (tem != NULL_TREE)
7898 tem = fold_convert (type, tem);
7899 return fold_build2 (MULT_EXPR, type, tem, tem);
7903 if (flag_unsafe_math_optimizations)
7905 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7906 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7908 /* Optimizations of root(...)*root(...). */
7909 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7911 tree rootfn, arg, arglist;
7912 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7913 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7915 /* Optimize sqrt(x)*sqrt(x) as x. */
7916 if (BUILTIN_SQRT_P (fcode0)
7917 && operand_equal_p (arg00, arg10, 0)
7918 && ! HONOR_SNANS (TYPE_MODE (type)))
7919 return arg00;
7921 /* Optimize root(x)*root(y) as root(x*y). */
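              /* Note that this is only valid for nonnegative arguments
                 (e.g. sqrt(-1.0)*sqrt(-1.0) is NaN while sqrt(-1.0*-1.0)
                 is 1.0), hence the flag_unsafe_math_optimizations guard
                 above.  */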
7922 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7923 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7924 arglist = build_tree_list (NULL_TREE, arg);
7925 return build_function_call_expr (rootfn, arglist);
7928 /* Optimize expN(x)*expN(y) as expN(x+y). */
7929 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7931 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7932 tree arg = fold_build2 (PLUS_EXPR, type,
7933 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7934 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7935 tree arglist = build_tree_list (NULL_TREE, arg);
7936 return build_function_call_expr (expfn, arglist);
7939 /* Optimizations of pow(...)*pow(...). */
7940 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7941 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7942 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7944 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7945 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7946 1)));
7947 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7948 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7949 1)));
7951 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7952 if (operand_equal_p (arg01, arg11, 0))
7954 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7955 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7956 tree arglist = tree_cons (NULL_TREE, arg,
7957 build_tree_list (NULL_TREE,
7958 arg01));
7959 return build_function_call_expr (powfn, arglist);
7962 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
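              /* E.g. pow(x,2.0)*pow(x,3.0) becomes pow(x,5.0).  */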
7963 if (operand_equal_p (arg00, arg10, 0))
7965 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7966 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7967 tree arglist = tree_cons (NULL_TREE, arg00,
7968 build_tree_list (NULL_TREE,
7969 arg));
7970 return build_function_call_expr (powfn, arglist);
7974 /* Optimize tan(x)*cos(x) as sin(x). */
7975 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7976 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7977 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7978 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7979 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7980 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7981 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7982 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7984 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7986 if (sinfn != NULL_TREE)
7987 return build_function_call_expr (sinfn,
7988 TREE_OPERAND (arg0, 1));
7991 /* Optimize x*pow(x,c) as pow(x,c+1). */
7992 if (fcode1 == BUILT_IN_POW
7993 || fcode1 == BUILT_IN_POWF
7994 || fcode1 == BUILT_IN_POWL)
7996 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7997 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7998 1)));
7999 if (TREE_CODE (arg11) == REAL_CST
8000 && ! TREE_CONSTANT_OVERFLOW (arg11)
8001 && operand_equal_p (arg0, arg10, 0))
8003 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8004 REAL_VALUE_TYPE c;
8005 tree arg, arglist;
8007 c = TREE_REAL_CST (arg11);
8008 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8009 arg = build_real (type, c);
8010 arglist = build_tree_list (NULL_TREE, arg);
8011 arglist = tree_cons (NULL_TREE, arg0, arglist);
8012 return build_function_call_expr (powfn, arglist);
8016 /* Optimize pow(x,c)*x as pow(x,c+1). */
8017 if (fcode0 == BUILT_IN_POW
8018 || fcode0 == BUILT_IN_POWF
8019 || fcode0 == BUILT_IN_POWL)
8021 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8022 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8023 1)));
8024 if (TREE_CODE (arg01) == REAL_CST
8025 && ! TREE_CONSTANT_OVERFLOW (arg01)
8026 && operand_equal_p (arg1, arg00, 0))
8028 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8029 REAL_VALUE_TYPE c;
8030 tree arg, arglist;
8032 c = TREE_REAL_CST (arg01);
8033 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8034 arg = build_real (type, c);
8035 arglist = build_tree_list (NULL_TREE, arg);
8036 arglist = tree_cons (NULL_TREE, arg1, arglist);
8037 return build_function_call_expr (powfn, arglist);
8041 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8042 if (! optimize_size
8043 && operand_equal_p (arg0, arg1, 0))
8045 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8047 if (powfn)
8049 tree arg = build_real (type, dconst2);
8050 tree arglist = build_tree_list (NULL_TREE, arg);
8051 arglist = tree_cons (NULL_TREE, arg0, arglist);
8052 return build_function_call_expr (powfn, arglist);
8057 goto associate;
8059 case BIT_IOR_EXPR:
8060 bit_ior:
8061 if (integer_all_onesp (arg1))
8062 return omit_one_operand (type, arg1, arg0);
8063 if (integer_zerop (arg1))
8064 return non_lvalue (fold_convert (type, arg0));
8065 if (operand_equal_p (arg0, arg1, 0))
8066 return non_lvalue (fold_convert (type, arg0));
8068 /* ~X | X is -1. */
8069 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8070 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8072 t1 = build_int_cst (type, -1);
8073 t1 = force_fit_type (t1, 0, false, false);
8074 return omit_one_operand (type, t1, arg1);
8077 /* X | ~X is -1. */
8078 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8079 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8081 t1 = build_int_cst (type, -1);
8082 t1 = force_fit_type (t1, 0, false, false);
8083 return omit_one_operand (type, t1, arg0);
8086 t1 = distribute_bit_expr (code, type, arg0, arg1);
8087 if (t1 != NULL_TREE)
8088 return t1;
8090 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8092 This results in more efficient code for machines without a NAND
8093 instruction. Combine will canonicalize to the first form
8094 which will allow use of NAND instructions provided by the
8095 backend if they exist. */
8096 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8097 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8099 return fold_build1 (BIT_NOT_EXPR, type,
8100 build2 (BIT_AND_EXPR, type,
8101 TREE_OPERAND (arg0, 0),
8102 TREE_OPERAND (arg1, 0)));
8105 /* See if this can be simplified into a rotate first. If that
8106 is unsuccessful continue in the association code. */
8107 goto bit_rotate;
8109 case BIT_XOR_EXPR:
8110 if (integer_zerop (arg1))
8111 return non_lvalue (fold_convert (type, arg0));
8112 if (integer_all_onesp (arg1))
8113 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8114 if (operand_equal_p (arg0, arg1, 0))
8115 return omit_one_operand (type, integer_zero_node, arg0);
8117 /* ~X ^ X is -1. */
8118 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8119 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8121 t1 = build_int_cst (type, -1);
8122 t1 = force_fit_type (t1, 0, false, false);
8123 return omit_one_operand (type, t1, arg1);
8126 /* X ^ ~X is -1. */
8127 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8128 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8130 t1 = build_int_cst (type, -1);
8131 t1 = force_fit_type (t1, 0, false, false);
8132 return omit_one_operand (type, t1, arg0);
8135 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8136 with a constant, and the two constants have no bits in common,
8137 we should treat this as a BIT_IOR_EXPR since this may produce more
8138 simplifications. */
8139 if (TREE_CODE (arg0) == BIT_AND_EXPR
8140 && TREE_CODE (arg1) == BIT_AND_EXPR
8141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8142 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8143 && integer_zerop (const_binop (BIT_AND_EXPR,
8144 TREE_OPERAND (arg0, 1),
8145 TREE_OPERAND (arg1, 1), 0)))
8147 code = BIT_IOR_EXPR;
8148 goto bit_ior;
8151 /* (X | Y) ^ X -> Y & ~X. */
8152 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8153 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8155 tree t2 = TREE_OPERAND (arg0, 1);
8156 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8157 arg1);
8158 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8159 fold_convert (type, t1));
8160 return t1;
8163 /* (Y | X) ^ X -> Y & ~X. */
8164 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8165 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8167 tree t2 = TREE_OPERAND (arg0, 0);
8168 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8169 arg1);
8170 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8171 fold_convert (type, t1));
8172 return t1;
8175 /* X ^ (X | Y) -> Y & ~X. */
8176 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8177 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8179 tree t2 = TREE_OPERAND (arg1, 1);
8180 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8181 arg0);
8182 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8183 fold_convert (type, t1));
8184 return t1;
8187 /* X ^ (Y | X) -> Y & ~X. */
8188 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8189 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8191 tree t2 = TREE_OPERAND (arg1, 0);
8192 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8193 arg0);
8194 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8195 fold_convert (type, t1));
8196 return t1;
8199 /* Convert ~X ^ ~Y to X ^ Y. */
8200 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8201 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8202 return fold_build2 (code, type,
8203 fold_convert (type, TREE_OPERAND (arg0, 0)),
8204 fold_convert (type, TREE_OPERAND (arg1, 0)));
8206 /* See if this can be simplified into a rotate first. If that
8207 is unsuccessful continue in the association code. */
8208 goto bit_rotate;
8210 case BIT_AND_EXPR:
8211 if (integer_all_onesp (arg1))
8212 return non_lvalue (fold_convert (type, arg0));
8213 if (integer_zerop (arg1))
8214 return omit_one_operand (type, arg1, arg0);
8215 if (operand_equal_p (arg0, arg1, 0))
8216 return non_lvalue (fold_convert (type, arg0));
8218 /* ~X & X is always zero. */
8219 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8220 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8221 return omit_one_operand (type, integer_zero_node, arg1);
8223 /* X & ~X is always zero. */
8224 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8225 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8226 return omit_one_operand (type, integer_zero_node, arg0);
8228 t1 = distribute_bit_expr (code, type, arg0, arg1);
8229 if (t1 != NULL_TREE)
8230 return t1;
8231 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
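      /* The widened value fits in 8 bits, so a mask covering all 8 bits
         (0377 == 0xff) changes nothing and only the conversion remains.  */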
8232 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8233 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8235 unsigned int prec
8236 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8238 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8239 && (~TREE_INT_CST_LOW (arg1)
8240 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8241 return fold_convert (type, TREE_OPERAND (arg0, 0));
8244 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8246 This results in more efficient code for machines without a NOR
8247 instruction. Combine will canonicalize to the first form
8248 which will allow use of NOR instructions provided by the
8249 backend if they exist. */
8250 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8251 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8253 return fold_build1 (BIT_NOT_EXPR, type,
8254 build2 (BIT_IOR_EXPR, type,
8255 TREE_OPERAND (arg0, 0),
8256 TREE_OPERAND (arg1, 0)));
8259 goto associate;
8261 case RDIV_EXPR:
8262 /* Don't touch a floating-point divide by zero unless the mode
8263 of the constant can represent infinity. */
8264 if (TREE_CODE (arg1) == REAL_CST
8265 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8266 && real_zerop (arg1))
8267 return NULL_TREE;
8269 /* Optimize A / A to 1.0 if we don't care about
8270 NaNs or Infinities. */
8271 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8272 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8273 && operand_equal_p (arg0, arg1, 0))
8275 tree r = build_real (TREE_TYPE (arg0), dconst1);
8277 return omit_two_operands (type, r, arg0, arg1);
8280 /* (-A) / (-B) -> A / B */
8281 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8282 return fold_build2 (RDIV_EXPR, type,
8283 TREE_OPERAND (arg0, 0),
8284 negate_expr (arg1));
8285 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8286 return fold_build2 (RDIV_EXPR, type,
8287 negate_expr (arg0),
8288 TREE_OPERAND (arg1, 0));
8290 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8291 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8292 && real_onep (arg1))
8293 return non_lvalue (fold_convert (type, arg0));
8295 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8296 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8297 && real_minus_onep (arg1))
8298 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8300 /* If ARG1 is a constant, we can convert this to a multiply by the
8301 reciprocal. This does not have the same rounding properties,
8302 so only do this if -funsafe-math-optimizations. We can actually
8303 always safely do it if ARG1 is a power of two, but it's hard to
8304 tell if it is or not in a portable manner. */
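      /* E.g. x/5.0 becomes x*0.2 only under -funsafe-math-optimizations,
         whereas x/4.0 may become x*0.25 whenever we optimize, since 0.25
         is the exact inverse of 4.0.  */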
8305 if (TREE_CODE (arg1) == REAL_CST)
8307 if (flag_unsafe_math_optimizations
8308 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8309 arg1, 0)))
8310 return fold_build2 (MULT_EXPR, type, arg0, tem);
8311 /* Find the reciprocal if optimizing and the result is exact. */
8312 if (optimize)
8314 REAL_VALUE_TYPE r;
8315 r = TREE_REAL_CST (arg1);
8316 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8318 tem = build_real (type, r);
8319 return fold_build2 (MULT_EXPR, type,
8320 fold_convert (type, arg0), tem);
8324 /* Convert A/B/C to A/(B*C). */
8325 if (flag_unsafe_math_optimizations
8326 && TREE_CODE (arg0) == RDIV_EXPR)
8327 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8328 fold_build2 (MULT_EXPR, type,
8329 TREE_OPERAND (arg0, 1), arg1));
8331 /* Convert A/(B/C) to (A/B)*C. */
8332 if (flag_unsafe_math_optimizations
8333 && TREE_CODE (arg1) == RDIV_EXPR)
8334 return fold_build2 (MULT_EXPR, type,
8335 fold_build2 (RDIV_EXPR, type, arg0,
8336 TREE_OPERAND (arg1, 0)),
8337 TREE_OPERAND (arg1, 1));
8339 /* Convert C1/(X*C2) into (C1/C2)/X. */
8340 if (flag_unsafe_math_optimizations
8341 && TREE_CODE (arg1) == MULT_EXPR
8342 && TREE_CODE (arg0) == REAL_CST
8343 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8345 tree tem = const_binop (RDIV_EXPR, arg0,
8346 TREE_OPERAND (arg1, 1), 0);
8347 if (tem)
8348 return fold_build2 (RDIV_EXPR, type, tem,
8349 TREE_OPERAND (arg1, 0));
8352 if (flag_unsafe_math_optimizations)
8354 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8355 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8357 /* Optimize sin(x)/cos(x) as tan(x). */
8358 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8359 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8360 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8361 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8362 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8364 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8366 if (tanfn != NULL_TREE)
8367 return build_function_call_expr (tanfn,
8368 TREE_OPERAND (arg0, 1));
8371 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8372 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8373 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8374 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8375 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8376 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8378 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8380 if (tanfn != NULL_TREE)
8382 tree tmp = TREE_OPERAND (arg0, 1);
8383 tmp = build_function_call_expr (tanfn, tmp);
8384 return fold_build2 (RDIV_EXPR, type,
8385 build_real (type, dconst1), tmp);
8389 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
8390 NaNs or Infinities. */
8391 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
8392 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
8393 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
8395 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8396 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8398 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8399 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8400 && operand_equal_p (arg00, arg01, 0))
8402 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8404 if (cosfn != NULL_TREE)
8405 return build_function_call_expr (cosfn,
8406 TREE_OPERAND (arg0, 1));
8410 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
8411 NaNs or Infinities. */
8412 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
8413 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
8414 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
8416 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8417 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8419 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8420 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8421 && operand_equal_p (arg00, arg01, 0))
8423 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8425 if (cosfn != NULL_TREE)
8427 tree tmp = TREE_OPERAND (arg0, 1);
8428 tmp = build_function_call_expr (cosfn, tmp);
8429 return fold_build2 (RDIV_EXPR, type,
8430 build_real (type, dconst1),
8431 tmp);
8436 /* Optimize pow(x,c)/x as pow(x,c-1). */
8437 if (fcode0 == BUILT_IN_POW
8438 || fcode0 == BUILT_IN_POWF
8439 || fcode0 == BUILT_IN_POWL)
8441 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8442 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8443 if (TREE_CODE (arg01) == REAL_CST
8444 && ! TREE_CONSTANT_OVERFLOW (arg01)
8445 && operand_equal_p (arg1, arg00, 0))
8447 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8448 REAL_VALUE_TYPE c;
8449 tree arg, arglist;
8451 c = TREE_REAL_CST (arg01);
8452 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8453 arg = build_real (type, c);
8454 arglist = build_tree_list (NULL_TREE, arg);
8455 arglist = tree_cons (NULL_TREE, arg1, arglist);
8456 return build_function_call_expr (powfn, arglist);
8460 /* Optimize x/expN(y) into x*expN(-y). */
8461 if (BUILTIN_EXPONENT_P (fcode1))
8463 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8464 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8465 tree arglist = build_tree_list (NULL_TREE,
8466 fold_convert (type, arg));
8467 arg1 = build_function_call_expr (expfn, arglist);
8468 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8471 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8472 if (fcode1 == BUILT_IN_POW
8473 || fcode1 == BUILT_IN_POWF
8474 || fcode1 == BUILT_IN_POWL)
8476 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8477 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8478 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8479 tree neg11 = fold_convert (type, negate_expr (arg11));
8480 tree arglist = tree_cons(NULL_TREE, arg10,
8481 build_tree_list (NULL_TREE, neg11));
8482 arg1 = build_function_call_expr (powfn, arglist);
8483 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8486 goto binary;
8488 case TRUNC_DIV_EXPR:
8489 case ROUND_DIV_EXPR:
8490 case FLOOR_DIV_EXPR:
8491 case CEIL_DIV_EXPR:
8492 case EXACT_DIV_EXPR:
8493 if (integer_onep (arg1))
8494 return non_lvalue (fold_convert (type, arg0));
8495 if (integer_zerop (arg1))
8496 return NULL_TREE;
8497 /* X / -1 is -X. */
8498 if (!TYPE_UNSIGNED (type)
8499 && TREE_CODE (arg1) == INTEGER_CST
8500 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8501 && TREE_INT_CST_HIGH (arg1) == -1)
8502 return fold_convert (type, negate_expr (arg0));
8504 /* Convert -A / -B to A / B when the type is signed and overflow is
8505 undefined. */
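      /* A note on the !flag_wrapv guard: with wrapping overflow, negating
         INT_MIN yields INT_MIN again, so the rewrite could change the
         result.  */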
8506 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8507 && TREE_CODE (arg0) == NEGATE_EXPR
8508 && negate_expr_p (arg1))
8509 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8510 negate_expr (arg1));
8511 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8512 && TREE_CODE (arg1) == NEGATE_EXPR
8513 && negate_expr_p (arg0))
8514 return fold_build2 (code, type, negate_expr (arg0),
8515 TREE_OPERAND (arg1, 0));
8517 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8518 operation, EXACT_DIV_EXPR.
8520 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8521 At one time others generated faster code; it's not clear if they do
8522 after the last round of changes to the DIV code in expmed.c. */
8523 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8524 && multiple_of_p (type, arg0, arg1))
8525 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8527 if (TREE_CODE (arg1) == INTEGER_CST
8528 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8529 return fold_convert (type, tem);
8531 goto binary;
8533 case CEIL_MOD_EXPR:
8534 case FLOOR_MOD_EXPR:
8535 case ROUND_MOD_EXPR:
8536 case TRUNC_MOD_EXPR:
8537 /* X % 1 is always zero, but be sure to preserve any side
8538 effects in X. */
8539 if (integer_onep (arg1))
8540 return omit_one_operand (type, integer_zero_node, arg0);
8542 /* For X % 0, return X % 0 unchanged so that we can get the
8543 proper warnings and errors. */
8544 if (integer_zerop (arg1))
8545 return NULL_TREE;
8547 /* 0 % X is always zero, but be sure to preserve any side
8548 effects in X. Place this after checking for X == 0. */
8549 if (integer_zerop (arg0))
8550 return omit_one_operand (type, integer_zero_node, arg1);
8552 /* X % -1 is zero. */
8553 if (!TYPE_UNSIGNED (type)
8554 && TREE_CODE (arg1) == INTEGER_CST
8555 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8556 && TREE_INT_CST_HIGH (arg1) == -1)
8557 return omit_one_operand (type, integer_zero_node, arg0);
8559 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8560 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
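      /* E.g. X % 8 becomes X & 7 when X is unsigned or known to be
         nonnegative.  */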
8561 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8562 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8563 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8565 unsigned HOST_WIDE_INT high, low;
8566 tree mask;
8567 int l;
8569 l = tree_log2 (arg1);
8570 if (l >= HOST_BITS_PER_WIDE_INT)
8572 high = ((unsigned HOST_WIDE_INT) 1
8573 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8574 low = -1;
8576 else
8578 high = 0;
8579 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8582 mask = build_int_cst_wide (type, low, high);
8583 return fold_build2 (BIT_AND_EXPR, type,
8584 fold_convert (type, arg0), mask);
8587 /* X % -C is the same as X % C. */
8588 if (code == TRUNC_MOD_EXPR
8589 && !TYPE_UNSIGNED (type)
8590 && TREE_CODE (arg1) == INTEGER_CST
8591 && !TREE_CONSTANT_OVERFLOW (arg1)
8592 && TREE_INT_CST_HIGH (arg1) < 0
8593 && !flag_trapv
8594 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8595 && !sign_bit_p (arg1, arg1))
8596 return fold_build2 (code, type, fold_convert (type, arg0),
8597 fold_convert (type, negate_expr (arg1)));
8599 /* X % -Y is the same as X % Y. */
8600 if (code == TRUNC_MOD_EXPR
8601 && !TYPE_UNSIGNED (type)
8602 && TREE_CODE (arg1) == NEGATE_EXPR
8603 && !flag_trapv)
8604 return fold_build2 (code, type, fold_convert (type, arg0),
8605 fold_convert (type, TREE_OPERAND (arg1, 0)));
8607 if (TREE_CODE (arg1) == INTEGER_CST
8608 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8609 return fold_convert (type, tem);
8611 goto binary;
8613 case LROTATE_EXPR:
8614 case RROTATE_EXPR:
8615 if (integer_all_onesp (arg0))
8616 return omit_one_operand (type, arg0, arg1);
8617 goto shift;
8619 case RSHIFT_EXPR:
8620 /* Optimize -1 >> x for arithmetic right shifts. */
8621 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8622 return omit_one_operand (type, arg0, arg1);
8623 /* ... fall through ... */
8625 case LSHIFT_EXPR:
8626 shift:
8627 if (integer_zerop (arg1))
8628 return non_lvalue (fold_convert (type, arg0));
8629 if (integer_zerop (arg0))
8630 return omit_one_operand (type, arg0, arg1);
8632 /* Since a negative shift count is not well-defined,
8633 don't try to compute it in the compiler. */
8634 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8635 return NULL_TREE;
8637 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
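      /* E.g. (x >> 2) >> 3 becomes x >> 5; the code below also handles
         the case where c1+c2 reaches or exceeds the type precision.  */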
8638 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8639 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8640 && host_integerp (TREE_OPERAND (arg0, 1), false)
8641 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8643 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8644 + TREE_INT_CST_LOW (arg1));
8646 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8647 being well defined. */
8648 if (low >= TYPE_PRECISION (type))
8650 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8651 low = low % TYPE_PRECISION (type);
8652 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8653 return build_int_cst (type, 0);
8654 else
8655 low = TYPE_PRECISION (type) - 1;
8658 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8659 build_int_cst (type, low));
8662 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8663 into x & ((unsigned)-1 >> c) for unsigned types. */
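      /* E.g. for a 32-bit x, (x >> 4) << 4 becomes x & 0xfffffff0,
         clearing the four low-order bits.  */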
8664 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8665 || (TYPE_UNSIGNED (type)
8666 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8667 && host_integerp (arg1, false)
8668 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8669 && host_integerp (TREE_OPERAND (arg0, 1), false)
8670 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8672 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8673 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8674 tree lshift;
8675 tree arg00;
8677 if (low0 == low1)
8679 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8681 lshift = build_int_cst (type, -1);
8682 lshift = int_const_binop (code, lshift, arg1, 0);
8684 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8688 /* Rewrite an LROTATE_EXPR by a constant into an
8689 RROTATE_EXPR by a new constant. */
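      /* E.g. rotating a 32-bit value left by 3 is the same as rotating
         it right by 29.  */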
8690 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8692 tree tem = build_int_cst (NULL_TREE,
8693 GET_MODE_BITSIZE (TYPE_MODE (type)));
8694 tem = fold_convert (TREE_TYPE (arg1), tem);
8695 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8696 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8699 /* If we have a rotate of a bit operation with the rotate count and
8700 the second operand of the bit operation both constant,
8701 permute the two operations. */
8702 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8703 && (TREE_CODE (arg0) == BIT_AND_EXPR
8704 || TREE_CODE (arg0) == BIT_IOR_EXPR
8705 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8706 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8707 return fold_build2 (TREE_CODE (arg0), type,
8708 fold_build2 (code, type,
8709 TREE_OPERAND (arg0, 0), arg1),
8710 fold_build2 (code, type,
8711 TREE_OPERAND (arg0, 1), arg1));
8713 /* Two consecutive rotates adding up to the width of the mode can
8714 be ignored. */
8715 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8716 && TREE_CODE (arg0) == RROTATE_EXPR
8717 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8718 && TREE_INT_CST_HIGH (arg1) == 0
8719 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8720 && ((TREE_INT_CST_LOW (arg1)
8721 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8722 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8723 return TREE_OPERAND (arg0, 0);
8725 goto binary;
8727 case MIN_EXPR:
8728 if (operand_equal_p (arg0, arg1, 0))
8729 return omit_one_operand (type, arg0, arg1);
8730 if (INTEGRAL_TYPE_P (type)
8731 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8732 return omit_one_operand (type, arg1, arg0);
8733 goto associate;
8735 case MAX_EXPR:
8736 if (operand_equal_p (arg0, arg1, 0))
8737 return omit_one_operand (type, arg0, arg1);
8738 if (INTEGRAL_TYPE_P (type)
8739 && TYPE_MAX_VALUE (type)
8740 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8741 return omit_one_operand (type, arg1, arg0);
8742 goto associate;
8744 case TRUTH_ANDIF_EXPR:
8745 /* Note that the operands of this must be ints
8746 and their values must be 0 or 1.
8747 ("true" is a fixed value perhaps depending on the language.) */
8748 /* If first arg is constant zero, return it. */
8749 if (integer_zerop (arg0))
8750 return fold_convert (type, arg0);
8751 case TRUTH_AND_EXPR:
8752 /* If either arg is constant true, drop it. */
8753 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8754 return non_lvalue (fold_convert (type, arg1));
8755 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8756 /* Preserve sequence points. */
8757 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8758 return non_lvalue (fold_convert (type, arg0));
8759 /* If second arg is constant zero, result is zero, but first arg
8760 must be evaluated. */
8761 if (integer_zerop (arg1))
8762 return omit_one_operand (type, arg1, arg0);
8763 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8764 case will be handled here. */
8765 if (integer_zerop (arg0))
8766 return omit_one_operand (type, arg0, arg1);
8768 /* !X && X is always false. */
8769 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8770 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8771 return omit_one_operand (type, integer_zero_node, arg1);
8772 /* X && !X is always false. */
8773 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8774 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8775 return omit_one_operand (type, integer_zero_node, arg0);
8777 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8778 means A >= Y && A != MAX, but in this case we know that
8779 A < X <= MAX. */
8781 if (!TREE_SIDE_EFFECTS (arg0)
8782 && !TREE_SIDE_EFFECTS (arg1))
8784 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8785 if (tem && !operand_equal_p (tem, arg0, 0))
8786 return fold_build2 (code, type, tem, arg1);
8788 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8789 if (tem && !operand_equal_p (tem, arg1, 0))
8790 return fold_build2 (code, type, arg0, tem);
8793 truth_andor:
8794 /* We only do these simplifications if we are optimizing. */
8795 if (!optimize)
8796 return NULL_TREE;
8798 /* Check for things like (A || B) && (A || C). We can convert this
8799 to A || (B && C). Note that either operator can be any of the four
8800 truth and/or operations and the transformation will still be
8801 valid. Also note that we only care about order for the
8802 ANDIF and ORIF operators. If B contains side effects, this
8803 might change the truth-value of A. */
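      /* E.g. (a || b) && (a || c) becomes a || (b && c), provided b has
         no side effects.  */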
8804 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8805 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8806 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8807 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8808 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8809 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8811 tree a00 = TREE_OPERAND (arg0, 0);
8812 tree a01 = TREE_OPERAND (arg0, 1);
8813 tree a10 = TREE_OPERAND (arg1, 0);
8814 tree a11 = TREE_OPERAND (arg1, 1);
8815 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8816 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8817 && (code == TRUTH_AND_EXPR
8818 || code == TRUTH_OR_EXPR));
8820 if (operand_equal_p (a00, a10, 0))
8821 return fold_build2 (TREE_CODE (arg0), type, a00,
8822 fold_build2 (code, type, a01, a11));
8823 else if (commutative && operand_equal_p (a00, a11, 0))
8824 return fold_build2 (TREE_CODE (arg0), type, a00,
8825 fold_build2 (code, type, a01, a10));
8826 else if (commutative && operand_equal_p (a01, a10, 0))
8827 return fold_build2 (TREE_CODE (arg0), type, a01,
8828 fold_build2 (code, type, a00, a11));
8830 /* This case is tricky because we must either have commutative
8831 operators or else A10 must not have side-effects. */
8833 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8834 && operand_equal_p (a01, a11, 0))
8835 return fold_build2 (TREE_CODE (arg0), type,
8836 fold_build2 (code, type, a00, a10),
8837 a01);
8840 /* See if we can build a range comparison. */
8841 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8842 return tem;
8844 /* Check for the possibility of merging component references. If our
8845 lhs is another similar operation, try to merge its rhs with our
8846 rhs. Then try to merge our lhs and rhs. */
8847 if (TREE_CODE (arg0) == code
8848 && 0 != (tem = fold_truthop (code, type,
8849 TREE_OPERAND (arg0, 1), arg1)))
8850 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8852 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8853 return tem;
8855 return NULL_TREE;
8857 case TRUTH_ORIF_EXPR:
8858 /* Note that the operands of this must be ints
8859 and their values must be 0 or true.
8860 ("true" is a fixed value perhaps depending on the language.) */
8861 /* If first arg is constant true, return it. */
8862 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8863 return fold_convert (type, arg0);
8864 case TRUTH_OR_EXPR:
8865 /* If either arg is constant zero, drop it. */
8866 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8867 return non_lvalue (fold_convert (type, arg1));
8868 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8869 /* Preserve sequence points. */
8870 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8871 return non_lvalue (fold_convert (type, arg0));
8872 /* If second arg is constant true, result is true, but we must
8873 evaluate first arg. */
8874 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8875 return omit_one_operand (type, arg1, arg0);
8876 /* Likewise for first arg, but note this only occurs here for
8877 TRUTH_OR_EXPR. */
8878 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8879 return omit_one_operand (type, arg0, arg1);
8881 /* !X || X is always true. */
8882 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8883 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8884 return omit_one_operand (type, integer_one_node, arg1);
8885 /* X || !X is always true. */
8886 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8887 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8888 return omit_one_operand (type, integer_one_node, arg0);
8890 goto truth_andor;
8892 case TRUTH_XOR_EXPR:
8893 /* If the second arg is constant zero, drop it. */
8894 if (integer_zerop (arg1))
8895 return non_lvalue (fold_convert (type, arg0));
8896 /* If the second arg is constant true, this is a logical inversion. */
8897 if (integer_onep (arg1))
8899 /* Only call invert_truthvalue if operand is a truth value. */
8900 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8901 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8902 else
8903 tem = invert_truthvalue (arg0);
8904 return non_lvalue (fold_convert (type, tem));
8906 /* Identical arguments cancel to zero. */
8907 if (operand_equal_p (arg0, arg1, 0))
8908 return omit_one_operand (type, integer_zero_node, arg0);
8910 /* !X ^ X is always true. */
8911 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8912 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8913 return omit_one_operand (type, integer_one_node, arg1);
8915 /* X ^ !X is always true. */
8916 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8917 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8918 return omit_one_operand (type, integer_one_node, arg0);
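/* Illustrative XOR identities handled above, for truth-valued x:
       x ^ 0  -> x         x ^ 1  -> !x
       x ^ x  -> 0         x ^ !x -> 1   */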
8920 return NULL_TREE;
8922 case EQ_EXPR:
8923 case NE_EXPR:
8924 case LT_EXPR:
8925 case GT_EXPR:
8926 case LE_EXPR:
8927 case GE_EXPR:
8928 /* If one arg is a real or integer constant, put it last. */
8929 if (tree_swap_operands_p (arg0, arg1, true))
8930 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8932 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
8933 if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
8934 && (code == NE_EXPR || code == EQ_EXPR))
8935 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8936 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8937 arg1));
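/* Hypothetical instance of the BIT_NOT fold above, for 32-bit int a:
       ~a == 5   becomes   a == ~5   (i.e. a == -6),
   moving the BIT_NOT onto the constant side where it folds away.  */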
8939 /* bool_var != 0 becomes bool_var. */
8940 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8941 && code == NE_EXPR)
8942 return non_lvalue (fold_convert (type, arg0));
8944 /* bool_var == 1 becomes bool_var. */
8945 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8946 && code == EQ_EXPR)
8947 return non_lvalue (fold_convert (type, arg0));
8949 /* bool_var != 1 becomes !bool_var. */
8950 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8951 && code == NE_EXPR)
8952 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8954 /* bool_var == 0 becomes !bool_var. */
8955 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8956 && code == EQ_EXPR)
8957 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8959 /* If this is an equality comparison of the address of a non-weak
8960 object against zero, then we know the result. */
8961 if ((code == EQ_EXPR || code == NE_EXPR)
8962 && TREE_CODE (arg0) == ADDR_EXPR
8963 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8964 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8965 && integer_zerop (arg1))
8966 return constant_boolean_node (code != EQ_EXPR, type);
8968 /* If this is an equality comparison of the address of two non-weak,
8969 unaliased symbols neither of which are extern (since we do not
8970 have access to attributes for externs), then we know the result. */
8971 if ((code == EQ_EXPR || code == NE_EXPR)
8972 && TREE_CODE (arg0) == ADDR_EXPR
8973 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8974 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8975 && ! lookup_attribute ("alias",
8976 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8977 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8978 && TREE_CODE (arg1) == ADDR_EXPR
8979 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8980 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8981 && ! lookup_attribute ("alias",
8982 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8983 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8985 /* We know that we're looking at the address of two
8986 non-weak, unaliased, static _DECL nodes.
8988 It is both wasteful and incorrect to call operand_equal_p
8989 to compare the two ADDR_EXPR nodes. It is wasteful in that
8990 all we need to do is test pointer equality for the arguments
8991 to the two ADDR_EXPR nodes. It is incorrect to use
8992 operand_equal_p as that function is NOT equivalent to a
8993 C equality test. It can in fact return false for two
8994 objects which would test as equal using the C equality
8995 operator. */
8996 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8997 return constant_boolean_node (equal
8998 ? code == EQ_EXPR : code != EQ_EXPR,
8999 type);
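/* Illustrative case, assuming ordinary (non-weak, unaliased,
   non-extern) file-scope objects:
       static int x, y;
       &x != 0   folds to 1,   &x == &y   folds to 0.  */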
9002 /* If this is a comparison of two exprs that look like an
9003 ARRAY_REF of the same object, then we can fold this to a
9004 comparison of the two offsets. */
9005 if (TREE_CODE_CLASS (code) == tcc_comparison)
9007 tree base0, offset0, base1, offset1;
9009 if (extract_array_ref (arg0, &base0, &offset0)
9010 && extract_array_ref (arg1, &base1, &offset1)
9011 && operand_equal_p (base0, base1, 0))
9013 /* Handle no offsets on both sides specially. */
9014 if (offset0 == NULL_TREE
9015 && offset1 == NULL_TREE)
9016 return fold_build2 (code, type, integer_zero_node,
9017 integer_zero_node);
9019 if (!offset0 || !offset1
9020 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
9022 if (offset0 == NULL_TREE)
9023 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9024 if (offset1 == NULL_TREE)
9025 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9026 return fold_build2 (code, type, offset0, offset1);
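/* Hypothetical shape handled above: when both sides decompose into
   the same base with index offsets,
       &a[i] == &a[j]
   reduces to a comparison of the two offsets, effectively i == j.  */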
9031 /* Transform comparisons of the form X +- C CMP X. */
9032 if ((code != EQ_EXPR && code != NE_EXPR)
9033 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9034 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9035 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9036 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9037 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9038 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9039 && !(flag_wrapv || flag_trapv))))
9041 tree arg01 = TREE_OPERAND (arg0, 1);
9042 enum tree_code code0 = TREE_CODE (arg0);
9043 int is_positive;
9045 if (TREE_CODE (arg01) == REAL_CST)
9046 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9047 else
9048 is_positive = tree_int_cst_sgn (arg01);
9050 /* (X - c) > X becomes false. */
9051 if (code == GT_EXPR
9052 && ((code0 == MINUS_EXPR && is_positive >= 0)
9053 || (code0 == PLUS_EXPR && is_positive <= 0)))
9054 return constant_boolean_node (0, type);
9056 /* Likewise (X + c) < X becomes false. */
9057 if (code == LT_EXPR
9058 && ((code0 == PLUS_EXPR && is_positive >= 0)
9059 || (code0 == MINUS_EXPR && is_positive <= 0)))
9060 return constant_boolean_node (0, type);
9062 /* Convert (X - c) <= X to true. */
9063 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9064 && code == LE_EXPR
9065 && ((code0 == MINUS_EXPR && is_positive >= 0)
9066 || (code0 == PLUS_EXPR && is_positive <= 0)))
9067 return constant_boolean_node (1, type);
9069 /* Convert (X + c) >= X to true. */
9070 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9071 && code == GE_EXPR
9072 && ((code0 == PLUS_EXPR && is_positive >= 0)
9073 || (code0 == MINUS_EXPR && is_positive <= 0)))
9074 return constant_boolean_node (1, type);
9076 if (TREE_CODE (arg01) == INTEGER_CST)
9078 /* Convert X + c > X and X - c < X to true for integers. */
9079 if (code == GT_EXPR
9080 && ((code0 == PLUS_EXPR && is_positive > 0)
9081 || (code0 == MINUS_EXPR && is_positive < 0)))
9082 return constant_boolean_node (1, type);
9084 if (code == LT_EXPR
9085 && ((code0 == MINUS_EXPR && is_positive > 0)
9086 || (code0 == PLUS_EXPR && is_positive < 0)))
9087 return constant_boolean_node (1, type);
9089 /* Convert X + c <= X and X - c >= X to false for integers. */
9090 if (code == LE_EXPR
9091 && ((code0 == PLUS_EXPR && is_positive > 0)
9092 || (code0 == MINUS_EXPR && is_positive < 0)))
9093 return constant_boolean_node (0, type);
9095 if (code == GE_EXPR
9096 && ((code0 == MINUS_EXPR && is_positive > 0)
9097 || (code0 == PLUS_EXPR && is_positive < 0)))
9098 return constant_boolean_node (0, type);
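/* Illustrative signed-integer instances of the X +- C CMP X family
   (which is why -fwrapv/-ftrapv are excluded above):
       x + 1 > x   -> 1        x - 1 < x   -> 1
       x - 1 > x   -> 0        x + 1 < x   -> 0  */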
9102 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9103 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9104 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9105 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9106 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9107 && !(flag_wrapv || flag_trapv))
9108 && (TREE_CODE (arg1) == INTEGER_CST
9109 && !TREE_OVERFLOW (arg1)))
9111 tree const1 = TREE_OPERAND (arg0, 1);
9112 tree const2 = arg1;
9113 tree variable = TREE_OPERAND (arg0, 0);
9114 tree lhs;
9115 int lhs_add;
9116 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9118 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9119 TREE_TYPE (arg1), const2, const1);
9120 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9121 && (TREE_CODE (lhs) != INTEGER_CST
9122 || !TREE_OVERFLOW (lhs)))
9123 return fold_build2 (code, type, variable, lhs);
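/* Hypothetical examples of migrating the constant, assuming the
   folded constant does not overflow:
       x + 3 < 10    becomes   x < 7
       x - 2 == 5    becomes   x == 7  */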
9126 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9128 tree targ0 = strip_float_extensions (arg0);
9129 tree targ1 = strip_float_extensions (arg1);
9130 tree newtype = TREE_TYPE (targ0);
9132 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9133 newtype = TREE_TYPE (targ1);
9135 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9136 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9137 return fold_build2 (code, type, fold_convert (newtype, targ0),
9138 fold_convert (newtype, targ1));
9140 /* (-a) CMP (-b) -> b CMP a */
9141 if (TREE_CODE (arg0) == NEGATE_EXPR
9142 && TREE_CODE (arg1) == NEGATE_EXPR)
9143 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9144 TREE_OPERAND (arg0, 0));
9146 if (TREE_CODE (arg1) == REAL_CST)
9148 REAL_VALUE_TYPE cst;
9149 cst = TREE_REAL_CST (arg1);
9151 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9152 if (TREE_CODE (arg0) == NEGATE_EXPR)
9153 return
9154 fold_build2 (swap_tree_comparison (code), type,
9155 TREE_OPERAND (arg0, 0),
9156 build_real (TREE_TYPE (arg1),
9157 REAL_VALUE_NEGATE (cst)));
9159 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9160 /* a CMP (-0) -> a CMP 0 */
9161 if (REAL_VALUE_MINUS_ZERO (cst))
9162 return fold_build2 (code, type, arg0,
9163 build_real (TREE_TYPE (arg1), dconst0));
9165 /* x != NaN is always true, other ops are always false. */
9166 if (REAL_VALUE_ISNAN (cst)
9167 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9169 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9170 return omit_one_operand (type, tem, arg0);
9173 /* Fold comparisons against infinity. */
9174 if (REAL_VALUE_ISINF (cst))
9176 tem = fold_inf_compare (code, type, arg0, arg1);
9177 if (tem != NULL_TREE)
9178 return tem;
9182 /* If this is a comparison of a real constant with a PLUS_EXPR
9183 or a MINUS_EXPR of a real constant, we can convert it into a
9184 comparison with a revised real constant, provided that no
9185 overflow occurs and unsafe_math_optimizations are enabled. */
9186 if (flag_unsafe_math_optimizations
9187 && TREE_CODE (arg1) == REAL_CST
9188 && (TREE_CODE (arg0) == PLUS_EXPR
9189 || TREE_CODE (arg0) == MINUS_EXPR)
9190 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9191 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9192 ? MINUS_EXPR : PLUS_EXPR,
9193 arg1, TREE_OPERAND (arg0, 1), 0))
9194 && ! TREE_CONSTANT_OVERFLOW (tem))
9195 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9197 /* Likewise, we can simplify a comparison of a real constant with
9198 a MINUS_EXPR whose first operand is also a real constant, i.e.
9199 (c1 - x) < c2 becomes x > c1-c2. */
9200 if (flag_unsafe_math_optimizations
9201 && TREE_CODE (arg1) == REAL_CST
9202 && TREE_CODE (arg0) == MINUS_EXPR
9203 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9204 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9205 arg1, 0))
9206 && ! TREE_CONSTANT_OVERFLOW (tem))
9207 return fold_build2 (swap_tree_comparison (code), type,
9208 TREE_OPERAND (arg0, 1), tem);
9210 /* Fold comparisons against built-in math functions. */
9211 if (TREE_CODE (arg1) == REAL_CST
9212 && flag_unsafe_math_optimizations
9213 && ! flag_errno_math)
9215 enum built_in_function fcode = builtin_mathfn_code (arg0);
9217 if (fcode != END_BUILTINS)
9219 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9220 if (tem != NULL_TREE)
9221 return tem;
9226 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9227 if (TREE_CONSTANT (arg1)
9228 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9229 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9230 /* This optimization is invalid for ordered comparisons
9231 if CONST+INCR overflows or if foo+incr might overflow.
9232 This optimization is invalid for floating point due to rounding.
9233 For pointer types we assume overflow doesn't happen. */
9234 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9235 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9236 && (code == EQ_EXPR || code == NE_EXPR))))
9238 tree varop, newconst;
9240 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9242 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9243 arg1, TREE_OPERAND (arg0, 1));
9244 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9245 TREE_OPERAND (arg0, 0),
9246 TREE_OPERAND (arg0, 1));
9248 else
9250 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9251 arg1, TREE_OPERAND (arg0, 1));
9252 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9253 TREE_OPERAND (arg0, 0),
9254 TREE_OPERAND (arg0, 1));
9258 /* If VAROP is a reference to a bitfield, we must mask
9259 the constant by the width of the field. */
9260 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9261 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9262 && host_integerp (DECL_SIZE (TREE_OPERAND
9263 (TREE_OPERAND (varop, 0), 1)), 1))
9265 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9266 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9267 tree folded_compare, shift;
9269 /* First check whether the comparison would always come out
9270 the same. If we skipped this check, the masking would
9271 change the meaning. */
9272 folded_compare = fold_build2 (code, type,
9273 TREE_OPERAND (varop, 0), arg1);
9274 if (integer_zerop (folded_compare)
9275 || integer_onep (folded_compare))
9276 return omit_one_operand (type, folded_compare, varop);
9278 shift = build_int_cst (NULL_TREE,
9279 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9280 shift = fold_convert (TREE_TYPE (varop), shift);
9281 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9282 newconst, shift);
9283 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9284 newconst, shift);
9287 return fold_build2 (code, type, varop, newconst);
9290 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9291 This transformation affects the cases which are handled in later
9292 optimizations involving comparisons with non-negative constants. */
9293 if (TREE_CODE (arg1) == INTEGER_CST
9294 && TREE_CODE (arg0) != INTEGER_CST
9295 && tree_int_cst_sgn (arg1) > 0)
9297 switch (code)
9299 case GE_EXPR:
9300 arg1 = const_binop (MINUS_EXPR, arg1,
9301 build_int_cst (TREE_TYPE (arg1), 1), 0);
9302 return fold_build2 (GT_EXPR, type, arg0,
9303 fold_convert (TREE_TYPE (arg0), arg1));
9305 case LT_EXPR:
9306 arg1 = const_binop (MINUS_EXPR, arg1,
9307 build_int_cst (TREE_TYPE (arg1), 1), 0);
9308 return fold_build2 (LE_EXPR, type, arg0,
9309 fold_convert (TREE_TYPE (arg0), arg1));
9311 default:
9312 break;
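/* Hypothetical canonicalizations performed above, for constant C > 0:
       x >= 5   becomes   x > 4
       x < 5    becomes   x <= 4  */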
9316 /* Comparisons with the highest or lowest possible integer of
9317 the specified size will have known values. */
9319 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9321 if (TREE_CODE (arg1) == INTEGER_CST
9322 && ! TREE_CONSTANT_OVERFLOW (arg1)
9323 && width <= 2 * HOST_BITS_PER_WIDE_INT
9324 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9325 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9327 HOST_WIDE_INT signed_max_hi;
9328 unsigned HOST_WIDE_INT signed_max_lo;
9329 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9331 if (width <= HOST_BITS_PER_WIDE_INT)
9333 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9334 - 1;
9335 signed_max_hi = 0;
9336 max_hi = 0;
9338 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9340 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9341 min_lo = 0;
9342 min_hi = 0;
9344 else
9346 max_lo = signed_max_lo;
9347 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9348 min_hi = -1;
9351 else
9353 width -= HOST_BITS_PER_WIDE_INT;
9354 signed_max_lo = -1;
9355 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9356 - 1;
9357 max_lo = -1;
9358 min_lo = 0;
9360 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9362 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9363 min_hi = 0;
9365 else
9367 max_hi = signed_max_hi;
9368 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9372 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9373 && TREE_INT_CST_LOW (arg1) == max_lo)
9374 switch (code)
9376 case GT_EXPR:
9377 return omit_one_operand (type, integer_zero_node, arg0);
9379 case GE_EXPR:
9380 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9382 case LE_EXPR:
9383 return omit_one_operand (type, integer_one_node, arg0);
9385 case LT_EXPR:
9386 return fold_build2 (NE_EXPR, type, arg0, arg1);
9388 /* The GE_EXPR and LT_EXPR cases above are not normally
9389 reached because of previous transformations. */
9391 default:
9392 break;
9394 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9395 == max_hi
9396 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9397 switch (code)
9399 case GT_EXPR:
9400 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9401 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9402 case LE_EXPR:
9403 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9404 return fold_build2 (NE_EXPR, type, arg0, arg1);
9405 default:
9406 break;
9408 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9409 == min_hi
9410 && TREE_INT_CST_LOW (arg1) == min_lo)
9411 switch (code)
9413 case LT_EXPR:
9414 return omit_one_operand (type, integer_zero_node, arg0);
9416 case LE_EXPR:
9417 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9419 case GE_EXPR:
9420 return omit_one_operand (type, integer_one_node, arg0);
9422 case GT_EXPR:
9423 return fold_build2 (NE_EXPR, type, op0, op1);
9425 default:
9426 break;
9428 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9429 == min_hi
9430 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9431 switch (code)
9433 case GE_EXPR:
9434 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9435 return fold_build2 (NE_EXPR, type, arg0, arg1);
9436 case LT_EXPR:
9437 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9438 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9439 default:
9440 break;
9443 else if (!in_gimple_form
9444 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9445 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9446 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9447 /* signed_type does not work on pointer types. */
9448 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9450 /* The following case also applies to X < signed_max+1
9451 and X >= signed_max+1 because of previous transformations. */
9452 if (code == LE_EXPR || code == GT_EXPR)
9454 tree st0, st1;
9455 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9456 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9457 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
9458 type, fold_convert (st0, arg0),
9459 build_int_cst (st1, 0));
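/* Illustrative sketch, assuming 32-bit unsigned x: a comparison
   against signed_max becomes a sign test on the signed view, e.g.
       x <= 0x7fffffffu   becomes   (int) x >= 0
       x >  0x7fffffffu   becomes   (int) x <  0  */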
9465 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9466 a MINUS_EXPR of a constant, we can convert it into a comparison with
9467 a revised constant as long as no overflow occurs. */
9468 if ((code == EQ_EXPR || code == NE_EXPR)
9469 && TREE_CODE (arg1) == INTEGER_CST
9470 && (TREE_CODE (arg0) == PLUS_EXPR
9471 || TREE_CODE (arg0) == MINUS_EXPR)
9472 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9473 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9474 ? MINUS_EXPR : PLUS_EXPR,
9475 arg1, TREE_OPERAND (arg0, 1), 0))
9476 && ! TREE_CONSTANT_OVERFLOW (tem))
9477 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9479 /* Similarly for a NEGATE_EXPR. */
9480 else if ((code == EQ_EXPR || code == NE_EXPR)
9481 && TREE_CODE (arg0) == NEGATE_EXPR
9482 && TREE_CODE (arg1) == INTEGER_CST
9483 && 0 != (tem = negate_expr (arg1))
9484 && TREE_CODE (tem) == INTEGER_CST
9485 && ! TREE_CONSTANT_OVERFLOW (tem))
9486 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9488 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9489 for !=. Don't do this for ordered comparisons due to overflow. */
9490 else if ((code == NE_EXPR || code == EQ_EXPR)
9491 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9492 return fold_build2 (code, type,
9493 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
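/* Hypothetical example:  (x - y) == 0  folds to  x == y, and likewise
   for !=; the ordered variants are left alone since x - y may
   overflow.  */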
9495 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9496 && (TREE_CODE (arg0) == NOP_EXPR
9497 || TREE_CODE (arg0) == CONVERT_EXPR))
9499 /* If we are widening one operand of an integer comparison,
9500 see if the other operand is similarly being widened. Perhaps we
9501 can do the comparison in the narrower type. */
9502 tem = fold_widened_comparison (code, type, arg0, arg1);
9503 if (tem)
9504 return tem;
9506 /* Or if we are changing signedness. */
9507 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9508 if (tem)
9509 return tem;
9512 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9513 constant, we can simplify it. */
9514 else if (TREE_CODE (arg1) == INTEGER_CST
9515 && (TREE_CODE (arg0) == MIN_EXPR
9516 || TREE_CODE (arg0) == MAX_EXPR)
9517 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9519 tem = optimize_minmax_comparison (code, type, op0, op1);
9520 if (tem)
9521 return tem;
9523 return NULL_TREE;
9526 /* If we are comparing an ABS_EXPR with a constant, we can
9527 convert all the cases into explicit comparisons, but they may
9528 well not be faster than doing the ABS and one comparison.
9529 But ABS (X) <= C is a range comparison, which becomes a subtraction
9530 and a comparison, and is probably faster. */
9531 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9532 && TREE_CODE (arg0) == ABS_EXPR
9533 && ! TREE_SIDE_EFFECTS (arg0)
9534 && (0 != (tem = negate_expr (arg1)))
9535 && TREE_CODE (tem) == INTEGER_CST
9536 && ! TREE_CONSTANT_OVERFLOW (tem))
9537 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9538 build2 (GE_EXPR, type,
9539 TREE_OPERAND (arg0, 0), tem),
9540 build2 (LE_EXPR, type,
9541 TREE_OPERAND (arg0, 0), arg1));
9543 /* Convert ABS_EXPR<x> >= 0 to true. */
9544 else if (code == GE_EXPR
9545 && tree_expr_nonnegative_p (arg0)
9546 && (integer_zerop (arg1)
9547 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9548 && real_zerop (arg1))))
9549 return omit_one_operand (type, integer_one_node, arg0);
9551 /* Convert ABS_EXPR<x> < 0 to false. */
9552 else if (code == LT_EXPR
9553 && tree_expr_nonnegative_p (arg0)
9554 && (integer_zerop (arg1) || real_zerop (arg1)))
9555 return omit_one_operand (type, integer_zero_node, arg0);
9557 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9558 else if ((code == EQ_EXPR || code == NE_EXPR)
9559 && TREE_CODE (arg0) == ABS_EXPR
9560 && (integer_zerop (arg1) || real_zerop (arg1)))
9561 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9563 /* If this is an EQ or NE comparison with zero and ARG0 is
9564 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9565 two operations, but the latter can be done in one less insn
9566 on machines that have only two-operand insns or on which a
9567 constant cannot be the first operand. */
9568 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9569 && TREE_CODE (arg0) == BIT_AND_EXPR)
9571 tree arg00 = TREE_OPERAND (arg0, 0);
9572 tree arg01 = TREE_OPERAND (arg0, 1);
9573 if (TREE_CODE (arg00) == LSHIFT_EXPR
9574 && integer_onep (TREE_OPERAND (arg00, 0)))
9575 return
9576 fold_build2 (code, type,
9577 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9578 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9579 arg01, TREE_OPERAND (arg00, 1)),
9580 fold_convert (TREE_TYPE (arg0),
9581 integer_one_node)),
9582 arg1);
9583 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9584 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9585 return
9586 fold_build2 (code, type,
9587 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9588 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9589 arg00, TREE_OPERAND (arg01, 1)),
9590 fold_convert (TREE_TYPE (arg0),
9591 integer_one_node)),
9592 arg1);
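/* Illustrative instance, with hypothetical operands:
       ((1 << n) & flags) != 0   becomes   ((flags >> n) & 1) != 0,
   trading the shift of a constant by a variable for a shift of the
   other operand.  */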
9595 /* If this is an NE or EQ comparison of zero against the result of a
9596 signed MOD operation whose second operand is a power of 2, make
9597 the MOD operation unsigned since it is simpler and equivalent. */
9598 if ((code == NE_EXPR || code == EQ_EXPR)
9599 && integer_zerop (arg1)
9600 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9601 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9602 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9603 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9604 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9605 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9607 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9608 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9609 fold_convert (newtype,
9610 TREE_OPERAND (arg0, 0)),
9611 fold_convert (newtype,
9612 TREE_OPERAND (arg0, 1)));
9614 return fold_build2 (code, type, newmod,
9615 fold_convert (newtype, arg1));
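/* Hypothetical example, for signed x: since only equality with zero
   is tested,
       x % 4 == 0   becomes   (unsigned) x % 4u == 0,
   and the unsigned MOD by a power of two is a simple mask.  */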
9618 /* If this is an NE comparison of zero with an AND of one, remove the
9619 comparison since the AND will give the correct value. */
9620 if (code == NE_EXPR && integer_zerop (arg1)
9621 && TREE_CODE (arg0) == BIT_AND_EXPR
9622 && integer_onep (TREE_OPERAND (arg0, 1)))
9623 return fold_convert (type, arg0);
9625 /* If we have (A & C) == C where C is a power of 2, convert this into
9626 (A & C) != 0. Similarly for NE_EXPR. */
9627 if ((code == EQ_EXPR || code == NE_EXPR)
9628 && TREE_CODE (arg0) == BIT_AND_EXPR
9629 && integer_pow2p (TREE_OPERAND (arg0, 1))
9630 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9631 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9632 arg0, fold_convert (TREE_TYPE (arg0),
9633 integer_zero_node));
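/* Illustrative example, with C a power of two:
       (a & 8) == 8   becomes   (a & 8) != 0
       (a & 8) != 8   becomes   (a & 8) == 0  */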
9635 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9636 bit, then fold the expression into A < 0 or A >= 0. */
9637 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9638 if (tem)
9639 return tem;
9641 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9642 Similarly for NE_EXPR. */
9643 if ((code == EQ_EXPR || code == NE_EXPR)
9644 && TREE_CODE (arg0) == BIT_AND_EXPR
9645 && TREE_CODE (arg1) == INTEGER_CST
9646 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9648 tree notc = fold_build1 (BIT_NOT_EXPR,
9649 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9650 TREE_OPERAND (arg0, 1));
9651 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9652 arg1, notc);
9653 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9654 if (integer_nonzerop (dandnotc))
9655 return omit_one_operand (type, rslt, arg0);
9658 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9659 Similarly for NE_EXPR. */
9660 if ((code == EQ_EXPR || code == NE_EXPR)
9661 && TREE_CODE (arg0) == BIT_IOR_EXPR
9662 && TREE_CODE (arg1) == INTEGER_CST
9663 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9665 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9666 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9667 TREE_OPERAND (arg0, 1), notd);
9668 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9669 if (integer_nonzerop (candnotd))
9670 return omit_one_operand (type, rslt, arg0);
9673 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9674 and similarly for >= into !=. */
9675 if ((code == LT_EXPR || code == GE_EXPR)
9676 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9677 && TREE_CODE (arg1) == LSHIFT_EXPR
9678 && integer_onep (TREE_OPERAND (arg1, 0)))
9679 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9680 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9681 TREE_OPERAND (arg1, 1)),
9682 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9684 else if ((code == LT_EXPR || code == GE_EXPR)
9685 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9686 && (TREE_CODE (arg1) == NOP_EXPR
9687 || TREE_CODE (arg1) == CONVERT_EXPR)
9688 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9689 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9690 return
9691 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9692 fold_convert (TREE_TYPE (arg0),
9693 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9694 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9695 1))),
9696 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9698 /* Simplify comparison of something with itself. (For IEEE
9699 floating-point, we can only do some of these simplifications.) */
9700 if (operand_equal_p (arg0, arg1, 0))
9702 switch (code)
9704 case EQ_EXPR:
9705 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9706 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9707 return constant_boolean_node (1, type);
9708 break;
9710 case GE_EXPR:
9711 case LE_EXPR:
9712 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9713 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9714 return constant_boolean_node (1, type);
9715 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9717 case NE_EXPR:
9718 /* For NE, we can only do this simplification for integer types,
9719 or when we don't honor IEEE floating point NaNs. */
9720 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9721 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9722 break;
9723 /* ... fall through ... */
9724 case GT_EXPR:
9725 case LT_EXPR:
9726 return constant_boolean_node (0, type);
9727 default:
9728 gcc_unreachable ();
9732 /* If we are comparing an expression that just has comparisons
9733 of two integer values, arithmetic expressions of those comparisons,
9734 and constants, we can simplify it. There are only three cases
9735 to check: the two values can either be equal, the first can be
9736 greater, or the second can be greater. Fold the expression for
9737 those three values. Since each value must be 0 or 1, we have
9738 eight possibilities, each of which corresponds to the constant 0
9739 or 1 or one of the six possible comparisons.
9741 This handles common cases like (a > b) == 0 but also handles
9742 expressions like ((x > y) - (y > x)) > 0, which supposedly
9743 occur in macroized code. */
9745 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9747 tree cval1 = 0, cval2 = 0;
9748 int save_p = 0;
9750 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9751 /* Don't handle degenerate cases here; they should already
9752 have been handled anyway. */
9753 && cval1 != 0 && cval2 != 0
9754 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9755 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9756 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9757 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9758 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9759 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9760 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9762 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9763 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9765 /* We can't just pass T to eval_subst in case cval1 or cval2
9766 was the same as ARG1. */
9768 tree high_result
9769 = fold_build2 (code, type,
9770 eval_subst (arg0, cval1, maxval,
9771 cval2, minval),
9772 arg1);
9773 tree equal_result
9774 = fold_build2 (code, type,
9775 eval_subst (arg0, cval1, maxval,
9776 cval2, maxval),
9777 arg1);
9778 tree low_result
9779 = fold_build2 (code, type,
9780 eval_subst (arg0, cval1, minval,
9781 cval2, maxval),
9782 arg1);
9784 /* All three of these results should be 0 or 1. Confirm they
9785 are. Then use those values to select the proper code
9786 to use. */
9788 if ((integer_zerop (high_result)
9789 || integer_onep (high_result))
9790 && (integer_zerop (equal_result)
9791 || integer_onep (equal_result))
9792 && (integer_zerop (low_result)
9793 || integer_onep (low_result)))
9795 /* Make a 3-bit mask with the high-order bit being the
9796 value for `>', the next for '=', and the low for '<'. */
9797 switch ((integer_onep (high_result) * 4)
9798 + (integer_onep (equal_result) * 2)
9799 + integer_onep (low_result))
9801 case 0:
9802 /* Always false. */
9803 return omit_one_operand (type, integer_zero_node, arg0);
9804 case 1:
9805 code = LT_EXPR;
9806 break;
9807 case 2:
9808 code = EQ_EXPR;
9809 break;
9810 case 3:
9811 code = LE_EXPR;
9812 break;
9813 case 4:
9814 code = GT_EXPR;
9815 break;
9816 case 5:
9817 code = NE_EXPR;
9818 break;
9819 case 6:
9820 code = GE_EXPR;
9821 break;
9822 case 7:
9823 /* Always true. */
9824 return omit_one_operand (type, integer_one_node, arg0);
9827 if (save_p)
9828 return save_expr (build2 (code, type, cval1, cval2));
9829 else
9830 return fold_build2 (code, type, cval1, cval2);
9835 /* If this is a comparison of a field, we may be able to simplify it. */
9836 if (((TREE_CODE (arg0) == COMPONENT_REF
9837 && lang_hooks.can_use_bit_fields_p ())
9838 || TREE_CODE (arg0) == BIT_FIELD_REF)
9839 && (code == EQ_EXPR || code == NE_EXPR)
9840 /* Handle the constant case even without -O
9841 to make sure the warnings are given. */
9842 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9844 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9845 if (t1)
9846 return t1;
9849 /* Fold a comparison of the address of COMPONENT_REFs with the same
9850 type and component to a comparison of the address of the base
9851 object. In short, &x->a OP &y->a to x OP y and
9852 &x->a OP &y.a to x OP &y */
9853 if (TREE_CODE (arg0) == ADDR_EXPR
9854 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9855 && TREE_CODE (arg1) == ADDR_EXPR
9856 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9858 tree cref0 = TREE_OPERAND (arg0, 0);
9859 tree cref1 = TREE_OPERAND (arg1, 0);
9860 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9862 tree op0 = TREE_OPERAND (cref0, 0);
9863 tree op1 = TREE_OPERAND (cref1, 0);
9864 return fold_build2 (code, type,
9865 build_fold_addr_expr (op0),
9866 build_fold_addr_expr (op1));
9870 /* Optimize comparisons of strlen vs zero to a compare of the
9871 first character of the string vs zero. To wit,
9872 strlen(ptr) == 0 => *ptr == 0
9873 strlen(ptr) != 0 => *ptr != 0
9874 Other cases should reduce to one of these two (or a constant)
9875 due to the return value of strlen being unsigned. */
9876 if ((code == EQ_EXPR || code == NE_EXPR)
9877 && integer_zerop (arg1)
9878 && TREE_CODE (arg0) == CALL_EXPR)
9880 tree fndecl = get_callee_fndecl (arg0);
9881 tree arglist;
9883 if (fndecl
9884 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9885 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9886 && (arglist = TREE_OPERAND (arg0, 1))
9887 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9888 && ! TREE_CHAIN (arglist))
9890 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9891 return fold_build2 (code, type, iref,
9892 build_int_cst (TREE_TYPE (iref), 0));
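/* Hypothetical call shapes matched above:
       strlen (p) == 0   becomes   *p == 0
       strlen (p) != 0   becomes   *p != 0  */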
9896 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9897 into a single range test. */
9898 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9899 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9900 && TREE_CODE (arg1) == INTEGER_CST
9901 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9902 && !integer_zerop (TREE_OPERAND (arg0, 1))
9903 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9904 && !TREE_OVERFLOW (arg1))
9906 t1 = fold_div_compare (code, type, arg0, arg1);
9907 if (t1 != NULL_TREE)
9908 return t1;
9911 if ((code == EQ_EXPR || code == NE_EXPR)
9912 && integer_zerop (arg1)
9913 && tree_expr_nonzero_p (arg0))
9915 tree res = constant_boolean_node (code == NE_EXPR, type);
9916 return omit_one_operand (type, res, arg0);
9919 t1 = fold_relational_const (code, type, arg0, arg1);
9920 return t1 == NULL_TREE ? NULL_TREE : t1;
9922 case UNORDERED_EXPR:
9923 case ORDERED_EXPR:
9924 case UNLT_EXPR:
9925 case UNLE_EXPR:
9926 case UNGT_EXPR:
9927 case UNGE_EXPR:
9928 case UNEQ_EXPR:
9929 case LTGT_EXPR:
9930 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9932 t1 = fold_relational_const (code, type, arg0, arg1);
9933 if (t1 != NULL_TREE)
9934 return t1;
9937 /* If the first operand is NaN, the result is constant. */
9938 if (TREE_CODE (arg0) == REAL_CST
9939 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9940 && (code != LTGT_EXPR || ! flag_trapping_math))
9942 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9943 ? integer_zero_node
9944 : integer_one_node;
9945 return omit_one_operand (type, t1, arg1);
9948 /* If the second operand is NaN, the result is constant. */
9949 if (TREE_CODE (arg1) == REAL_CST
9950 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9951 && (code != LTGT_EXPR || ! flag_trapping_math))
9953 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9954 ? integer_zero_node
9955 : integer_one_node;
9956 return omit_one_operand (type, t1, arg0);
9959 /* Simplify unordered comparison of something with itself. */
9960 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9961 && operand_equal_p (arg0, arg1, 0))
9962 return constant_boolean_node (1, type);
9964 if (code == LTGT_EXPR
9965 && !flag_trapping_math
9966 && operand_equal_p (arg0, arg1, 0))
9967 return constant_boolean_node (0, type);
9969 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9971 tree targ0 = strip_float_extensions (arg0);
9972 tree targ1 = strip_float_extensions (arg1);
9973 tree newtype = TREE_TYPE (targ0);
9975 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9976 newtype = TREE_TYPE (targ1);
9978 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9979 return fold_build2 (code, type, fold_convert (newtype, targ0),
9980 fold_convert (newtype, targ1));
9983 return NULL_TREE;
9985 case COMPOUND_EXPR:
9986 /* When pedantic, a compound expression can be neither an lvalue
9987 nor an integer constant expression. */
9988 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9989 return NULL_TREE;
9990 /* Don't let (0, 0) be a null pointer constant. */
9991 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9992 : fold_convert (type, arg1);
9993 return pedantic_non_lvalue (tem);
9995 case COMPLEX_EXPR:
9996 if (wins)
9997 return build_complex (type, arg0, arg1);
9998 return NULL_TREE;
10000 case ASSERT_EXPR:
10001 /* An ASSERT_EXPR should never be passed to fold_binary. */
10002 gcc_unreachable ();
10004 default:
10005 return NULL_TREE;
10006 } /* switch (code) */
10009 /* Callback for walk_tree, looking for LABEL_EXPR.
10010 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
10011 Do not check the sub-tree of GOTO_EXPR. */
10013 static tree
10014 contains_label_1 (tree *tp,
10015 int *walk_subtrees,
10016 void *data ATTRIBUTE_UNUSED)
10018 switch (TREE_CODE (*tp))
10020 case LABEL_EXPR:
10021 return *tp;
10022 case GOTO_EXPR:
10023 *walk_subtrees = 0;
10024 /* Fall through. */
10025 default:
10026 return NULL_TREE;
10030 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10031 accessible from outside the sub-tree. Returns true if such
10032 a label is found, false otherwise. */
10034 static bool
10035 contains_label_p (tree st)
10037 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
10040 /* Fold a ternary expression of code CODE and type TYPE with operands
10041 OP0, OP1, and OP2. Return the folded expression if folding is
10042 successful. Otherwise, return NULL_TREE. */
10044 tree
10045 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10047 tree tem;
10048 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10049 enum tree_code_class kind = TREE_CODE_CLASS (code);
10051 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10052 && TREE_CODE_LENGTH (code) == 3);
10054 /* Strip any conversions that don't change the mode. This is safe
10055 for every expression, except for a comparison expression because
10056 its signedness is derived from its operands. So, in the latter
10057 case, only strip conversions that don't change the signedness.
10059 Note that this is done as an internal manipulation within the
10060 constant folder, in order to find the simplest representation of
10061 the arguments so that their form can be studied. In any case,
10062 the appropriate type conversions should be put back in the tree
10063 that will get out of the constant folder. */
10064 if (op0)
10066 arg0 = op0;
10067 STRIP_NOPS (arg0);
10070 if (op1)
10072 arg1 = op1;
10073 STRIP_NOPS (arg1);
10076 switch (code)
10078 case COMPONENT_REF:
10079 if (TREE_CODE (arg0) == CONSTRUCTOR
10080 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10082 unsigned HOST_WIDE_INT idx;
10083 tree field, value;
10084 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10085 if (field == arg1)
10086 return value;
10088 return NULL_TREE;
10090 case COND_EXPR:
10091 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10092 so all simple results must be passed through pedantic_non_lvalue. */
10093 if (TREE_CODE (arg0) == INTEGER_CST)
10095 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10096 tem = integer_zerop (arg0) ? op2 : op1;
10097 /* Only optimize constant conditions when the selected branch
10098 has the same type as the COND_EXPR. This avoids optimizing
10099 away "c ? x : throw", where the throw has a void type.
10100 Avoid throwing away that operand which contains label. */
10101 if ((!TREE_SIDE_EFFECTS (unused_op)
10102 || !contains_label_p (unused_op))
10103 && (! VOID_TYPE_P (TREE_TYPE (tem))
10104 || VOID_TYPE_P (type)))
10105 return pedantic_non_lvalue (tem);
10106 return NULL_TREE;
10108 if (operand_equal_p (arg1, op2, 0))
10109 return pedantic_omit_one_operand (type, arg1, arg0);
10111 /* If we have A op B ? A : C, we may be able to convert this to a
10112 simpler expression, depending on the operation and the values
10113 of B and C. Signed zeros prevent all of these transformations,
10114 for reasons given above each one.
10116 Also try swapping the arguments and inverting the conditional. */
10117 if (COMPARISON_CLASS_P (arg0)
10118 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10119 arg1, TREE_OPERAND (arg0, 1))
10120 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10122 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10123 if (tem)
10124 return tem;
10127 if (COMPARISON_CLASS_P (arg0)
10128 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10129 op2,
10130 TREE_OPERAND (arg0, 1))
10131 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10133 tem = invert_truthvalue (arg0);
10134 if (COMPARISON_CLASS_P (tem))
10136 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10137 if (tem)
10138 return tem;
10142 /* If the second operand is simpler than the third, swap them
10143 since that produces better jump optimization results. */
10144 if (truth_value_p (TREE_CODE (arg0))
10145 && tree_swap_operands_p (op1, op2, false))
10147 /* See if this can be inverted. If it can't, possibly because
10148 it was a floating-point inequality comparison, don't do
10149 anything. */
10150 tem = invert_truthvalue (arg0);
10152 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10153 return fold_build3 (code, type, tem, op2, op1);
10156 /* Convert A ? 1 : 0 to simply A. */
10157 if (integer_onep (op1)
10158 && integer_zerop (op2)
10159 /* If we try to convert OP0 to our type, the
10160 call to fold will try to move the conversion inside
10161 a COND, which will recurse. In that case, the COND_EXPR
10162 is probably the best choice, so leave it alone. */
10163 && type == TREE_TYPE (arg0))
10164 return pedantic_non_lvalue (arg0);
10166 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10167 over COND_EXPR in cases such as floating point comparisons. */
10168 if (integer_zerop (op1)
10169 && integer_onep (op2)
10170 && truth_value_p (TREE_CODE (arg0)))
10171 return pedantic_non_lvalue (fold_convert (type,
10172 invert_truthvalue (arg0)));
10174 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10175 if (TREE_CODE (arg0) == LT_EXPR
10176 && integer_zerop (TREE_OPERAND (arg0, 1))
10177 && integer_zerop (op2)
10178 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10179 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10180 TREE_TYPE (tem), tem, arg1));
10182 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10183 already handled above. */
10184 if (TREE_CODE (arg0) == BIT_AND_EXPR
10185 && integer_onep (TREE_OPERAND (arg0, 1))
10186 && integer_zerop (op2)
10187 && integer_pow2p (arg1))
10189 tree tem = TREE_OPERAND (arg0, 0);
10190 STRIP_NOPS (tem);
10191 if (TREE_CODE (tem) == RSHIFT_EXPR
10192 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10193 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10194 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10195 return fold_build2 (BIT_AND_EXPR, type,
10196 TREE_OPERAND (tem, 0), arg1);
10199 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10200 is probably obsolete because the first operand should be a
10201 truth value (that's why we have the two cases above), but let's
10202 leave it in until we can confirm this for all front-ends. */
10203 if (integer_zerop (op2)
10204 && TREE_CODE (arg0) == NE_EXPR
10205 && integer_zerop (TREE_OPERAND (arg0, 1))
10206 && integer_pow2p (arg1)
10207 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10208 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10209 arg1, OEP_ONLY_CONST))
10210 return pedantic_non_lvalue (fold_convert (type,
10211 TREE_OPERAND (arg0, 0)));
10213 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10214 if (integer_zerop (op2)
10215 && truth_value_p (TREE_CODE (arg0))
10216 && truth_value_p (TREE_CODE (arg1)))
10217 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10219 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10220 if (integer_onep (op2)
10221 && truth_value_p (TREE_CODE (arg0))
10222 && truth_value_p (TREE_CODE (arg1)))
10224 /* Only perform transformation if ARG0 is easily inverted. */
10225 tem = invert_truthvalue (arg0);
10226 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10227 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10230 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10231 if (integer_zerop (arg1)
10232 && truth_value_p (TREE_CODE (arg0))
10233 && truth_value_p (TREE_CODE (op2)))
10235 /* Only perform transformation if ARG0 is easily inverted. */
10236 tem = invert_truthvalue (arg0);
10237 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10238 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10241 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10242 if (integer_onep (arg1)
10243 && truth_value_p (TREE_CODE (arg0))
10244 && truth_value_p (TREE_CODE (op2)))
10245 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
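/* Illustrative summary of the truth-valued COND_EXPR rewrites above,
   assuming a and b are truth values:
       a ? b : 0  ->  a && b        a ? b : 1  ->  !a || b
       a ? 0 : b  ->  !a && b       a ? 1 : b  ->  a || b  */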
10247 return NULL_TREE;
10249 case CALL_EXPR:
10250 /* Check for a built-in function. */
10251 if (TREE_CODE (op0) == ADDR_EXPR
10252 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10253 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10254 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10255 return NULL_TREE;
10257 case BIT_FIELD_REF:
10258 if (TREE_CODE (arg0) == VECTOR_CST
10259 && type == TREE_TYPE (TREE_TYPE (arg0))
10260 && host_integerp (arg1, 1)
10261 && host_integerp (op2, 1))
10263 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10264 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10266 if (width != 0
10267 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10268 && (idx % width) == 0
10269 && (idx = idx / width)
10270 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10272 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10273 while (idx-- > 0 && elements)
10274 elements = TREE_CHAIN (elements);
10275 if (elements)
10276 return TREE_VALUE (elements);
10277 else
10278 return fold_convert (type, integer_zero_node);
10281 return NULL_TREE;
10283 default:
10284 return NULL_TREE;
10285 } /* switch (code) */
10288 /* Perform constant folding and related simplification of EXPR.
10289 The related simplifications include x*1 => x, x*0 => 0, etc.,
10290 and application of the associative law.
10291 NOP_EXPR conversions may be removed freely (as long as we
10292 are careful not to change the type of the overall expression).
10293 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10294 but we can constant-fold them if they have constant operands. */
10296 #ifdef ENABLE_FOLD_CHECKING
10297 # define fold(x) fold_1 (x)
10298 static tree fold_1 (tree);
10299 static
10300 #endif
10301 tree
10302 fold (tree expr)
10304 const tree t = expr;
10305 enum tree_code code = TREE_CODE (t);
10306 enum tree_code_class kind = TREE_CODE_CLASS (code);
10307 tree tem;
10309 /* Return right away if a constant. */
10310 if (kind == tcc_constant)
10311 return t;
10313 if (IS_EXPR_CODE_CLASS (kind))
10315 tree type = TREE_TYPE (t);
10316 tree op0, op1, op2;
10318 switch (TREE_CODE_LENGTH (code))
10320 case 1:
10321 op0 = TREE_OPERAND (t, 0);
10322 tem = fold_unary (code, type, op0);
10323 return tem ? tem : expr;
10324 case 2:
10325 op0 = TREE_OPERAND (t, 0);
10326 op1 = TREE_OPERAND (t, 1);
10327 tem = fold_binary (code, type, op0, op1);
10328 return tem ? tem : expr;
10329 case 3:
10330 op0 = TREE_OPERAND (t, 0);
10331 op1 = TREE_OPERAND (t, 1);
10332 op2 = TREE_OPERAND (t, 2);
10333 tem = fold_ternary (code, type, op0, op1, op2);
10334 return tem ? tem : expr;
10335 default:
10336 break;
10340 switch (code)
10342 case CONST_DECL:
10343 return fold (DECL_INITIAL (t));
10345 default:
10346 return t;
10347 } /* switch (code) */
10350 #ifdef ENABLE_FOLD_CHECKING
10351 #undef fold
10353 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10354 static void fold_check_failed (tree, tree);
10355 void print_fold_checksum (tree);
10357 /* When --enable-checking=fold, compute a digest of expr before
10358 and after the actual fold call, to verify that fold did not
10359 accidentally change the original expr. */
10361 tree
10362 fold (tree expr)
10364 tree ret;
10365 struct md5_ctx ctx;
10366 unsigned char checksum_before[16], checksum_after[16];
10367 htab_t ht;
10369 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10370 md5_init_ctx (&ctx);
10371 fold_checksum_tree (expr, &ctx, ht);
10372 md5_finish_ctx (&ctx, checksum_before);
10373 htab_empty (ht);
10375 ret = fold_1 (expr);
10377 md5_init_ctx (&ctx);
10378 fold_checksum_tree (expr, &ctx, ht);
10379 md5_finish_ctx (&ctx, checksum_after);
10380 htab_delete (ht);
10382 if (memcmp (checksum_before, checksum_after, 16))
10383 fold_check_failed (expr, ret);
10385 return ret;
10388 void
10389 print_fold_checksum (tree expr)
10391 struct md5_ctx ctx;
10392 unsigned char checksum[16], cnt;
10393 htab_t ht;
10395 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10396 md5_init_ctx (&ctx);
10397 fold_checksum_tree (expr, &ctx, ht);
10398 md5_finish_ctx (&ctx, checksum);
10399 htab_delete (ht);
10400 for (cnt = 0; cnt < 16; ++cnt)
10401 fprintf (stderr, "%02x", checksum[cnt]);
10402 putc ('\n', stderr);
10405 static void
10406 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10408 internal_error ("fold check: original tree changed by fold");
10411 static void
10412 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10414 void **slot;
10415 enum tree_code code;
10416 char buf[sizeof (struct tree_function_decl)];
10417 int i, len;
10419 recursive_label:
10421 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10422 <= sizeof (struct tree_function_decl))
10423 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10424 if (expr == NULL)
10425 return;
10426 slot = htab_find_slot (ht, expr, INSERT);
10427 if (*slot != NULL)
10428 return;
10429 *slot = expr;
10430 code = TREE_CODE (expr);
10431 if (TREE_CODE_CLASS (code) == tcc_declaration
10432 && DECL_ASSEMBLER_NAME_SET_P (expr))
10434 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10435 memcpy (buf, expr, tree_size (expr));
10436 expr = (tree) buf;
10437 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10439 else if (TREE_CODE_CLASS (code) == tcc_type
10440 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10441 || TYPE_CACHED_VALUES_P (expr)
10442 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10444 /* Allow these fields to be modified. */
10445 memcpy (buf, expr, tree_size (expr));
10446 expr = (tree) buf;
10447 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10448 TYPE_POINTER_TO (expr) = NULL;
10449 TYPE_REFERENCE_TO (expr) = NULL;
10450 if (TYPE_CACHED_VALUES_P (expr))
10452 TYPE_CACHED_VALUES_P (expr) = 0;
10453 TYPE_CACHED_VALUES (expr) = NULL;
10456 md5_process_bytes (expr, tree_size (expr), ctx);
10457 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10458 if (TREE_CODE_CLASS (code) != tcc_type
10459 && TREE_CODE_CLASS (code) != tcc_declaration
10460 && code != TREE_LIST)
10461 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10462 switch (TREE_CODE_CLASS (code))
10464 case tcc_constant:
10465 switch (code)
10467 case STRING_CST:
10468 md5_process_bytes (TREE_STRING_POINTER (expr),
10469 TREE_STRING_LENGTH (expr), ctx);
10470 break;
10471 case COMPLEX_CST:
10472 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10473 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10474 break;
10475 case VECTOR_CST:
10476 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10477 break;
10478 default:
10479 break;
10481 break;
10482 case tcc_exceptional:
10483 switch (code)
10485 case TREE_LIST:
10486 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10487 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10488 expr = TREE_CHAIN (expr);
10489 goto recursive_label;
10490 break;
10491 case TREE_VEC:
10492 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10493 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10494 break;
10495 default:
10496 break;
10498 break;
10499 case tcc_expression:
10500 case tcc_reference:
10501 case tcc_comparison:
10502 case tcc_unary:
10503 case tcc_binary:
10504 case tcc_statement:
10505 len = TREE_CODE_LENGTH (code);
10506 for (i = 0; i < len; ++i)
10507 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10508 break;
10509 case tcc_declaration:
10510 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10511 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10512 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10513 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10514 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10515 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10516 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10517 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10518 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10520 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10522 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10523 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10524 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10526 break;
10527 case tcc_type:
10528 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10529 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10530 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10531 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10532 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10533 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10534 if (INTEGRAL_TYPE_P (expr)
10535 || SCALAR_FLOAT_TYPE_P (expr))
10537 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10538 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10540 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10541 if (TREE_CODE (expr) == RECORD_TYPE
10542 || TREE_CODE (expr) == UNION_TYPE
10543 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10544 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10545 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10546 break;
10547 default:
10548 break;
10552 #endif
10554 /* Fold a unary tree expression with code CODE of type TYPE with an
10555 operand OP0. Return a folded expression if successful. Otherwise,
10556 return a tree expression with code CODE of type TYPE with an
10557 operand OP0. */
10559 tree
10560 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10562 tree tem;
10563 #ifdef ENABLE_FOLD_CHECKING
10564 unsigned char checksum_before[16], checksum_after[16];
10565 struct md5_ctx ctx;
10566 htab_t ht;
10568 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10569 md5_init_ctx (&ctx);
10570 fold_checksum_tree (op0, &ctx, ht);
10571 md5_finish_ctx (&ctx, checksum_before);
10572 htab_empty (ht);
10573 #endif
10575 tem = fold_unary (code, type, op0);
10576 if (!tem)
10577 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10579 #ifdef ENABLE_FOLD_CHECKING
10580 md5_init_ctx (&ctx);
10581 fold_checksum_tree (op0, &ctx, ht);
10582 md5_finish_ctx (&ctx, checksum_after);
10583 htab_delete (ht);
10585 if (memcmp (checksum_before, checksum_after, 16))
10586 fold_check_failed (op0, tem);
10587 #endif
10588 return tem;
10591 /* Fold a binary tree expression with code CODE of type TYPE with
10592 operands OP0 and OP1. Return a folded expression if successful.
10593 Otherwise, return a tree expression with code CODE of type TYPE
10594 with operands OP0 and OP1. */
10596 tree
10597 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10598 MEM_STAT_DECL)
10600 tree tem;
10601 #ifdef ENABLE_FOLD_CHECKING
10602 unsigned char checksum_before_op0[16],
10603 checksum_before_op1[16],
10604 checksum_after_op0[16],
10605 checksum_after_op1[16];
10606 struct md5_ctx ctx;
10607 htab_t ht;
10609 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10610 md5_init_ctx (&ctx);
10611 fold_checksum_tree (op0, &ctx, ht);
10612 md5_finish_ctx (&ctx, checksum_before_op0);
10613 htab_empty (ht);
10615 md5_init_ctx (&ctx);
10616 fold_checksum_tree (op1, &ctx, ht);
10617 md5_finish_ctx (&ctx, checksum_before_op1);
10618 htab_empty (ht);
10619 #endif
10621 tem = fold_binary (code, type, op0, op1);
10622 if (!tem)
10623 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10625 #ifdef ENABLE_FOLD_CHECKING
10626 md5_init_ctx (&ctx);
10627 fold_checksum_tree (op0, &ctx, ht);
10628 md5_finish_ctx (&ctx, checksum_after_op0);
10629 htab_empty (ht);
10631 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10632 fold_check_failed (op0, tem);
10634 md5_init_ctx (&ctx);
10635 fold_checksum_tree (op1, &ctx, ht);
10636 md5_finish_ctx (&ctx, checksum_after_op1);
10637 htab_delete (ht);
10639 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10640 fold_check_failed (op1, tem);
10641 #endif
10642 return tem;
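/* Illustrative usage (editorial sketch): fold_build2 (PLUS_EXPR, type, a, b)
   either folds the addition (two INTEGER_CST operands are combined into a
   single constant) or builds a new PLUS_EXPR node.  The ENABLE_FOLD_CHECKING
   blocks above use MD5 checksums to verify that folding did not modify the
   operand trees in place.  */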
10645 /* Fold a ternary tree expression with code CODE of type TYPE with
10646 operands OP0, OP1, and OP2. Return a folded expression if
10647 successful. Otherwise, return a tree expression with code CODE of
10648 type TYPE with operands OP0, OP1, and OP2. */
10650 tree
10651 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10652 MEM_STAT_DECL)
10654 tree tem;
10655 #ifdef ENABLE_FOLD_CHECKING
10656 unsigned char checksum_before_op0[16],
10657 checksum_before_op1[16],
10658 checksum_before_op2[16],
10659 checksum_after_op0[16],
10660 checksum_after_op1[16],
10661 checksum_after_op2[16];
10662 struct md5_ctx ctx;
10663 htab_t ht;
10665 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10666 md5_init_ctx (&ctx);
10667 fold_checksum_tree (op0, &ctx, ht);
10668 md5_finish_ctx (&ctx, checksum_before_op0);
10669 htab_empty (ht);
10671 md5_init_ctx (&ctx);
10672 fold_checksum_tree (op1, &ctx, ht);
10673 md5_finish_ctx (&ctx, checksum_before_op1);
10674 htab_empty (ht);
10676 md5_init_ctx (&ctx);
10677 fold_checksum_tree (op2, &ctx, ht);
10678 md5_finish_ctx (&ctx, checksum_before_op2);
10679 htab_empty (ht);
10680 #endif
10682 tem = fold_ternary (code, type, op0, op1, op2);
10683 if (!tem)
10684 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10686 #ifdef ENABLE_FOLD_CHECKING
10687 md5_init_ctx (&ctx);
10688 fold_checksum_tree (op0, &ctx, ht);
10689 md5_finish_ctx (&ctx, checksum_after_op0);
10690 htab_empty (ht);
10692 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10693 fold_check_failed (op0, tem);
10695 md5_init_ctx (&ctx);
10696 fold_checksum_tree (op1, &ctx, ht);
10697 md5_finish_ctx (&ctx, checksum_after_op1);
10698 htab_empty (ht);
10700 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10701 fold_check_failed (op1, tem);
10703 md5_init_ctx (&ctx);
10704 fold_checksum_tree (op2, &ctx, ht);
10705 md5_finish_ctx (&ctx, checksum_after_op2);
10706 htab_delete (ht);
10708 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10709 fold_check_failed (op2, tem);
10710 #endif
10711 return tem;
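/* Illustrative usage (editorial sketch): a typical ternary fold is
   fold_build3 (COND_EXPR, type, cond, then_expr, else_expr), which
   collapses to THEN_EXPR or ELSE_EXPR when COND folds to a constant.  */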
10714 /* Perform constant folding and related simplification of initializer
10715 expression EXPR. These behave identically to "fold_buildN" but ignore
10716 potential run-time traps and exceptions that fold must preserve. */
10718 #define START_FOLD_INIT \
10719 int saved_signaling_nans = flag_signaling_nans;\
10720 int saved_trapping_math = flag_trapping_math;\
10721 int saved_rounding_math = flag_rounding_math;\
10722 int saved_trapv = flag_trapv;\
10723 flag_signaling_nans = 0;\
10724 flag_trapping_math = 0;\
10725 flag_rounding_math = 0;\
10726 flag_trapv = 0
10728 #define END_FOLD_INIT \
10729 flag_signaling_nans = saved_signaling_nans;\
10730 flag_trapping_math = saved_trapping_math;\
10731 flag_rounding_math = saved_rounding_math;\
10732 flag_trapv = saved_trapv
10734 tree
10735 fold_build1_initializer (enum tree_code code, tree type, tree op)
10737 tree result;
10738 START_FOLD_INIT;
10740 result = fold_build1 (code, type, op);
10742 END_FOLD_INIT;
10743 return result;
10746 tree
10747 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10749 tree result;
10750 START_FOLD_INIT;
10752 result = fold_build2 (code, type, op0, op1);
10754 END_FOLD_INIT;
10755 return result;
10758 tree
10759 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10760 tree op2)
10762 tree result;
10763 START_FOLD_INIT;
10765 result = fold_build3 (code, type, op0, op1, op2);
10767 END_FOLD_INIT;
10768 return result;
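/* Illustrative usage (editorial sketch): for a static initializer such as
   "double d = 1.0 / 3.0;", a front end can call

     fold_build2_initializer (RDIV_EXPR, double_type_node, one, three);

   (ONE and THREE standing for the REAL_CST operands), and the division is
   folded even under -ftrapping-math or -frounding-math, because
   START_FOLD_INIT temporarily clears those flags.  */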
10771 #undef START_FOLD_INIT
10772 #undef END_FOLD_INIT
10774 /* Determine if the first argument is a multiple of the second argument.
10775 Return 0 if it is not, or if we cannot easily determine that it is.
10777 An example of the sort of thing we care about (at this point; this routine
10778 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10779 fold cases do now) is discovering that
10781 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10783 is a multiple of
10785 SAVE_EXPR (J * 8)
10787 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10789 This code also handles discovering that
10791 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10793 is a multiple of 8 so we don't have to worry about dealing with a
10794 possible remainder.
10796 Note that we *look* inside a SAVE_EXPR only to determine how it was
10797 calculated; it is not safe for fold to do much of anything else with the
10798 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10799 at run time. For example, the latter example above *cannot* be implemented
10800 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10801 evaluation time of the original SAVE_EXPR is not necessarily the same at
10802 the time the new expression is evaluated. The only optimization of this
10803 sort that would be valid is changing
10805 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10807 divided by 8 to
10809 SAVE_EXPR (I) * SAVE_EXPR (J)
10811 (where the same SAVE_EXPR (J) is used in the original and the
10812 transformed version). */
10814 static int
10815 multiple_of_p (tree type, tree top, tree bottom)
10817 if (operand_equal_p (top, bottom, 0))
10818 return 1;
10820 if (TREE_CODE (type) != INTEGER_TYPE)
10821 return 0;
10823 switch (TREE_CODE (top))
10825 case BIT_AND_EXPR:
10826 /* Bitwise and provides a power of two multiple. If the mask is
10827 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10828 if (!integer_pow2p (bottom))
10829 return 0;
10830 /* FALLTHRU */
10832 case MULT_EXPR:
10833 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10834 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10836 case PLUS_EXPR:
10837 case MINUS_EXPR:
10838 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10839 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10841 case LSHIFT_EXPR:
10842 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10843 {
10844 tree op1, t1;
10846 op1 = TREE_OPERAND (top, 1);
10847 /* const_binop may not detect overflow correctly,
10848 so check for it explicitly here. */
10849 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10850 > TREE_INT_CST_LOW (op1)
10851 && TREE_INT_CST_HIGH (op1) == 0
10852 && 0 != (t1 = fold_convert (type,
10853 const_binop (LSHIFT_EXPR,
10854 size_one_node,
10855 op1, 0)))
10856 && ! TREE_OVERFLOW (t1))
10857 return multiple_of_p (type, t1, bottom);
10858 }
10859 return 0;
10861 case NOP_EXPR:
10862 /* Can't handle conversions from non-integral or wider integral type. */
10863 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10864 || (TYPE_PRECISION (type)
10865 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10866 return 0;
10868 /* ... fall through ... */
10870 case SAVE_EXPR:
10871 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10873 case INTEGER_CST:
10874 if (TREE_CODE (bottom) != INTEGER_CST
10875 || (TYPE_UNSIGNED (type)
10876 && (tree_int_cst_sgn (top) < 0
10877 || tree_int_cst_sgn (bottom) < 0)))
10878 return 0;
10879 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10880 top, bottom, 0));
10882 default:
10883 return 0;
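/* Illustrative example (editorial sketch): with TOP the tree for J * 8 and
   BOTTOM the constant 4, the MULT_EXPR case returns 1 because the
   INTEGER_CST operand 8 is a multiple of 4.  For J * 6 and BOTTOM 4 the
   result is 0: neither operand is itself a multiple of 4, and the routine
   does not reason about the product as a whole.  */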
10887 /* Return true if `t' is known to be non-negative. */
10889 int
10890 tree_expr_nonnegative_p (tree t)
10892 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10893 return 1;
10895 switch (TREE_CODE (t))
10897 case ABS_EXPR:
10898 /* We can't return 1 if flag_wrapv is set because
10899 ABS_EXPR<INT_MIN> = INT_MIN. */
10900 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10901 return 1;
10902 break;
10904 case INTEGER_CST:
10905 return tree_int_cst_sgn (t) >= 0;
10907 case REAL_CST:
10908 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10910 case PLUS_EXPR:
10911 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10912 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10913 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10915 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10916 both unsigned and at least 2 bits shorter than the result. */
10917 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10918 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10919 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10920 {
10921 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10922 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10923 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10924 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10925 {
10926 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10927 TYPE_PRECISION (inner2)) + 1;
10928 return prec < TYPE_PRECISION (TREE_TYPE (t));
10929 }
10930 }
10931 break;
10933 case MULT_EXPR:
10934 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10935 {
10936 /* x * x for floating point x is always non-negative. */
10937 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10938 return 1;
10939 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10940 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10941 }
10943 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10944 both unsigned and their combined precision is less than the result's. */
10945 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10946 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10947 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10948 {
10949 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10950 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10951 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10952 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10953 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10954 < TYPE_PRECISION (TREE_TYPE (t));
10955 }
10956 return 0;
10958 case BIT_AND_EXPR:
10959 case MAX_EXPR:
10960 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10961 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10963 case BIT_IOR_EXPR:
10964 case BIT_XOR_EXPR:
10965 case MIN_EXPR:
10966 case RDIV_EXPR:
10967 case TRUNC_DIV_EXPR:
10968 case CEIL_DIV_EXPR:
10969 case FLOOR_DIV_EXPR:
10970 case ROUND_DIV_EXPR:
10971 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10972 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10974 case TRUNC_MOD_EXPR:
10975 case CEIL_MOD_EXPR:
10976 case FLOOR_MOD_EXPR:
10977 case ROUND_MOD_EXPR:
10978 case SAVE_EXPR:
10979 case NON_LVALUE_EXPR:
10980 case FLOAT_EXPR:
10981 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10983 case COMPOUND_EXPR:
10984 case MODIFY_EXPR:
10985 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10987 case BIND_EXPR:
10988 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10990 case COND_EXPR:
10991 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10992 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10994 case NOP_EXPR:
10996 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10997 tree outer_type = TREE_TYPE (t);
10999 if (TREE_CODE (outer_type) == REAL_TYPE)
11001 if (TREE_CODE (inner_type) == REAL_TYPE)
11002 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11003 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11005 if (TYPE_UNSIGNED (inner_type))
11006 return 1;
11007 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11010 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11012 if (TREE_CODE (inner_type) == REAL_TYPE)
11013 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11014 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11015 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11016 && TYPE_UNSIGNED (inner_type);
11019 break;
11021 case TARGET_EXPR:
11023 tree temp = TARGET_EXPR_SLOT (t);
11024 t = TARGET_EXPR_INITIAL (t);
11026 /* If the initializer is non-void, then it's a normal expression
11027 that will be assigned to the slot. */
11028 if (!VOID_TYPE_P (t))
11029 return tree_expr_nonnegative_p (t);
11031 /* Otherwise, the initializer sets the slot in some way. One common
11032 way is an assignment statement at the end of the initializer. */
11033 while (1)
11035 if (TREE_CODE (t) == BIND_EXPR)
11036 t = expr_last (BIND_EXPR_BODY (t));
11037 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11038 || TREE_CODE (t) == TRY_CATCH_EXPR)
11039 t = expr_last (TREE_OPERAND (t, 0));
11040 else if (TREE_CODE (t) == STATEMENT_LIST)
11041 t = expr_last (t);
11042 else
11043 break;
11045 if (TREE_CODE (t) == MODIFY_EXPR
11046 && TREE_OPERAND (t, 0) == temp)
11047 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11049 return 0;
11052 case CALL_EXPR:
11054 tree fndecl = get_callee_fndecl (t);
11055 tree arglist = TREE_OPERAND (t, 1);
11056 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11057 switch (DECL_FUNCTION_CODE (fndecl))
11059 CASE_FLT_FN (BUILT_IN_ACOS):
11060 CASE_FLT_FN (BUILT_IN_ACOSH):
11061 CASE_FLT_FN (BUILT_IN_CABS):
11062 CASE_FLT_FN (BUILT_IN_COSH):
11063 CASE_FLT_FN (BUILT_IN_ERFC):
11064 CASE_FLT_FN (BUILT_IN_EXP):
11065 CASE_FLT_FN (BUILT_IN_EXP10):
11066 CASE_FLT_FN (BUILT_IN_EXP2):
11067 CASE_FLT_FN (BUILT_IN_FABS):
11068 CASE_FLT_FN (BUILT_IN_FDIM):
11069 CASE_FLT_FN (BUILT_IN_HYPOT):
11070 CASE_FLT_FN (BUILT_IN_POW10):
11071 CASE_INT_FN (BUILT_IN_FFS):
11072 CASE_INT_FN (BUILT_IN_PARITY):
11073 CASE_INT_FN (BUILT_IN_POPCOUNT):
11074 /* Always true. */
11075 return 1;
11077 CASE_FLT_FN (BUILT_IN_SQRT):
11078 /* sqrt(-0.0) is -0.0. */
11079 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11080 return 1;
11081 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11083 CASE_FLT_FN (BUILT_IN_ASINH):
11084 CASE_FLT_FN (BUILT_IN_ATAN):
11085 CASE_FLT_FN (BUILT_IN_ATANH):
11086 CASE_FLT_FN (BUILT_IN_CBRT):
11087 CASE_FLT_FN (BUILT_IN_CEIL):
11088 CASE_FLT_FN (BUILT_IN_ERF):
11089 CASE_FLT_FN (BUILT_IN_EXPM1):
11090 CASE_FLT_FN (BUILT_IN_FLOOR):
11091 CASE_FLT_FN (BUILT_IN_FMOD):
11092 CASE_FLT_FN (BUILT_IN_FREXP):
11093 CASE_FLT_FN (BUILT_IN_LCEIL):
11094 CASE_FLT_FN (BUILT_IN_LDEXP):
11095 CASE_FLT_FN (BUILT_IN_LFLOOR):
11096 CASE_FLT_FN (BUILT_IN_LLCEIL):
11097 CASE_FLT_FN (BUILT_IN_LLFLOOR):
11098 CASE_FLT_FN (BUILT_IN_LLRINT):
11099 CASE_FLT_FN (BUILT_IN_LLROUND):
11100 CASE_FLT_FN (BUILT_IN_LRINT):
11101 CASE_FLT_FN (BUILT_IN_LROUND):
11102 CASE_FLT_FN (BUILT_IN_MODF):
11103 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11104 CASE_FLT_FN (BUILT_IN_POW):
11105 CASE_FLT_FN (BUILT_IN_RINT):
11106 CASE_FLT_FN (BUILT_IN_ROUND):
11107 CASE_FLT_FN (BUILT_IN_SIGNBIT):
11108 CASE_FLT_FN (BUILT_IN_SINH):
11109 CASE_FLT_FN (BUILT_IN_TANH):
11110 CASE_FLT_FN (BUILT_IN_TRUNC):
11111 /* True if the 1st argument is nonnegative. */
11112 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11114 CASE_FLT_FN (BUILT_IN_FMAX):
11115 /* True if the 1st OR 2nd arguments are nonnegative. */
11116 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11117 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11119 CASE_FLT_FN (BUILT_IN_FMIN):
11120 /* True if the 1st AND 2nd arguments are nonnegative. */
11121 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11122 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11124 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11125 /* True if the 2nd argument is nonnegative. */
11126 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11128 default:
11129 break;
11133 /* ... fall through ... */
11135 default:
11136 if (truth_value_p (TREE_CODE (t)))
11137 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11138 return 1;
11141 /* We don't know the sign of `t', so be conservative and return false. */
11142 return 0;
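/* Illustrative examples (editorial sketch): tree_expr_nonnegative_p returns
   1 for any expression of unsigned type, for ABS_EXPR <x> when -fwrapv is
   not in effect, and for calls such as fabs (x); it returns 0 for a plain
   signed variable, whose sign is merely unknown rather than known to be
   negative.  */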
11145 /* Return true when T is an address and is known to be nonzero.
11146 For floating point we further ensure that T is not denormal.
11147 Similar logic is present in nonzero_address in rtlanal.h. */
11149 bool
11150 tree_expr_nonzero_p (tree t)
11152 tree type = TREE_TYPE (t);
11154 /* Doing something useful for floating point would need more work. */
11155 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11156 return false;
11158 switch (TREE_CODE (t))
11160 case ABS_EXPR:
11161 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11163 case INTEGER_CST:
11164 /* We used to test for !integer_zerop here. This does not work correctly
11165 if TREE_CONSTANT_OVERFLOW (t). */
11166 return (TREE_INT_CST_LOW (t) != 0
11167 || TREE_INT_CST_HIGH (t) != 0);
11169 case PLUS_EXPR:
11170 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11171 {
11172 /* In the presence of negative values it is hard
11173 to say anything definite. */
11174 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11175 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11176 return false;
11177 /* One of the operands must be positive and the other non-negative. */
11178 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11179 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11180 }
11181 break;
11183 case MULT_EXPR:
11184 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11185 {
11186 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11187 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11188 }
11189 break;
11191 case NOP_EXPR:
11192 {
11193 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11194 tree outer_type = TREE_TYPE (t);
11196 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11197 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11198 }
11199 break;
11201 case ADDR_EXPR:
11202 {
11203 tree base = get_base_address (TREE_OPERAND (t, 0));
11205 if (!base)
11206 return false;
11208 /* Weak declarations may link to NULL. */
11209 if (VAR_OR_FUNCTION_DECL_P (base))
11210 return !DECL_WEAK (base);
11212 /* Constants are never weak. */
11213 if (CONSTANT_CLASS_P (base))
11214 return true;
11216 return false;
11217 }
11219 case COND_EXPR:
11220 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11221 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11223 case MIN_EXPR:
11224 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11225 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11227 case MAX_EXPR:
11228 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11229 {
11230 /* When both operands are nonzero, then MAX must be too. */
11231 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11232 return true;
11234 /* MAX where operand 0 is positive is positive. */
11235 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11236 }
11237 /* MAX where operand 1 is positive is positive. */
11238 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11239 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11240 return true;
11241 break;
11243 case COMPOUND_EXPR:
11244 case MODIFY_EXPR:
11245 case BIND_EXPR:
11246 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11248 case SAVE_EXPR:
11249 case NON_LVALUE_EXPR:
11250 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11252 case BIT_IOR_EXPR:
11253 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11254 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11256 case CALL_EXPR:
11257 return alloca_call_p (t);
11259 default:
11260 break;
11262 return false;
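/* Illustrative examples (editorial sketch): tree_expr_nonzero_p is true for
   the address of a non-weak declaration (ADDR_EXPR case) and for X | 1
   (BIT_IOR_EXPR case, since the constant operand is nonzero); the address
   of a weak symbol may legitimately be null, so DECL_WEAK forces false.  */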
11265 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11266 attempt to fold the expression to a constant without modifying TYPE,
11267 OP0 or OP1.
11269 If the expression could be simplified to a constant, then return
11270 the constant. If the expression would not be simplified to a
11271 constant, then return NULL_TREE. */
11273 tree
11274 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11276 tree tem = fold_binary (code, type, op0, op1);
11277 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11280 /* Given the components of a unary expression CODE, TYPE and OP0,
11281 attempt to fold the expression to a constant without modifying
11282 TYPE or OP0.
11284 If the expression could be simplified to a constant, then return
11285 the constant. If the expression would not be simplified to a
11286 constant, then return NULL_TREE. */
11288 tree
11289 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11291 tree tem = fold_unary (code, type, op0);
11292 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
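/* Illustrative usage (editorial sketch): folding PLUS_EXPR of the
   INTEGER_CSTs 2 and 3 through fold_binary_to_constant yields the constant
   5, whereas the same call with a non-constant operand yields NULL_TREE,
   because the folded result is not TREE_CONSTANT.  */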
11295 /* If EXP represents referencing an element in a constant string
11296 (either via pointer arithmetic or array indexing), return the
11297 tree representing the value accessed, otherwise return NULL. */
11299 tree
11300 fold_read_from_constant_string (tree exp)
11302 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11304 tree exp1 = TREE_OPERAND (exp, 0);
11305 tree index;
11306 tree string;
11308 if (TREE_CODE (exp) == INDIRECT_REF)
11309 string = string_constant (exp1, &index);
11310 else
11312 tree low_bound = array_ref_low_bound (exp);
11313 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11315 /* Optimize the special-case of a zero lower bound.
11317 We convert the low_bound to sizetype to avoid some problems
11318 with constant folding. (E.g. suppose the lower bound is 1,
11319 and its mode is QI. Without the conversion, (ARRAY
11320 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11321 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11322 if (! integer_zerop (low_bound))
11323 index = size_diffop (index, fold_convert (sizetype, low_bound));
11325 string = exp1;
11328 if (string
11329 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11330 && TREE_CODE (string) == STRING_CST
11331 && TREE_CODE (index) == INTEGER_CST
11332 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11333 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11334 == MODE_INT)
11335 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11336 return fold_convert (TREE_TYPE (exp),
11337 build_int_cst (NULL_TREE,
11338 (TREE_STRING_POINTER (string)
11339 [TREE_INT_CST_LOW (index)])));
11341 return NULL;
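/* Illustrative example (editorial sketch): for EXP representing "abc"[1],
   an ARRAY_REF into a STRING_CST, the function returns the character
   constant 'b'; a non-constant index, an out-of-bounds index, or a wide
   string element (mode size != 1) yields NULL.  */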
11344 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11345 an integer constant or real constant.
11347 TYPE is the type of the result. */
11349 static tree
11350 fold_negate_const (tree arg0, tree type)
11352 tree t = NULL_TREE;
11354 switch (TREE_CODE (arg0))
11356 case INTEGER_CST:
11357 {
11358 unsigned HOST_WIDE_INT low;
11359 HOST_WIDE_INT high;
11360 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11361 TREE_INT_CST_HIGH (arg0),
11362 &low, &high);
11363 t = build_int_cst_wide (type, low, high);
11364 t = force_fit_type (t, 1,
11365 (overflow | TREE_OVERFLOW (arg0))
11366 && !TYPE_UNSIGNED (type),
11367 TREE_CONSTANT_OVERFLOW (arg0));
11368 break;
11369 }
11371 case REAL_CST:
11372 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11373 break;
11375 default:
11376 gcc_unreachable ();
11379 return t;
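/* Illustrative example (editorial sketch): negating the INTEGER_CST INT_MIN
   of a signed type wraps back to INT_MIN; neg_double reports the overflow
   and force_fit_type then marks the result with TREE_OVERFLOW.  */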
11382 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11383 an integer constant or real constant.
11385 TYPE is the type of the result. */
11387 tree
11388 fold_abs_const (tree arg0, tree type)
11390 tree t = NULL_TREE;
11392 switch (TREE_CODE (arg0))
11394 case INTEGER_CST:
11395 /* If the value is unsigned, then the absolute value is
11396 the same as the ordinary value. */
11397 if (TYPE_UNSIGNED (type))
11398 t = arg0;
11399 /* Similarly, if the value is non-negative. */
11400 else if (INT_CST_LT (integer_minus_one_node, arg0))
11401 t = arg0;
11402 /* If the value is negative, then the absolute value is
11403 its negation. */
11404 else
11405 {
11406 unsigned HOST_WIDE_INT low;
11407 HOST_WIDE_INT high;
11408 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11409 TREE_INT_CST_HIGH (arg0),
11410 &low, &high);
11411 t = build_int_cst_wide (type, low, high);
11412 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11413 TREE_CONSTANT_OVERFLOW (arg0));
11414 }
11415 break;
11417 case REAL_CST:
11418 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11419 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11420 else
11421 t = arg0;
11422 break;
11424 default:
11425 gcc_unreachable ();
11428 return t;
11431 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11432 constant. TYPE is the type of the result. */
11434 static tree
11435 fold_not_const (tree arg0, tree type)
11437 tree t = NULL_TREE;
11439 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11441 t = build_int_cst_wide (type,
11442 ~ TREE_INT_CST_LOW (arg0),
11443 ~ TREE_INT_CST_HIGH (arg0));
11444 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11445 TREE_CONSTANT_OVERFLOW (arg0));
11447 return t;
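/* Illustrative example (editorial sketch): for the 32-bit int constant 5,
   fold_not_const returns -6, the bitwise complement ~5, after
   force_fit_type truncates the complemented value to the precision of
   TYPE.  */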
11450 /* Given CODE, a relational operator, the target type, TYPE and two
11451 constant operands OP0 and OP1, return the result of the
11452 relational operation. If the result is not a compile time
11453 constant, then return NULL_TREE. */
11455 static tree
11456 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11458 int result, invert;
11460 /* From here on, the only cases we handle are when the result is
11461 known to be a constant. */
11463 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11464 {
11465 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11466 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11468 /* Handle the cases where either operand is a NaN. */
11469 if (real_isnan (c0) || real_isnan (c1))
11470 {
11471 switch (code)
11472 {
11473 case EQ_EXPR:
11474 case ORDERED_EXPR:
11475 result = 0;
11476 break;
11478 case NE_EXPR:
11479 case UNORDERED_EXPR:
11480 case UNLT_EXPR:
11481 case UNLE_EXPR:
11482 case UNGT_EXPR:
11483 case UNGE_EXPR:
11484 case UNEQ_EXPR:
11485 result = 1;
11486 break;
11488 case LT_EXPR:
11489 case LE_EXPR:
11490 case GT_EXPR:
11491 case GE_EXPR:
11492 case LTGT_EXPR:
11493 if (flag_trapping_math)
11494 return NULL_TREE;
11495 result = 0;
11496 break;
11498 default:
11499 gcc_unreachable ();
11500 }
11502 return constant_boolean_node (result, type);
11503 }
11505 return constant_boolean_node (real_compare (code, c0, c1), type);
11506 }
11508 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11510 To compute GT, swap the arguments and do LT.
11511 To compute GE, do LT and invert the result.
11512 To compute LE, swap the arguments, do LT and invert the result.
11513 To compute NE, do EQ and invert the result.
11515 Therefore, the code below must handle only EQ and LT. */
11517 if (code == LE_EXPR || code == GT_EXPR)
11518 {
11519 tree tem = op0;
11520 op0 = op1;
11521 op1 = tem;
11522 code = swap_tree_comparison (code);
11523 }
11525 /* Note that it is safe to invert for real values here because we
11526 have already handled the one case where it matters. */
11528 invert = 0;
11529 if (code == NE_EXPR || code == GE_EXPR)
11530 {
11531 invert = 1;
11532 code = invert_tree_comparison (code, false);
11533 }
11535 /* Compute a result for LT or EQ if args permit;
11536 otherwise return NULL_TREE. */
11537 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11538 {
11539 if (code == EQ_EXPR)
11540 result = tree_int_cst_equal (op0, op1);
11541 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11542 result = INT_CST_LT_UNSIGNED (op0, op1);
11543 else
11544 result = INT_CST_LT (op0, op1);
11545 }
11546 else
11547 return NULL_TREE;
11549 if (invert)
11550 result ^= 1;
11551 return constant_boolean_node (result, type);
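/* Illustrative example (editorial sketch): comparing a REAL_CST NaN with
   itself, EQ_EXPR folds to false and NE_EXPR folds to true, while LT_EXPR
   is left unfolded (NULL_TREE) under -ftrapping-math because the ordered
   comparison may raise an invalid-operation exception at run time.  */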
11554 /* Build an expression for a cleanup point containing EXPR, of type TYPE.
11555 Don't build a cleanup point expression for EXPR which doesn't have side
11556 effects. */
11558 tree
11559 fold_build_cleanup_point_expr (tree type, tree expr)
11561 /* If the expression does not have side effects then we don't have to wrap
11562 it with a cleanup point expression. */
11563 if (!TREE_SIDE_EFFECTS (expr))
11564 return expr;
11566 /* If the expression is a RETURN_EXPR, check whether the expression inside
11567 the return, or the right-hand side of the MODIFY_EXPR inside the return,
11568 has side effects. If either of them doesn't, we don't need to wrap the
11569 expression in a cleanup point expression. Note we don't check the
11570 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
11571 if (TREE_CODE (expr) == RETURN_EXPR)
11573 tree op = TREE_OPERAND (expr, 0);
11574 if (!op || !TREE_SIDE_EFFECTS (op))
11575 return expr;
11576 op = TREE_OPERAND (op, 1);
11577 if (!TREE_SIDE_EFFECTS (op))
11578 return expr;
11581 return build1 (CLEANUP_POINT_EXPR, type, expr);
11584 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11585 avoid confusing the gimplify process. */
11587 tree
11588 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11590 /* The size of the object is not relevant when talking about its address. */
11591 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11592 t = TREE_OPERAND (t, 0);
11594 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11595 if (TREE_CODE (t) == INDIRECT_REF
11596 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11598 t = TREE_OPERAND (t, 0);
11599 if (TREE_TYPE (t) != ptrtype)
11600 t = build1 (NOP_EXPR, ptrtype, t);
11602 else
11604 tree base = t;
11606 while (handled_component_p (base))
11607 base = TREE_OPERAND (base, 0);
11608 if (DECL_P (base))
11609 TREE_ADDRESSABLE (base) = 1;
11611 t = build1 (ADDR_EXPR, ptrtype, t);
11614 return t;
11617 tree
11618 build_fold_addr_expr (tree t)
11620 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11623 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11624 of an indirection through OP0, or NULL_TREE if no simplification is
11625 possible. */
11627 tree
11628 fold_indirect_ref_1 (tree type, tree op0)
11630 tree sub = op0;
11631 tree subtype;
11633 STRIP_NOPS (sub);
11634 subtype = TREE_TYPE (sub);
11635 if (!POINTER_TYPE_P (subtype))
11636 return NULL_TREE;
11638 if (TREE_CODE (sub) == ADDR_EXPR)
11640 tree op = TREE_OPERAND (sub, 0);
11641 tree optype = TREE_TYPE (op);
11642 /* *&p => p; make sure to handle *&"str"[cst] here. */
11643 if (type == optype)
11645 tree fop = fold_read_from_constant_string (op);
11646 if (fop)
11647 return fop;
11648 else
11649 return op;
11651 /* *(foo *)&fooarray => fooarray[0] */
11652 else if (TREE_CODE (optype) == ARRAY_TYPE
11653 && type == TREE_TYPE (optype))
11655 tree type_domain = TYPE_DOMAIN (optype);
11656 tree min_val = size_zero_node;
11657 if (type_domain && TYPE_MIN_VALUE (type_domain))
11658 min_val = TYPE_MIN_VALUE (type_domain);
11659 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11663 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11664 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11665 && type == TREE_TYPE (TREE_TYPE (subtype)))
11667 tree type_domain;
11668 tree min_val = size_zero_node;
11669 sub = build_fold_indirect_ref (sub);
11670 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11671 if (type_domain && TYPE_MIN_VALUE (type_domain))
11672 min_val = TYPE_MIN_VALUE (type_domain);
11673 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11676 return NULL_TREE;
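/* Illustrative examples (editorial sketch): *&x simplifies to x,
   *(foo *)&fooarray to fooarray[0], and *&"abc"[1] to the character
   constant 'b' via fold_read_from_constant_string; NULL_TREE means no
   simplification applies.  */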
11679 /* Builds an expression for an indirection through T, simplifying some
11680 cases. */
11682 tree
11683 build_fold_indirect_ref (tree t)
11685 tree type = TREE_TYPE (TREE_TYPE (t));
11686 tree sub = fold_indirect_ref_1 (type, t);
11688 if (sub)
11689 return sub;
11690 else
11691 return build1 (INDIRECT_REF, type, t);
11694 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11696 tree
11697 fold_indirect_ref (tree t)
11699 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11701 if (sub)
11702 return sub;
11703 else
11704 return t;
11707 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11708 whose result is ignored. The type of the returned tree need not be
11709 the same as the original expression. */
11711 tree
11712 fold_ignored_result (tree t)
11714 if (!TREE_SIDE_EFFECTS (t))
11715 return integer_zero_node;
11717 for (;;)
11718 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11720 case tcc_unary:
11721 t = TREE_OPERAND (t, 0);
11722 break;
11724 case tcc_binary:
11725 case tcc_comparison:
11726 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11727 t = TREE_OPERAND (t, 0);
11728 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11729 t = TREE_OPERAND (t, 1);
11730 else
11731 return t;
11732 break;
11734 case tcc_expression:
11735 switch (TREE_CODE (t))
11737 case COMPOUND_EXPR:
11738 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11739 return t;
11740 t = TREE_OPERAND (t, 0);
11741 break;
11743 case COND_EXPR:
11744 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11745 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11746 return t;
11747 t = TREE_OPERAND (t, 0);
11748 break;
11750 default:
11751 return t;
11753 break;
11755 default:
11756 return t;
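/* Illustrative example (editorial sketch): for the ignored expression
   (f (), x + 1), the COMPOUND_EXPR case discards the side-effect-free
   x + 1 and returns the call f (); an expression with no side effects at
   all is replaced by integer_zero_node immediately.  */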
11760 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11761 This can only be applied to objects of a sizetype. */
11763 tree
11764 round_up (tree value, int divisor)
11766 tree div = NULL_TREE;
11768 gcc_assert (divisor > 0);
11769 if (divisor == 1)
11770 return value;
11772 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11773 have to do anything. Only do this check when VALUE is not a constant,
11774 because for a constant the check is more expensive than simply
11775 doing the rounding. */
11776 if (TREE_CODE (value) != INTEGER_CST)
11778 div = build_int_cst (TREE_TYPE (value), divisor);
11780 if (multiple_of_p (TREE_TYPE (value), value, div))
11781 return value;
11784 /* If divisor is a power of two, simplify this to bit manipulation. */
11785 if (divisor == (divisor & -divisor))
11787 tree t;
11789 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11790 value = size_binop (PLUS_EXPR, value, t);
11791 t = build_int_cst (TREE_TYPE (value), -divisor);
11792 value = size_binop (BIT_AND_EXPR, value, t);
11794 else
11796 if (!div)
11797 div = build_int_cst (TREE_TYPE (value), divisor);
11798 value = size_binop (CEIL_DIV_EXPR, value, div);
11799 value = size_binop (MULT_EXPR, value, div);
11802 return value;
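/* Illustrative example (editorial sketch): round_up (size_int (37), 8)
   takes the power-of-two branch and computes (37 + 7) & -8 = 40, while a
   divisor such as 12 goes through CEIL_DIV_EXPR followed by MULT_EXPR.  */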
11805 /* Likewise, but round down. */
11807 tree
11808 round_down (tree value, int divisor)
11810 tree div = NULL_TREE;
11812 gcc_assert (divisor > 0);
11813 if (divisor == 1)
11814 return value;
11816 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11817 have to do anything. Only do this check when VALUE is not a constant,
11818 because for a constant the check is more expensive than simply
11819 doing the rounding. */
11820 if (TREE_CODE (value) != INTEGER_CST)
11822 div = build_int_cst (TREE_TYPE (value), divisor);
11824 if (multiple_of_p (TREE_TYPE (value), value, div))
11825 return value;
11828 /* If divisor is a power of two, simplify this to bit manipulation. */
11829 if (divisor == (divisor & -divisor))
11831 tree t;
11833 t = build_int_cst (TREE_TYPE (value), -divisor);
11834 value = size_binop (BIT_AND_EXPR, value, t);
11836 else
11838 if (!div)
11839 div = build_int_cst (TREE_TYPE (value), divisor);
11840 value = size_binop (FLOOR_DIV_EXPR, value, div);
11841 value = size_binop (MULT_EXPR, value, div);
11844 return value;
11847 /* Returns the pointer to the base of the object addressed by EXP and
11848 extracts the information about the offset of the access, storing it
11849 in *PBITPOS and *POFFSET. */
11851 static tree
11852 split_address_to_core_and_offset (tree exp,
11853 HOST_WIDE_INT *pbitpos, tree *poffset)
11855 tree core;
11856 enum machine_mode mode;
11857 int unsignedp, volatilep;
11858 HOST_WIDE_INT bitsize;
11860 if (TREE_CODE (exp) == ADDR_EXPR)
11862 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11863 poffset, &mode, &unsignedp, &volatilep,
11864 false);
11865 core = build_fold_addr_expr (core);
11867 else
11869 core = exp;
11870 *pbitpos = 0;
11871 *poffset = NULL_TREE;
11874 return core;
11877 /* Returns true if addresses of E1 and E2 differ by a constant, false
11878 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11880 bool
11881 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11883 tree core1, core2;
11884 HOST_WIDE_INT bitpos1, bitpos2;
11885 tree toffset1, toffset2, tdiff, type;
11887 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11888 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11890 if (bitpos1 % BITS_PER_UNIT != 0
11891 || bitpos2 % BITS_PER_UNIT != 0
11892 || !operand_equal_p (core1, core2, 0))
11893 return false;
11895 if (toffset1 && toffset2)
11897 type = TREE_TYPE (toffset1);
11898 if (type != TREE_TYPE (toffset2))
11899 toffset2 = fold_convert (type, toffset2);
11901 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11902 if (!cst_and_fits_in_hwi (tdiff))
11903 return false;
11905 *diff = int_cst_value (tdiff);
11907 else if (toffset1 || toffset2)
11909 /* If only one of the offsets is non-constant, the difference cannot
11910 be a constant. */
11911 return false;
11913 else
11914 *diff = 0;
11916 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11917 return true;
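/* Illustrative example (editorial sketch): for E1 = &a[3] and E2 = &a[1]
   over an array of 4-byte ints, both cores decompose to &a, the constant
   bit positions differ by 64, and *DIFF is set to 8 bytes; if exactly one
   offset were non-constant the function would return false.  */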
11920 /* Simplify the floating point expression EXP when the sign of the
11921 result is not significant. Return NULL_TREE if no simplification
11922 is possible. */
11924 tree
11925 fold_strip_sign_ops (tree exp)
11927 tree arg0, arg1;
11929 switch (TREE_CODE (exp))
11931 case ABS_EXPR:
11932 case NEGATE_EXPR:
11933 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11934 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11936 case MULT_EXPR:
11937 case RDIV_EXPR:
11938 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11939 return NULL_TREE;
11940 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11941 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11942 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11943 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11944 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11945 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11946 break;
11948 default:
11949 break;
11951 return NULL_TREE;
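/* Illustrative example (editorial sketch): when only the magnitude of the
   result matters, fold_strip_sign_ops rewrites -x * y as x * y and
   ABS_EXPR <x> as x; it returns NULL_TREE when nothing can be stripped or
   when sign-dependent rounding (e.g. -frounding-math) makes the sign of
   intermediate results observable.  */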