/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

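/* A worked example of the macro above (editor's illustration, using
   8-bit two's complement values for brevity): adding A = 0x70 (+112)
   and B = 0x20 (+32) gives SUM = 0x90 (-112).  A and B agree in sign,
   so ~(A ^ B) has the sign bit set; A and SUM differ in sign, so
   (A ^ SUM) has it set as well.  Their AND is negative and overflow
   is reported.  With B = 0x90 (-112) instead, ~(A ^ B) clears the
   sign bit and no overflow is reported, whatever SUM is.  */
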
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

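/* Editor's illustration: with a 64-bit HOST_WIDE_INT, BASE is 1 << 32,
   so LOWPART (0x123456789ABCDEF0) is 0x9ABCDEF0 and HIGHPART of the
   same value is 0x12345678; the original value is recovered as
   LOWPART + HIGHPART * BASE.  */
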
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

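/* Editor's sketch, not part of the original file: encode and decode
   are inverses for any (LOW, HI) pair.  The helper below is
   hypothetical and only illustrates the intended round trip through
   the four half-width "digits".  */

static bool
encode_decode_roundtrip_ok (unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low2;
  HOST_WIDE_INT hi2;

  encode (words, low, hi);      /* Split into four half-width words.  */
  decode (words, &low2, &hi2);  /* Reassemble the two-word integer.  */
  return low2 == low && hi2 == hi;
}
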
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
     CONST_OVERFLOWED is nonzero,
     or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

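/* Editor's sketch, not part of the original file: a typical caller
   builds a raw INTEGER_CST and then lets force_fit_type mask it to
   the type's precision and record any overflow already detected, as
   int_const_binop below does.  The helper name is hypothetical.  */

static tree
example_force_fit (tree type, unsigned HOST_WIDE_INT low,
                   HOST_WIDE_INT high, bool overflow)
{
  tree t = build_int_cst_wide (type, low, high);
  /* OVERFLOWABLE == 1: only signed overflow is of interest.  */
  return force_fit_type (t, 1, overflow, overflow);
}
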
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

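/* Editor's illustration: the carry out of the low word is computed by
   the unsigned comparison L < L1, since wraparound in L = L1 + L2
   occurs exactly when the true sum does not fit in one word.  With
   4-bit words, l1 = 0xC and l2 = 0x9 give l = 0x5 < l1, so 1 is
   carried into the high word.  */
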
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

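/* Editor's illustration of the rounding modes above, for -7 / 2:
   TRUNC_DIV_EXPR yields -3 (remainder -1), FLOOR_DIV_EXPR yields -4
   (remainder 1), CEIL_DIV_EXPR yields -3 (remainder -1), and
   ROUND_DIV_EXPR yields -4, since twice the remainder's magnitude
   equals the divisor's and ties round away from zero.  */
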
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

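/* Editor's illustration: for INTEGER_CST arguments 12 and 4 this
   returns the constant 3, whereas for 12 and 5 the nonzero remainder
   makes it return NULL_TREE.  */
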
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

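/* Editor's illustration: in a signed 32-bit type the only value that
   cannot be negated is INT_MIN (-2147483648), whose negation does not
   fit; the final comparison against the lone-sign-bit pattern
   1 << (prec - 1) detects exactly that case.  */
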
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

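/* Editor's illustration: splitting IN = x + 3 with CODE == PLUS_EXPR
   stores 3 in *LITP, leaves *CONP and *MINUS_LITP null, and returns x.
   Splitting x - 3 stores the 3 in *MINUS_LITP instead, and a
   TREE_CONSTANT but non-literal operand would go to *CONP.  */
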
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

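/* Editor's sketch, not part of the original file: folding 2 + 3 into
   the constant 5 at compile time.  TYPE stands for any integer type;
   the helper name is hypothetical.  */

static tree
example_fold_plus (tree type)
{
  tree two = build_int_cst (type, 2);
  tree three = build_int_cst (type, 3);
  /* NOTRUNC == 0, so the result is truncated to TYPE's precision.  */
  return int_const_binop (PLUS_EXPR, two, three, 0);
}
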
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

1905 /* Convert expression ARG to type TYPE. Used by the middle-end for
1906 simple conversions in preference to calling the front-end's convert. */
1908 tree
1909 fold_convert (tree type, tree arg)
1911 tree orig = TREE_TYPE (arg);
1912 tree tem;
1914 if (type == orig)
1915 return arg;
1917 if (TREE_CODE (arg) == ERROR_MARK
1918 || TREE_CODE (type) == ERROR_MARK
1919 || TREE_CODE (orig) == ERROR_MARK)
1920 return error_mark_node;
1922 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1923 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1924 TYPE_MAIN_VARIANT (orig)))
1925 return fold_build1 (NOP_EXPR, type, arg);
1927 switch (TREE_CODE (type))
1929 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1930 case POINTER_TYPE: case REFERENCE_TYPE:
1931 case OFFSET_TYPE:
1932 if (TREE_CODE (arg) == INTEGER_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 return tem;
1938 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1939 || TREE_CODE (orig) == OFFSET_TYPE)
1940 return fold_build1 (NOP_EXPR, type, arg);
1941 if (TREE_CODE (orig) == COMPLEX_TYPE)
1943 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert (type, tem);
1946 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1947 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1948 return fold_build1 (NOP_EXPR, type, arg);
1950 case REAL_TYPE:
1951 if (TREE_CODE (arg) == INTEGER_CST)
1953 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1954 if (tem != NULL_TREE)
1955 return tem;
1957 else if (TREE_CODE (arg) == REAL_CST)
1959 tem = fold_convert_const (NOP_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1961 return tem;
1964 switch (TREE_CODE (orig))
1966 case INTEGER_TYPE: case CHAR_TYPE:
1967 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1968 case POINTER_TYPE: case REFERENCE_TYPE:
1969 return fold_build1 (FLOAT_EXPR, type, arg);
1971 case REAL_TYPE:
1972 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1973 type, arg);
1975 case COMPLEX_TYPE:
1976 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1977 return fold_convert (type, tem);
1979 default:
1980 gcc_unreachable ();
1983 case COMPLEX_TYPE:
1984 switch (TREE_CODE (orig))
1986 case INTEGER_TYPE: case CHAR_TYPE:
1987 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1988 case POINTER_TYPE: case REFERENCE_TYPE:
1989 case REAL_TYPE:
1990 return build2 (COMPLEX_EXPR, type,
1991 fold_convert (TREE_TYPE (type), arg),
1992 fold_convert (TREE_TYPE (type), integer_zero_node));
1993 case COMPLEX_TYPE:
1995 tree rpart, ipart;
1997 if (TREE_CODE (arg) == COMPLEX_EXPR)
1999 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2000 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2001 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2004 arg = save_expr (arg);
2005 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2006 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2007 rpart = fold_convert (TREE_TYPE (type), rpart);
2008 ipart = fold_convert (TREE_TYPE (type), ipart);
2009 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2012 default:
2013 gcc_unreachable ();
2016 case VECTOR_TYPE:
2017 if (integer_zerop (arg))
2018 return build_zero_vector (type);
2019 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2020 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == VECTOR_TYPE);
2022 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2024 case VOID_TYPE:
2025 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2027 default:
2028 gcc_unreachable ();
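/* Editorial note: the COMPLEX_TYPE arm above mirrors C's own
   semantics, where converting a complex value converts its real and
   imaginary parts independently, conceptually building the pair
       ((float) creal (z), (float) cimag (z))
   for a double-to-float complex conversion; ARG is wrapped in
   save_expr first so that a side-effecting operand is evaluated only
   once.  */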
2032 /* Return false if expr can be assumed not to be an lvalue, true
2033 otherwise. */
2035 static bool
2036 maybe_lvalue_p (tree x)
2038 /* We only need to wrap lvalue tree codes. */
2039 switch (TREE_CODE (x))
2041 case VAR_DECL:
2042 case PARM_DECL:
2043 case RESULT_DECL:
2044 case LABEL_DECL:
2045 case FUNCTION_DECL:
2046 case SSA_NAME:
2048 case COMPONENT_REF:
2049 case INDIRECT_REF:
2050 case ALIGN_INDIRECT_REF:
2051 case MISALIGNED_INDIRECT_REF:
2052 case ARRAY_REF:
2053 case ARRAY_RANGE_REF:
2054 case BIT_FIELD_REF:
2055 case OBJ_TYPE_REF:
2057 case REALPART_EXPR:
2058 case IMAGPART_EXPR:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2061 case SAVE_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2064 case COMPOUND_EXPR:
2065 case MODIFY_EXPR:
2066 case TARGET_EXPR:
2067 case COND_EXPR:
2068 case BIND_EXPR:
2069 case MIN_EXPR:
2070 case MAX_EXPR:
2071 break;
2073 default:
2074 /* Assume the worst for front-end tree codes. */
2075 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2076 break;
2077 return false;
2080 return true;
2083 /* Return an expr equal to X but certainly not valid as an lvalue. */
2085 tree
2086 non_lvalue (tree x)
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2089 us. */
2090 if (in_gimple_form)
2091 return x;
2093 if (! maybe_lvalue_p (x))
2094 return x;
2095 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2101 int pedantic_lvalues;
2103 /* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2106 static tree
2107 pedantic_non_lvalue (tree x)
2109 if (pedantic_lvalues)
2110 return non_lvalue (x);
2111 else
2112 return x;
2115 /* Given a tree comparison code, return the code that is the logical inverse
2116 of the given code. It is not safe to do this for floating-point
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2118 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2120 enum tree_code
2121 invert_tree_comparison (enum tree_code code, bool honor_nans)
2123 if (honor_nans && flag_trapping_math)
2124 return ERROR_MARK;
2126 switch (code)
2128 case EQ_EXPR:
2129 return NE_EXPR;
2130 case NE_EXPR:
2131 return EQ_EXPR;
2132 case GT_EXPR:
2133 return honor_nans ? UNLE_EXPR : LE_EXPR;
2134 case GE_EXPR:
2135 return honor_nans ? UNLT_EXPR : LT_EXPR;
2136 case LT_EXPR:
2137 return honor_nans ? UNGE_EXPR : GE_EXPR;
2138 case LE_EXPR:
2139 return honor_nans ? UNGT_EXPR : GT_EXPR;
2140 case LTGT_EXPR:
2141 return UNEQ_EXPR;
2142 case UNEQ_EXPR:
2143 return LTGT_EXPR;
2144 case UNGT_EXPR:
2145 return LE_EXPR;
2146 case UNGE_EXPR:
2147 return LT_EXPR;
2148 case UNLT_EXPR:
2149 return GE_EXPR;
2150 case UNLE_EXPR:
2151 return GT_EXPR;
2152 case ORDERED_EXPR:
2153 return UNORDERED_EXPR;
2154 case UNORDERED_EXPR:
2155 return ORDERED_EXPR;
2156 default:
2157 gcc_unreachable ();
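/* Editorial sketch (standalone C, #if 0 so it is not compiled): why
   honoring NaNs forces the UN* codes.  With a NaN operand every
   ordered comparison is false, so the logical inverse of x < y is
   not x >= y but the unordered-or-greater-or-equal test that
   UNGE_EXPR denotes.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  printf ("!(x < y) = %d   (x >= y) = %d\n", !(x < y), x >= y);
  /* Prints 1 and 0: the two tests disagree exactly when a NaN is
     involved.  */
  return 0;
}
#endif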
2161 /* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
2164 enum tree_code
2165 swap_tree_comparison (enum tree_code code)
2167 switch (code)
2169 case EQ_EXPR:
2170 case NE_EXPR:
2171 case ORDERED_EXPR:
2172 case UNORDERED_EXPR:
2173 case LTGT_EXPR:
2174 case UNEQ_EXPR:
2175 return code;
2176 case GT_EXPR:
2177 return LT_EXPR;
2178 case GE_EXPR:
2179 return LE_EXPR;
2180 case LT_EXPR:
2181 return GT_EXPR;
2182 case LE_EXPR:
2183 return GE_EXPR;
2184 case UNGT_EXPR:
2185 return UNLT_EXPR;
2186 case UNGE_EXPR:
2187 return UNLE_EXPR;
2188 case UNLT_EXPR:
2189 return UNGT_EXPR;
2190 case UNLE_EXPR:
2191 return UNGE_EXPR;
2192 default:
2193 gcc_unreachable ();
2198 /* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2202 static enum comparison_code
2203 comparison_to_compcode (enum tree_code code)
2205 switch (code)
2207 case LT_EXPR:
2208 return COMPCODE_LT;
2209 case EQ_EXPR:
2210 return COMPCODE_EQ;
2211 case LE_EXPR:
2212 return COMPCODE_LE;
2213 case GT_EXPR:
2214 return COMPCODE_GT;
2215 case NE_EXPR:
2216 return COMPCODE_NE;
2217 case GE_EXPR:
2218 return COMPCODE_GE;
2219 case ORDERED_EXPR:
2220 return COMPCODE_ORD;
2221 case UNORDERED_EXPR:
2222 return COMPCODE_UNORD;
2223 case UNLT_EXPR:
2224 return COMPCODE_UNLT;
2225 case UNEQ_EXPR:
2226 return COMPCODE_UNEQ;
2227 case UNLE_EXPR:
2228 return COMPCODE_UNLE;
2229 case UNGT_EXPR:
2230 return COMPCODE_UNGT;
2231 case LTGT_EXPR:
2232 return COMPCODE_LTGT;
2233 case UNGE_EXPR:
2234 return COMPCODE_UNGE;
2235 default:
2236 gcc_unreachable ();
2240 /* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2244 static enum tree_code
2245 compcode_to_comparison (enum comparison_code code)
2247 switch (code)
2249 case COMPCODE_LT:
2250 return LT_EXPR;
2251 case COMPCODE_EQ:
2252 return EQ_EXPR;
2253 case COMPCODE_LE:
2254 return LE_EXPR;
2255 case COMPCODE_GT:
2256 return GT_EXPR;
2257 case COMPCODE_NE:
2258 return NE_EXPR;
2259 case COMPCODE_GE:
2260 return GE_EXPR;
2261 case COMPCODE_ORD:
2262 return ORDERED_EXPR;
2263 case COMPCODE_UNORD:
2264 return UNORDERED_EXPR;
2265 case COMPCODE_UNLT:
2266 return UNLT_EXPR;
2267 case COMPCODE_UNEQ:
2268 return UNEQ_EXPR;
2269 case COMPCODE_UNLE:
2270 return UNLE_EXPR;
2271 case COMPCODE_UNGT:
2272 return UNGT_EXPR;
2273 case COMPCODE_LTGT:
2274 return LTGT_EXPR;
2275 case COMPCODE_UNGE:
2276 return UNGE_EXPR;
2277 default:
2278 gcc_unreachable ();
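/* Editorial sketch (standalone C, #if 0 so it is not compiled): the
   encoding gives LT, EQ, GT and UNORD one bit each, so every other
   code is the bitwise union of its primitive outcomes.  These checks
   use the enum comparison_code values defined at the top of this
   file.  */
#if 0
#include <assert.h>

int
main (void)
{
  assert (COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ));
  assert (COMPCODE_GE == (COMPCODE_GT | COMPCODE_EQ));
  assert (COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD));
  assert (COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT));
  assert (COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GE));
  return 0;
}
#endif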
2282 /* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
2288 tree
2289 combine_comparisons (enum tree_code code, enum tree_code lcode,
2290 enum tree_code rcode, tree truth_type,
2291 tree ll_arg, tree lr_arg)
2293 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2294 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2295 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2296 enum comparison_code compcode;
2298 switch (code)
2300 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2301 compcode = lcompcode & rcompcode;
2302 break;
2304 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2305 compcode = lcompcode | rcompcode;
2306 break;
2308 default:
2309 return NULL_TREE;
2312 if (!honor_nans)
2314 /* Eliminate unordered comparisons, as well as LTGT and ORD
2315 which are not used unless the mode has NaNs. */
2316 compcode &= ~COMPCODE_UNORD;
2317 if (compcode == COMPCODE_LTGT)
2318 compcode = COMPCODE_NE;
2319 else if (compcode == COMPCODE_ORD)
2320 compcode = COMPCODE_TRUE;
2322 else if (flag_trapping_math)
2324 /* Check that the original operation and the optimized ones will trap
2325 under the same condition. */
2326 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2327 && (lcompcode != COMPCODE_EQ)
2328 && (lcompcode != COMPCODE_ORD);
2329 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2330 && (rcompcode != COMPCODE_EQ)
2331 && (rcompcode != COMPCODE_ORD);
2332 bool trap = (compcode & COMPCODE_UNORD) == 0
2333 && (compcode != COMPCODE_EQ)
2334 && (compcode != COMPCODE_ORD);
2336 /* In a short-circuited boolean expression the LHS might be
2337 such that the RHS, if evaluated, will never trap. For
2338 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2339 if neither x nor y is NaN. (This is a mixed blessing: for
2340 example, the expression above will never trap, hence
2341 optimizing it to x < y would be invalid). */
2342 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2343 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2344 rtrap = false;
2346 /* If the comparison was short-circuited, and only the RHS
2347 trapped, we may now generate a spurious trap. */
2348 if (rtrap && !ltrap
2349 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2350 return NULL_TREE;
2352 /* If we changed the conditions that cause a trap, we lose. */
2353 if ((ltrap || rtrap) != trap)
2354 return NULL_TREE;
2357 if (compcode == COMPCODE_TRUE)
2358 return constant_boolean_node (true, truth_type);
2359 else if (compcode == COMPCODE_FALSE)
2360 return constant_boolean_node (false, truth_type);
2361 else
2362 return fold_build2 (compcode_to_comparison (compcode),
2363 truth_type, ll_arg, lr_arg);
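/* Worked example (editorial): for (x < y) && (x == y) the bitwise
   AND of COMPCODE_LT (1) and COMPCODE_EQ (2) is COMPCODE_FALSE (0),
   so the conjunction folds to constant false; for (x < y) || (x == y)
   the OR is COMPCODE_LE (3) and the result folds to x <= y.  The NaN
   and trap checks above only decide whether that rewrite is safe.  */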
2366 /* Return nonzero if CODE is a tree code that represents a truth value. */
2368 static int
2369 truth_value_p (enum tree_code code)
2371 return (TREE_CODE_CLASS (code) == tcc_comparison
2372 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2373 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2374 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2377 /* Return nonzero if two operands (typically of the same tree node)
2378 are necessarily equal. If either argument has side-effects this
2379 function returns zero. FLAGS modifies behavior as follows:
2381 If OEP_ONLY_CONST is set, only return nonzero for constants.
2382 This function tests whether the operands are indistinguishable;
2383 it does not test whether they are equal using C's == operation.
2384 The distinction is important for IEEE floating point, because
2385 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2386 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2388 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2389 even though it may hold multiple values during a function.
2390 This is because a GCC tree node guarantees that nothing else is
2391 executed between the evaluation of its "operands" (which may often
2392 be evaluated in arbitrary order). Hence if the operands themselves
2393 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2394 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2395 unset means assuming isochronic (or instantaneous) tree equivalence.
2396 Unless comparing arbitrary expression trees, such as from different
2397 statements, this flag can usually be left unset.
2399 If OEP_PURE_SAME is set, then pure functions with identical arguments
2400 are considered the same. It is used when the caller has other ways
2401 to ensure that global memory is unchanged in between. */
2403 int
2404 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2406 /* If either is ERROR_MARK, they aren't equal. */
2407 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2408 return 0;
2410 /* If both types don't have the same signedness, then we can't consider
2411 them equal. We must check this before the STRIP_NOPS calls
2412 because they may change the signedness of the arguments. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2414 return 0;
2416 STRIP_NOPS (arg0);
2417 STRIP_NOPS (arg1);
2419 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2420 /* This is needed for conversions and for COMPONENT_REF.
2421 Might as well play it safe and always test this. */
2422 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2423 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2424 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2425 return 0;
2427 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2428 We don't care about side effects in that case because the SAVE_EXPR
2429 takes care of that for us. In all other cases, two expressions are
2430 equal if they have no side effects. If we have two identical
2431 expressions with side effects that should be treated the same due
2432 to the only side effects being identical SAVE_EXPR's, that will
2433 be detected in the recursive calls below. */
2434 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2435 && (TREE_CODE (arg0) == SAVE_EXPR
2436 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2437 return 1;
2439 /* Next handle constant cases, those for which we can return 1 even
2440 if ONLY_CONST is set. */
2441 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2442 switch (TREE_CODE (arg0))
2444 case INTEGER_CST:
2445 return (! TREE_CONSTANT_OVERFLOW (arg0)
2446 && ! TREE_CONSTANT_OVERFLOW (arg1)
2447 && tree_int_cst_equal (arg0, arg1));
2449 case REAL_CST:
2450 return (! TREE_CONSTANT_OVERFLOW (arg0)
2451 && ! TREE_CONSTANT_OVERFLOW (arg1)
2452 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2453 TREE_REAL_CST (arg1)));
2455 case VECTOR_CST:
2457 tree v1, v2;
2459 if (TREE_CONSTANT_OVERFLOW (arg0)
2460 || TREE_CONSTANT_OVERFLOW (arg1))
2461 return 0;
2463 v1 = TREE_VECTOR_CST_ELTS (arg0);
2464 v2 = TREE_VECTOR_CST_ELTS (arg1);
2465 while (v1 && v2)
2467 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2468 flags))
2469 return 0;
2470 v1 = TREE_CHAIN (v1);
2471 v2 = TREE_CHAIN (v2);
2474 return v1 == v2;
2477 case COMPLEX_CST:
2478 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2479 flags)
2480 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2481 flags));
2483 case STRING_CST:
2484 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2485 && ! memcmp (TREE_STRING_POINTER (arg0),
2486 TREE_STRING_POINTER (arg1),
2487 TREE_STRING_LENGTH (arg0)));
2489 case ADDR_EXPR:
2490 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2492 default:
2493 break;
2496 if (flags & OEP_ONLY_CONST)
2497 return 0;
2499 /* Define macros to test an operand from arg0 and arg1 for equality and a
2500 variant that allows null and views null as being different from any
2501 non-null value. In the latter case, if either is null, then both
2502 must be; otherwise, do the normal comparison. */
2503 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2504 TREE_OPERAND (arg1, N), flags)
2506 #define OP_SAME_WITH_NULL(N) \
2507 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2508 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2510 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2512 case tcc_unary:
2513 /* Two conversions are equal only if signedness and modes match. */
2514 switch (TREE_CODE (arg0))
2516 case NOP_EXPR:
2517 case CONVERT_EXPR:
2518 case FIX_CEIL_EXPR:
2519 case FIX_TRUNC_EXPR:
2520 case FIX_FLOOR_EXPR:
2521 case FIX_ROUND_EXPR:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2524 return 0;
2525 break;
2526 default:
2527 break;
2530 return OP_SAME (0);
2533 case tcc_comparison:
2534 case tcc_binary:
2535 if (OP_SAME (0) && OP_SAME (1))
2536 return 1;
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0))
2540 && operand_equal_p (TREE_OPERAND (arg0, 0),
2541 TREE_OPERAND (arg1, 1), flags)
2542 && operand_equal_p (TREE_OPERAND (arg0, 1),
2543 TREE_OPERAND (arg1, 0), flags));
2545 case tcc_reference:
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1))
2550 return 0;
2552 switch (TREE_CODE (arg0))
2554 case INDIRECT_REF:
2555 case ALIGN_INDIRECT_REF:
2556 case MISALIGNED_INDIRECT_REF:
2557 case REALPART_EXPR:
2558 case IMAGPART_EXPR:
2559 return OP_SAME (0);
2561 case ARRAY_REF:
2562 case ARRAY_RANGE_REF:
2563 /* Operands 2 and 3 may be null. */
2564 return (OP_SAME (0)
2565 && OP_SAME (1)
2566 && OP_SAME_WITH_NULL (2)
2567 && OP_SAME_WITH_NULL (3));
2569 case COMPONENT_REF:
2570 /* Handle operand 2 the same as for ARRAY_REF. */
2571 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2573 case BIT_FIELD_REF:
2574 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2576 default:
2577 return 0;
2580 case tcc_expression:
2581 switch (TREE_CODE (arg0))
2583 case ADDR_EXPR:
2584 case TRUTH_NOT_EXPR:
2585 return OP_SAME (0);
2587 case TRUTH_ANDIF_EXPR:
2588 case TRUTH_ORIF_EXPR:
2589 return OP_SAME (0) && OP_SAME (1);
2591 case TRUTH_AND_EXPR:
2592 case TRUTH_OR_EXPR:
2593 case TRUTH_XOR_EXPR:
2594 if (OP_SAME (0) && OP_SAME (1))
2595 return 1;
2597 /* Otherwise take into account this is a commutative operation. */
2598 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2599 TREE_OPERAND (arg1, 1), flags)
2600 && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 TREE_OPERAND (arg1, 0), flags));
2603 case CALL_EXPR:
2604 /* If the CALL_EXPRs call different functions, then they
2605 clearly can not be equal. */
2606 if (!OP_SAME (0))
2607 return 0;
2610 unsigned int cef = call_expr_flags (arg0);
2611 if (flags & OEP_PURE_SAME)
2612 cef &= ECF_CONST | ECF_PURE;
2613 else
2614 cef &= ECF_CONST;
2615 if (!cef)
2616 return 0;
2619 /* Now see if all the arguments are the same. operand_equal_p
2620 does not handle TREE_LIST, so we walk the operands here
2621 feeding them to operand_equal_p. */
2622 arg0 = TREE_OPERAND (arg0, 1);
2623 arg1 = TREE_OPERAND (arg1, 1);
2624 while (arg0 && arg1)
2626 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2627 flags))
2628 return 0;
2630 arg0 = TREE_CHAIN (arg0);
2631 arg1 = TREE_CHAIN (arg1);
2634 /* If we get here and both argument lists are exhausted
2635 then the CALL_EXPRs are equal. */
2636 return ! (arg0 || arg1);
2638 default:
2639 return 0;
2642 case tcc_declaration:
2643 /* Consider __builtin_sqrt equal to sqrt. */
2644 return (TREE_CODE (arg0) == FUNCTION_DECL
2645 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2646 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2647 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2649 default:
2650 return 0;
2653 #undef OP_SAME
2654 #undef OP_SAME_WITH_NULL
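/* Editorial sketch (standalone C, #if 0 so it is not compiled): the
   point made in the header comment above, that -0.0 and 0.0 compare
   equal under C's == yet are distinguishable, which is why REAL_CSTs
   are compared with REAL_VALUES_IDENTICAL rather than by value.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  printf ("%d %d\n", pz == nz, signbit (pz) != signbit (nz));
  /* Prints 1 1: equal under ==, yet distinguishable via the sign
     bit (and via 1.0 / pz versus 1.0 / nz).  */
  return 0;
}
#endif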
2657 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2658 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2660 When in doubt, return 0. */
2662 static int
2663 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2665 int unsignedp1, unsignedpo;
2666 tree primarg0, primarg1, primother;
2667 unsigned int correct_width;
2669 if (operand_equal_p (arg0, arg1, 0))
2670 return 1;
2672 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2673 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2674 return 0;
2676 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2677 and see if the inner values are the same. This removes any
2678 signedness comparison, which doesn't matter here. */
2679 primarg0 = arg0, primarg1 = arg1;
2680 STRIP_NOPS (primarg0);
2681 STRIP_NOPS (primarg1);
2682 if (operand_equal_p (primarg0, primarg1, 0))
2683 return 1;
2685 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2686 actual comparison operand, ARG0.
2688 First throw away any conversions to wider types
2689 already present in the operands. */
2691 primarg1 = get_narrower (arg1, &unsignedp1);
2692 primother = get_narrower (other, &unsignedpo);
2694 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2695 if (unsignedp1 == unsignedpo
2696 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2697 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2699 tree type = TREE_TYPE (arg0);
2701 /* Make sure the shorter operand is extended the right way
2702 to match the longer operand. */
2703 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2704 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2706 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2707 return 1;
2710 return 0;
2713 /* See if ARG is an expression that is either a comparison or is performing
2714 arithmetic on comparisons. The comparisons must only be comparing
2715 two different values, which will be stored in *CVAL1 and *CVAL2; if
2716 they are nonzero it means that some operands have already been found.
2717 No variables may be used anywhere else in the expression except in the
2718 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2719 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2721 If this is true, return 1. Otherwise, return zero. */
2723 static int
2724 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2726 enum tree_code code = TREE_CODE (arg);
2727 enum tree_code_class class = TREE_CODE_CLASS (code);
2729 /* We can handle some of the tcc_expression cases here. */
2730 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2731 class = tcc_unary;
2732 else if (class == tcc_expression
2733 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2734 || code == COMPOUND_EXPR))
2735 class = tcc_binary;
2737 else if (class == tcc_expression && code == SAVE_EXPR
2738 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2740 /* If we've already found a CVAL1 or CVAL2, this expression is
2741 too complex to handle. */
2742 if (*cval1 || *cval2)
2743 return 0;
2745 class = tcc_unary;
2746 *save_p = 1;
2749 switch (class)
2751 case tcc_unary:
2752 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2754 case tcc_binary:
2755 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2756 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2757 cval1, cval2, save_p));
2759 case tcc_constant:
2760 return 1;
2762 case tcc_expression:
2763 if (code == COND_EXPR)
2764 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2765 cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p)
2768 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2769 cval1, cval2, save_p));
2770 return 0;
2772 case tcc_comparison:
2773 /* First see if we can handle the first operand, then the second. For
2774 the second operand, we know *CVAL1 can't be zero. It must be that
2775 one side of the comparison is each of the values; test for the
2776 case where this isn't true by failing if the two operands
2777 are the same. */
2779 if (operand_equal_p (TREE_OPERAND (arg, 0),
2780 TREE_OPERAND (arg, 1), 0))
2781 return 0;
2783 if (*cval1 == 0)
2784 *cval1 = TREE_OPERAND (arg, 0);
2785 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2786 ;
2787 else if (*cval2 == 0)
2788 *cval2 = TREE_OPERAND (arg, 0);
2789 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2790 ;
2791 else
2792 return 0;
2794 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2795 ;
2796 else if (*cval2 == 0)
2797 *cval2 = TREE_OPERAND (arg, 1);
2798 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2799 ;
2800 else
2801 return 0;
2803 return 1;
2805 default:
2806 return 0;
2810 /* ARG is a tree that is known to contain just arithmetic operations and
2811 comparisons. Evaluate the operations in the tree substituting NEW0 for
2812 any occurrence of OLD0 as an operand of a comparison and likewise for
2813 NEW1 and OLD1. */
2815 static tree
2816 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2818 tree type = TREE_TYPE (arg);
2819 enum tree_code code = TREE_CODE (arg);
2820 enum tree_code_class class = TREE_CODE_CLASS (code);
2822 /* We can handle some of the tcc_expression cases here. */
2823 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2824 class = tcc_unary;
2825 else if (class == tcc_expression
2826 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2827 class = tcc_binary;
2829 switch (class)
2831 case tcc_unary:
2832 return fold_build1 (code, type,
2833 eval_subst (TREE_OPERAND (arg, 0),
2834 old0, new0, old1, new1));
2836 case tcc_binary:
2837 return fold_build2 (code, type,
2838 eval_subst (TREE_OPERAND (arg, 0),
2839 old0, new0, old1, new1),
2840 eval_subst (TREE_OPERAND (arg, 1),
2841 old0, new0, old1, new1));
2843 case tcc_expression:
2844 switch (code)
2846 case SAVE_EXPR:
2847 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2849 case COMPOUND_EXPR:
2850 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2852 case COND_EXPR:
2853 return fold_build3 (code, type,
2854 eval_subst (TREE_OPERAND (arg, 0),
2855 old0, new0, old1, new1),
2856 eval_subst (TREE_OPERAND (arg, 1),
2857 old0, new0, old1, new1),
2858 eval_subst (TREE_OPERAND (arg, 2),
2859 old0, new0, old1, new1));
2860 default:
2861 break;
2863 /* Fall through - ??? */
2865 case tcc_comparison:
2867 tree arg0 = TREE_OPERAND (arg, 0);
2868 tree arg1 = TREE_OPERAND (arg, 1);
2870 /* We need to check both for exact equality and tree equality. The
2871 former will be true if the operand has a side-effect. In that
2872 case, we know the operand occurred exactly once. */
2874 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2875 arg0 = new0;
2876 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2877 arg0 = new1;
2879 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2880 arg1 = new0;
2881 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2882 arg1 = new1;
2884 return fold_build2 (code, type, arg0, arg1);
2887 default:
2888 return arg;
2892 /* Return a tree for the case when the result of an expression is RESULT
2893 converted to TYPE and OMITTED was previously an operand of the expression
2894 but is now not needed (e.g., we folded OMITTED * 0).
2896 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2897 the conversion of RESULT to TYPE. */
2899 tree
2900 omit_one_operand (tree type, tree result, tree omitted)
2902 tree t = fold_convert (type, result);
2904 if (TREE_SIDE_EFFECTS (omitted))
2905 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2907 return non_lvalue (t);
2910 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2912 static tree
2913 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2915 tree t = fold_convert (type, result);
2917 if (TREE_SIDE_EFFECTS (omitted))
2918 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2920 return pedantic_non_lvalue (t);
2923 /* Return a tree for the case when the result of an expression is RESULT
2924 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2925 of the expression but are now not needed.
2927 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2928 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2929 evaluated before OMITTED2. Otherwise, if neither has side effects,
2930 just do the conversion of RESULT to TYPE. */
2932 tree
2933 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2935 tree t = fold_convert (type, result);
2937 if (TREE_SIDE_EFFECTS (omitted2))
2938 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2939 if (TREE_SIDE_EFFECTS (omitted1))
2940 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2942 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2946 /* Return a simplified tree node for the truth-negation of ARG. This
2947 never alters ARG itself. We assume that ARG is an operation that
2948 returns a truth value (0 or 1).
2950 FIXME: one would think we would fold the result, but it causes
2951 problems with the dominator optimizer. */
2952 tree
2953 invert_truthvalue (tree arg)
2955 tree type = TREE_TYPE (arg);
2956 enum tree_code code = TREE_CODE (arg);
2958 if (code == ERROR_MARK)
2959 return arg;
2961 /* If this is a comparison, we can simply invert it, except for
2962 floating-point non-equality comparisons, in which case we just
2963 enclose a TRUTH_NOT_EXPR around what we have. */
2965 if (TREE_CODE_CLASS (code) == tcc_comparison)
2967 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2968 if (FLOAT_TYPE_P (op_type)
2969 && flag_trapping_math
2970 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2971 && code != NE_EXPR && code != EQ_EXPR)
2972 return build1 (TRUTH_NOT_EXPR, type, arg);
2973 else
2975 code = invert_tree_comparison (code,
2976 HONOR_NANS (TYPE_MODE (op_type)));
2977 if (code == ERROR_MARK)
2978 return build1 (TRUTH_NOT_EXPR, type, arg);
2979 else
2980 return build2 (code, type,
2981 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2985 switch (code)
2987 case INTEGER_CST:
2988 return constant_boolean_node (integer_zerop (arg), type);
2990 case TRUTH_AND_EXPR:
2991 return build2 (TRUTH_OR_EXPR, type,
2992 invert_truthvalue (TREE_OPERAND (arg, 0)),
2993 invert_truthvalue (TREE_OPERAND (arg, 1)));
2995 case TRUTH_OR_EXPR:
2996 return build2 (TRUTH_AND_EXPR, type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)),
2998 invert_truthvalue (TREE_OPERAND (arg, 1)));
3000 case TRUTH_XOR_EXPR:
3001 /* Here we can invert either operand. We invert the first operand
3002 unless the second operand is a TRUTH_NOT_EXPR in which case our
3003 result is the XOR of the first operand with the inside of the
3004 negation of the second operand. */
3006 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3007 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3008 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3009 else
3010 return build2 (TRUTH_XOR_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)),
3012 TREE_OPERAND (arg, 1));
3014 case TRUTH_ANDIF_EXPR:
3015 return build2 (TRUTH_ORIF_EXPR, type,
3016 invert_truthvalue (TREE_OPERAND (arg, 0)),
3017 invert_truthvalue (TREE_OPERAND (arg, 1)));
3019 case TRUTH_ORIF_EXPR:
3020 return build2 (TRUTH_ANDIF_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 invert_truthvalue (TREE_OPERAND (arg, 1)));
3024 case TRUTH_NOT_EXPR:
3025 return TREE_OPERAND (arg, 0);
3027 case COND_EXPR:
3028 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3029 invert_truthvalue (TREE_OPERAND (arg, 1)),
3030 invert_truthvalue (TREE_OPERAND (arg, 2)));
3032 case COMPOUND_EXPR:
3033 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3034 invert_truthvalue (TREE_OPERAND (arg, 1)));
3036 case NON_LVALUE_EXPR:
3037 return invert_truthvalue (TREE_OPERAND (arg, 0));
3039 case NOP_EXPR:
3040 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3041 break;
3043 case CONVERT_EXPR:
3044 case FLOAT_EXPR:
3045 return build1 (TREE_CODE (arg), type,
3046 invert_truthvalue (TREE_OPERAND (arg, 0)));
3048 case BIT_AND_EXPR:
3049 if (!integer_onep (TREE_OPERAND (arg, 1)))
3050 break;
3051 return build2 (EQ_EXPR, type, arg,
3052 fold_convert (type, integer_zero_node));
3054 case SAVE_EXPR:
3055 return build1 (TRUTH_NOT_EXPR, type, arg);
3057 case CLEANUP_POINT_EXPR:
3058 return build1 (CLEANUP_POINT_EXPR, type,
3059 invert_truthvalue (TREE_OPERAND (arg, 0)));
3061 default:
3062 break;
3064 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3065 return build1 (TRUTH_NOT_EXPR, type, arg);
3068 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3069 operands are another bit-wise operation with a common input. If so,
3070 distribute the bit operations to save an operation and possibly two if
3071 constants are involved. For example, convert
3072 (A | B) & (A | C) into A | (B & C)
3073 Further simplification will occur if B and C are constants.
3075 If this optimization cannot be done, 0 will be returned. */
3077 static tree
3078 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3080 tree common;
3081 tree left, right;
3083 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3084 || TREE_CODE (arg0) == code
3085 || (TREE_CODE (arg0) != BIT_AND_EXPR
3086 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3087 return 0;
3089 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3091 common = TREE_OPERAND (arg0, 0);
3092 left = TREE_OPERAND (arg0, 1);
3093 right = TREE_OPERAND (arg1, 1);
3095 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3097 common = TREE_OPERAND (arg0, 0);
3098 left = TREE_OPERAND (arg0, 1);
3099 right = TREE_OPERAND (arg1, 0);
3101 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3103 common = TREE_OPERAND (arg0, 1);
3104 left = TREE_OPERAND (arg0, 0);
3105 right = TREE_OPERAND (arg1, 1);
3107 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3109 common = TREE_OPERAND (arg0, 1);
3110 left = TREE_OPERAND (arg0, 0);
3111 right = TREE_OPERAND (arg1, 0);
3113 else
3114 return 0;
3116 return fold_build2 (TREE_CODE (arg0), type, common,
3117 fold_build2 (code, type, left, right));
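/* Editorial sanity check (standalone C, #if 0 so it is not
   compiled): the rewrite above relies on the bitwise distribution
   identities (A | B) & (A | C) == A | (B & C) and its dual
   (A & B) | (A & C) == A & (B | C), verified here exhaustively over
   8-bit operands.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      for (c = 0; c < 256; c++)
	{
	  assert (((a | b) & (a | c)) == (a | (b & c)));
	  assert (((a & b) | (a & c)) == (a & (b | c)));
	}
  return 0;
}
#endif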
3120 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3121 with code CODE. This optimization is unsafe. */
3122 static tree
3123 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3125 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3126 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3128 /* (A / C) +- (B / C) -> (A +- B) / C. */
3129 if (mul0 == mul1
3130 && operand_equal_p (TREE_OPERAND (arg0, 1),
3131 TREE_OPERAND (arg1, 1), 0))
3132 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3133 fold_build2 (code, type,
3134 TREE_OPERAND (arg0, 0),
3135 TREE_OPERAND (arg1, 0)),
3136 TREE_OPERAND (arg0, 1));
3138 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3139 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3140 TREE_OPERAND (arg1, 0), 0)
3141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3142 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3144 REAL_VALUE_TYPE r0, r1;
3145 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3146 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3147 if (!mul0)
3148 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3149 if (!mul1)
3150 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3151 real_arithmetic (&r0, code, &r0, &r1);
3152 return fold_build2 (MULT_EXPR, type,
3153 TREE_OPERAND (arg0, 0),
3154 build_real (type, r0));
3157 return NULL_TREE;
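/* Why the transformation is labeled unsafe (editorial sketch,
   standalone C, #if 0 so it is not compiled): (A / C) + (B / C) and
   (A + B) / C round differently, so they can produce different
   doubles.  */
#if 0
#include <stdio.h>

int
main (void)
{
  double a = 1.0, b = 2.0, c = 10.0;
  /* a / c + b / c is 0.1 + 0.2, which rounds up to
     0.30000000000000004, while (a + b) / c rounds 3.0 / 10.0
     directly to the double nearest 0.3; the results differ in the
     last bit.  */
  printf ("%.17g\n%.17g\n", a / c + b / c, (a + b) / c);
  return 0;
}
#endif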
3160 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3161 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3163 static tree
3164 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3165 int unsignedp)
3167 tree result;
3169 if (bitpos == 0)
3171 tree size = TYPE_SIZE (TREE_TYPE (inner));
3172 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3173 || POINTER_TYPE_P (TREE_TYPE (inner)))
3174 && host_integerp (size, 0)
3175 && tree_low_cst (size, 0) == bitsize)
3176 return fold_convert (type, inner);
3179 result = build3 (BIT_FIELD_REF, type, inner,
3180 size_int (bitsize), bitsize_int (bitpos));
3182 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3184 return result;
3187 /* Optimize a bit-field compare.
3189 There are two cases: First is a compare against a constant and the
3190 second is a comparison of two items where the fields are at the same
3191 bit position relative to the start of a chunk (byte, halfword, word)
3192 large enough to contain it. In these cases we can avoid the shift
3193 implicit in bitfield extractions.
3195 For constants, we emit a compare of the shifted constant with the
3196 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3197 compared. For two fields at the same position, we do the ANDs with the
3198 similar mask and compare the result of the ANDs.
3200 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3201 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3202 are the left and right operands of the comparison, respectively.
3204 If the optimization described above can be done, we return the resulting
3205 tree. Otherwise we return zero. */
3207 static tree
3208 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3209 tree lhs, tree rhs)
3211 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3212 tree type = TREE_TYPE (lhs);
3213 tree signed_type, unsigned_type;
3214 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3215 enum machine_mode lmode, rmode, nmode;
3216 int lunsignedp, runsignedp;
3217 int lvolatilep = 0, rvolatilep = 0;
3218 tree linner, rinner = NULL_TREE;
3219 tree mask;
3220 tree offset;
3222 /* Get all the information about the extractions being done. If the bit size
3223 is the same as the size of the underlying object, we aren't doing an
3224 extraction at all and so can do nothing. We also don't want to
3225 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3226 then will no longer be able to replace it. */
3227 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3228 &lunsignedp, &lvolatilep, false);
3229 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3230 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3231 return 0;
3233 if (!const_p)
3235 /* If this is not a constant, we can only do something if bit positions,
3236 sizes, and signedness are the same. */
3237 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3238 &runsignedp, &rvolatilep, false);
3240 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3241 || lunsignedp != runsignedp || offset != 0
3242 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3243 return 0;
3246 /* See if we can find a mode to refer to this field. We should be able to,
3247 but fail if we can't. */
3248 nmode = get_best_mode (lbitsize, lbitpos,
3249 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3250 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3251 TYPE_ALIGN (TREE_TYPE (rinner))),
3252 word_mode, lvolatilep || rvolatilep);
3253 if (nmode == VOIDmode)
3254 return 0;
3256 /* Set signed and unsigned types of the precision of this mode for the
3257 shifts below. */
3258 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3259 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3261 /* Compute the bit position and size for the new reference and our offset
3262 within it. If the new reference is the same size as the original, we
3263 won't optimize anything, so return zero. */
3264 nbitsize = GET_MODE_BITSIZE (nmode);
3265 nbitpos = lbitpos & ~ (nbitsize - 1);
3266 lbitpos -= nbitpos;
3267 if (nbitsize == lbitsize)
3268 return 0;
3270 if (BYTES_BIG_ENDIAN)
3271 lbitpos = nbitsize - lbitsize - lbitpos;
3273 /* Make the mask to be used against the extracted field. */
3274 mask = build_int_cst (unsigned_type, -1);
3275 mask = force_fit_type (mask, 0, false, false);
3276 mask = fold_convert (unsigned_type, mask);
3277 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3278 mask = const_binop (RSHIFT_EXPR, mask,
3279 size_int (nbitsize - lbitsize - lbitpos), 0);
3281 if (! const_p)
3282 /* If not comparing with constant, just rework the comparison
3283 and return. */
3284 return build2 (code, compare_type,
3285 build2 (BIT_AND_EXPR, unsigned_type,
3286 make_bit_field_ref (linner, unsigned_type,
3287 nbitsize, nbitpos, 1),
3288 mask),
3289 build2 (BIT_AND_EXPR, unsigned_type,
3290 make_bit_field_ref (rinner, unsigned_type,
3291 nbitsize, nbitpos, 1),
3292 mask));
3294 /* Otherwise, we are handling the constant case. See if the constant is too
3295 big for the field. Warn and return a tree for 0 (false) if so. We do
3296 this not only for its own sake, but to avoid having to test for this
3297 error case below. If we didn't, we might generate wrong code.
3299 For unsigned fields, the constant shifted right by the field length should
3300 be all zero. For signed fields, the high-order bits should agree with
3301 the sign bit. */
3303 if (lunsignedp)
3305 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3306 fold_convert (unsigned_type, rhs),
3307 size_int (lbitsize), 0)))
3309 warning (0, "comparison is always %d due to width of bit-field",
3310 code == NE_EXPR);
3311 return constant_boolean_node (code == NE_EXPR, compare_type);
3314 else
3316 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3317 size_int (lbitsize - 1), 0);
3318 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3320 warning (0, "comparison is always %d due to width of bit-field",
3321 code == NE_EXPR);
3322 return constant_boolean_node (code == NE_EXPR, compare_type);
3326 /* Single-bit compares should always be against zero. */
3327 if (lbitsize == 1 && ! integer_zerop (rhs))
3329 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3330 rhs = fold_convert (type, integer_zero_node);
3333 /* Make a new bitfield reference, shift the constant over the
3334 appropriate number of bits and mask it with the computed mask
3335 (in case this was a signed field). If we changed it, make a new one. */
3336 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3337 if (lvolatilep)
3339 TREE_SIDE_EFFECTS (lhs) = 1;
3340 TREE_THIS_VOLATILE (lhs) = 1;
3343 rhs = const_binop (BIT_AND_EXPR,
3344 const_binop (LSHIFT_EXPR,
3345 fold_convert (unsigned_type, rhs),
3346 size_int (lbitpos), 0),
3347 mask, 0);
3349 return build2 (code, compare_type,
3350 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3351 rhs);
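/* Editorial illustration (hypothetical layout; the real mask, shift
   and container size come from get_best_mode and depend on
   endianness): for a source-level test such as

       struct s { unsigned f : 3; } x;
       ... x.f == 5 ...

   the routine above replaces the implicit extract-and-shift with a
   single masked compare on the containing unit, conceptually

       (containing_byte & 0x07) == 0x05

   where the constant has been pre-shifted into the field's position
   and masked, and an out-of-range constant is caught by the warning
   above instead of silently producing an always-false compare.  */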
3354 /* Subroutine for fold_truthop: decode a field reference.
3356 If EXP is a comparison reference, we return the innermost reference.
3358 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3359 set to the starting bit number.
3361 If the innermost field can be completely contained in a mode-sized
3362 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3364 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3365 otherwise it is not changed.
3367 *PUNSIGNEDP is set to the signedness of the field.
3369 *PMASK is set to the mask used. This is either contained in a
3370 BIT_AND_EXPR or derived from the width of the field.
3372 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3374 Return 0 if this is not a component reference or is one that we can't
3375 do anything with. */
3377 static tree
3378 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3379 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3380 int *punsignedp, int *pvolatilep,
3381 tree *pmask, tree *pand_mask)
3383 tree outer_type = 0;
3384 tree and_mask = 0;
3385 tree mask, inner, offset;
3386 tree unsigned_type;
3387 unsigned int precision;
3389 /* All the optimizations using this function assume integer fields.
3390 There are problems with FP fields since the type_for_size call
3391 below can fail for, e.g., XFmode. */
3392 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3393 return 0;
3395 /* We are interested in the bare arrangement of bits, so strip everything
3396 that doesn't affect the machine mode. However, record the type of the
3397 outermost expression if it may matter below. */
3398 if (TREE_CODE (exp) == NOP_EXPR
3399 || TREE_CODE (exp) == CONVERT_EXPR
3400 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3401 outer_type = TREE_TYPE (exp);
3402 STRIP_NOPS (exp);
3404 if (TREE_CODE (exp) == BIT_AND_EXPR)
3406 and_mask = TREE_OPERAND (exp, 1);
3407 exp = TREE_OPERAND (exp, 0);
3408 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3409 if (TREE_CODE (and_mask) != INTEGER_CST)
3410 return 0;
3413 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3414 punsignedp, pvolatilep, false);
3415 if ((inner == exp && and_mask == 0)
3416 || *pbitsize < 0 || offset != 0
3417 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3418 return 0;
3420 /* If the number of bits in the reference is the same as the bitsize of
3421 the outer type, then the outer type gives the signedness. Otherwise
3422 (in case of a small bitfield) the signedness is unchanged. */
3423 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3424 *punsignedp = TYPE_UNSIGNED (outer_type);
3426 /* Compute the mask to access the bitfield. */
3427 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3428 precision = TYPE_PRECISION (unsigned_type);
3430 mask = build_int_cst (unsigned_type, -1);
3431 mask = force_fit_type (mask, 0, false, false);
3433 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3434 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3436 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3437 if (and_mask != 0)
3438 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3439 fold_convert (unsigned_type, and_mask), mask);
3441 *pmask = mask;
3442 *pand_mask = and_mask;
3443 return inner;
3446 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3447 bit positions. */
3449 static int
3450 all_ones_mask_p (tree mask, int size)
3452 tree type = TREE_TYPE (mask);
3453 unsigned int precision = TYPE_PRECISION (type);
3454 tree tmask;
3456 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3457 tmask = force_fit_type (tmask, 0, false, false);
3459 return
3460 tree_int_cst_equal (mask,
3461 const_binop (RSHIFT_EXPR,
3462 const_binop (LSHIFT_EXPR, tmask,
3463 size_int (precision - size),
3464 0),
3465 size_int (precision - size), 0));
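/* Editorial sketch (standalone C, #if 0 so it is not compiled): the
   shift pair used above, left by precision - size and back right by
   the same amount, clears the high bits of an all-ones value and
   leaves exactly SIZE low-order ones.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned precision = 32, size = 12;
  unsigned mask = (~0u << (precision - size)) >> (precision - size);
  assert (mask == 0xfffu);
  return 0;
}
#endif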
3468 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3469 represents the sign bit of EXP's type. If EXP represents a sign
3470 or zero extension, also test VAL against the unextended type.
3471 The return value is the (sub)expression whose sign bit is VAL,
3472 or NULL_TREE otherwise. */
3474 static tree
3475 sign_bit_p (tree exp, tree val)
3477 unsigned HOST_WIDE_INT mask_lo, lo;
3478 HOST_WIDE_INT mask_hi, hi;
3479 int width;
3480 tree t;
3482 /* Tree EXP must have an integral type. */
3483 t = TREE_TYPE (exp);
3484 if (! INTEGRAL_TYPE_P (t))
3485 return NULL_TREE;
3487 /* Tree VAL must be an integer constant. */
3488 if (TREE_CODE (val) != INTEGER_CST
3489 || TREE_CONSTANT_OVERFLOW (val))
3490 return NULL_TREE;
3492 width = TYPE_PRECISION (t);
3493 if (width > HOST_BITS_PER_WIDE_INT)
3495 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3496 lo = 0;
3498 mask_hi = ((unsigned HOST_WIDE_INT) -1
3499 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3500 mask_lo = -1;
3502 else
3504 hi = 0;
3505 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3507 mask_hi = 0;
3508 mask_lo = ((unsigned HOST_WIDE_INT) -1
3509 >> (HOST_BITS_PER_WIDE_INT - width));
3512 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3513 treat VAL as if it were unsigned. */
3514 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3515 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3516 return exp;
3518 /* Handle extension from a narrower type. */
3519 if (TREE_CODE (exp) == NOP_EXPR
3520 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3521 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3523 return NULL_TREE;
3526 /* Subroutine for fold_truthop: determine if an operand is simple enough
3527 to be evaluated unconditionally. */
3529 static int
3530 simple_operand_p (tree exp)
3532 /* Strip any conversions that don't change the machine mode. */
3533 STRIP_NOPS (exp);
3535 return (CONSTANT_CLASS_P (exp)
3536 || TREE_CODE (exp) == SSA_NAME
3537 || (DECL_P (exp)
3538 && ! TREE_ADDRESSABLE (exp)
3539 && ! TREE_THIS_VOLATILE (exp)
3540 && ! DECL_NONLOCAL (exp)
3541 /* Don't regard global variables as simple. They may be
3542 allocated in ways unknown to the compiler (shared memory,
3543 #pragma weak, etc). */
3544 && ! TREE_PUBLIC (exp)
3545 && ! DECL_EXTERNAL (exp)
3546 /* Loading a static variable is unduly expensive, but global
3547 registers aren't expensive. */
3548 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3551 /* The following functions are subroutines to fold_range_test and allow it to
3552 try to change a logical combination of comparisons into a range test.
3554 For example, both
3555 X == 2 || X == 3 || X == 4 || X == 5
3556 and
3557 X >= 2 && X <= 5
3558 are converted to
3559 (unsigned) (X - 2) <= 3
3561 We describe each set of comparisons as being either inside or outside
3562 a range, using a variable named like IN_P, and then describe the
3563 range with a lower and upper bound. If one of the bounds is omitted,
3564 it represents either the highest or lowest value of the type.
3566 In the comments below, we represent a range by two numbers in brackets
3567 preceded by a "+" to designate being inside that range, or a "-" to
3568 designate being outside that range, so the condition can be inverted by
3569 flipping the prefix. An omitted bound is represented by a "-". For
3570 example, "- [-, 10]" means being outside the range starting at the lowest
3571 possible value and ending at 10, in other words, being greater than 10.
3572 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3573 always false.
3575 We set up things so that the missing bounds are handled in a consistent
3576 manner so neither a missing bound nor "true" and "false" need to be
3577 handled using a special case. */
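/* Editorial sketch (standalone C, #if 0 so it is not compiled): the
   canonical range-test rewrite quoted above.  Subtracting the lower
   bound rebases the range at zero, and the unsigned comparison makes
   values below the lower bound wrap to large numbers, so one compare
   covers both bounds.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    assert ((x == 2 || x == 3 || x == 4 || x == 5)
	    == ((unsigned) (x - 2) <= 3));
  return 0;
}
#endif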
3579 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3580 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3581 and UPPER1_P are nonzero if the respective argument is an upper bound
3582 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3583 must be specified for a comparison. ARG1 will be converted to ARG0's
3584 type if both are specified. */
3586 static tree
3587 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3588 tree arg1, int upper1_p)
3590 tree tem;
3591 int result;
3592 int sgn0, sgn1;
3594 /* If neither arg represents infinity, do the normal operation.
3595 Else, if not a comparison, return infinity. Else handle the special
3596 comparison rules. Note that most of the cases below won't occur, but
3597 are handled for consistency. */
3599 if (arg0 != 0 && arg1 != 0)
3601 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3602 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3603 STRIP_NOPS (tem);
3604 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3607 if (TREE_CODE_CLASS (code) != tcc_comparison)
3608 return 0;
3610 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3611 for neither. In real maths, we cannot assume open ended ranges are
3612 the same. But, this is computer arithmetic, where numbers are finite.
3613 We can therefore make the transformation of any unbounded range with
3614 the value Z, Z being greater than any representable number. This permits
3615 us to treat unbounded ranges as equal. */
3616 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3617 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3618 switch (code)
3620 case EQ_EXPR:
3621 result = sgn0 == sgn1;
3622 break;
3623 case NE_EXPR:
3624 result = sgn0 != sgn1;
3625 break;
3626 case LT_EXPR:
3627 result = sgn0 < sgn1;
3628 break;
3629 case LE_EXPR:
3630 result = sgn0 <= sgn1;
3631 break;
3632 case GT_EXPR:
3633 result = sgn0 > sgn1;
3634 break;
3635 case GE_EXPR:
3636 result = sgn0 >= sgn1;
3637 break;
3638 default:
3639 gcc_unreachable ();
3642 return constant_boolean_node (result, type);
3645 /* Given EXP, a logical expression, set the range it is testing into
3646 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3647 actually being tested. *PLOW and *PHIGH will be made of the same type
3648 as the returned expression. If EXP is not a comparison, we will most
3649 likely not be returning a useful value and range. */
3651 static tree
3652 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3654 enum tree_code code;
3655 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3656 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3657 int in_p, n_in_p;
3658 tree low, high, n_low, n_high;
3660 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3661 and see if we can refine the range. Some of the cases below may not
3662 happen, but it doesn't seem worth worrying about this. We "continue"
3663 the outer loop when we've changed something; otherwise we "break"
3664 the switch, which will "break" the while. */
3666 in_p = 0;
3667 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3669 while (1)
3671 code = TREE_CODE (exp);
3672 exp_type = TREE_TYPE (exp);
3674 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3676 if (TREE_CODE_LENGTH (code) > 0)
3677 arg0 = TREE_OPERAND (exp, 0);
3678 if (TREE_CODE_CLASS (code) == tcc_comparison
3679 || TREE_CODE_CLASS (code) == tcc_unary
3680 || TREE_CODE_CLASS (code) == tcc_binary)
3681 arg0_type = TREE_TYPE (arg0);
3682 if (TREE_CODE_CLASS (code) == tcc_binary
3683 || TREE_CODE_CLASS (code) == tcc_comparison
3684 || (TREE_CODE_CLASS (code) == tcc_expression
3685 && TREE_CODE_LENGTH (code) > 1))
3686 arg1 = TREE_OPERAND (exp, 1);
3689 switch (code)
3691 case TRUTH_NOT_EXPR:
3692 in_p = ! in_p, exp = arg0;
3693 continue;
3695 case EQ_EXPR: case NE_EXPR:
3696 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3697 /* We can only do something if the range is testing for zero
3698 and if the second operand is an integer constant. Note that
3699 saying something is "in" the range we make is done by
3700 complementing IN_P since it will set in the initial case of
3701 being not equal to zero; "out" is leaving it alone. */
3702 if (low == 0 || high == 0
3703 || ! integer_zerop (low) || ! integer_zerop (high)
3704 || TREE_CODE (arg1) != INTEGER_CST)
3705 break;
3707 switch (code)
3709 case NE_EXPR: /* - [c, c] */
3710 low = high = arg1;
3711 break;
3712 case EQ_EXPR: /* + [c, c] */
3713 in_p = ! in_p, low = high = arg1;
3714 break;
3715 case GT_EXPR: /* - [-, c] */
3716 low = 0, high = arg1;
3717 break;
3718 case GE_EXPR: /* + [c, -] */
3719 in_p = ! in_p, low = arg1, high = 0;
3720 break;
3721 case LT_EXPR: /* - [c, -] */
3722 low = arg1, high = 0;
3723 break;
3724 case LE_EXPR: /* + [-, c] */
3725 in_p = ! in_p, low = 0, high = arg1;
3726 break;
3727 default:
3728 gcc_unreachable ();
3731 /* If this is an unsigned comparison, we also know that EXP is
3732 greater than or equal to zero. We base the range tests we make
3733 on that fact, so we record it here so we can parse existing
3734 range tests. We test arg0_type since often the return type
3735 of, e.g. EQ_EXPR, is boolean. */
3736 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3738 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3739 in_p, low, high, 1,
3740 fold_convert (arg0_type, integer_zero_node),
3741 NULL_TREE))
3742 break;
3744 in_p = n_in_p, low = n_low, high = n_high;
3746 /* If the high bound is missing, but we have a nonzero low
3747 bound, reverse the range so it goes from zero to the low bound
3748 minus 1. */
3749 if (high == 0 && low && ! integer_zerop (low))
3751 in_p = ! in_p;
3752 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3753 integer_one_node, 0);
3754 low = fold_convert (arg0_type, integer_zero_node);
3758 exp = arg0;
3759 continue;
3761 case NEGATE_EXPR:
3762 /* (-x) IN [a,b] -> x in [-b, -a] */
3763 n_low = range_binop (MINUS_EXPR, exp_type,
3764 fold_convert (exp_type, integer_zero_node),
3765 0, high, 1);
3766 n_high = range_binop (MINUS_EXPR, exp_type,
3767 fold_convert (exp_type, integer_zero_node),
3768 0, low, 0);
3769 low = n_low, high = n_high;
3770 exp = arg0;
3771 continue;
3773 case BIT_NOT_EXPR:
3774 /* ~ X -> -X - 1 */
3775 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3776 fold_convert (exp_type, integer_one_node));
3777 continue;
3779 case PLUS_EXPR: case MINUS_EXPR:
3780 if (TREE_CODE (arg1) != INTEGER_CST)
3781 break;
3783 /* If EXP is signed, any overflow in the computation is undefined,
3784 so we don't worry about it so long as our computations on
3785 the bounds don't overflow. For unsigned, overflow is defined
3786 and this is exactly the right thing. */
3787 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3788 arg0_type, low, 0, arg1, 0);
3789 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3790 arg0_type, high, 1, arg1, 0);
3791 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3792 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3793 break;
3795 /* Check for an unsigned range which has wrapped around the maximum
3796 value thus making n_high < n_low, and normalize it. */
3797 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3799 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3800 integer_one_node, 0);
3801 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3802 integer_one_node, 0);
3804 /* If the range is of the form +/- [ x+1, x ], we won't
3805 be able to normalize it. But then, it represents the
3806 whole range or the empty set, so make it
3807 +/- [ -, - ]. */
3808 if (tree_int_cst_equal (n_low, low)
3809 && tree_int_cst_equal (n_high, high))
3810 low = high = 0;
3811 else
3812 in_p = ! in_p;
3814 else
3815 low = n_low, high = n_high;
3817 exp = arg0;
3818 continue;
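/* Illustration of the wrap-around normalization (hypothetical unsigned
   char values): testing X + 5 in [3, 250] gives n_low = 3 - 5 = 254
   and n_high = 250 - 5 = 245 modulo 256.  Since n_high < n_low the
   range has wrapped, so it is rewritten as the complemented range:
   X not in [246, 253].  */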
3820 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3821 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3822 break;
3824 if (! INTEGRAL_TYPE_P (arg0_type)
3825 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3826 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3827 break;
3829 n_low = low, n_high = high;
3831 if (n_low != 0)
3832 n_low = fold_convert (arg0_type, n_low);
3834 if (n_high != 0)
3835 n_high = fold_convert (arg0_type, n_high);
3838 /* If we're converting arg0 from an unsigned type to exp's
3839 signed type, we will be doing the comparison as unsigned.
3840 The tests above have already verified that LOW and HIGH
3841 are both positive.
3843 So we have to ensure that we will handle large unsigned
3844 values the same way that the current signed bounds treat
3845 negative values. */
3847 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3849 tree high_positive;
3850 tree equiv_type = lang_hooks.types.type_for_mode
3851 (TYPE_MODE (arg0_type), 1);
3853 /* A range without an upper bound is, naturally, unbounded.
3854 Since convert would have cropped a very large value, use
3855 the max value for the destination type. */
3856 high_positive
3857 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3858 : TYPE_MAX_VALUE (arg0_type);
3860 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3861 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3862 fold_convert (arg0_type,
3863 high_positive),
3864 fold_convert (arg0_type,
3865 integer_one_node));
3867 /* If the low bound is specified, "and" the range with the
3868 range for which the original unsigned value will be
3869 positive. */
3870 if (low != 0)
3872 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3873 1, n_low, n_high, 1,
3874 fold_convert (arg0_type,
3875 integer_zero_node),
3876 high_positive))
3877 break;
3879 in_p = (n_in_p == in_p);
3881 else
3883 /* Otherwise, "or" the range with the range of the input
3884 that will be interpreted as negative. */
3885 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3886 0, n_low, n_high, 1,
3887 fold_convert (arg0_type,
3888 integer_zero_node),
3889 high_positive))
3890 break;
3892 in_p = (in_p != n_in_p);
3896 exp = arg0;
3897 low = n_low, high = n_high;
3898 continue;
3900 default:
3901 break;
3904 break;
3907 /* If EXP is a constant, we can evaluate whether this is true or false. */
3908 if (TREE_CODE (exp) == INTEGER_CST)
3910 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3911 exp, 0, low, 0))
3912 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3913 exp, 1, high, 1)));
3914 low = high = 0;
3915 exp = 0;
3918 *pin_p = in_p, *plow = low, *phigh = high;
3919 return exp;
3922 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3923 type, TYPE, return an expression to test if EXP is in (or out of, depending
3924 on IN_P) the range. Return 0 if the test couldn't be created. */
3926 static tree
3927 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3929 tree etype = TREE_TYPE (exp);
3930 tree value;
3932 #ifdef HAVE_canonicalize_funcptr_for_compare
3933 /* Disable this optimization for function pointer expressions
3934 on targets that require function pointer canonicalization. */
3935 if (HAVE_canonicalize_funcptr_for_compare
3936 && TREE_CODE (etype) == POINTER_TYPE
3937 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3938 return NULL_TREE;
3939 #endif
3941 if (! in_p)
3943 value = build_range_check (type, exp, 1, low, high);
3944 if (value != 0)
3945 return invert_truthvalue (value);
3947 return 0;
3950 if (low == 0 && high == 0)
3951 return fold_convert (type, integer_one_node);
3953 if (low == 0)
3954 return fold_build2 (LE_EXPR, type, exp,
3955 fold_convert (etype, high));
3957 if (high == 0)
3958 return fold_build2 (GE_EXPR, type, exp,
3959 fold_convert (etype, low));
3961 if (operand_equal_p (low, high, 0))
3962 return fold_build2 (EQ_EXPR, type, exp,
3963 fold_convert (etype, low));
3965 if (integer_zerop (low))
3967 if (! TYPE_UNSIGNED (etype))
3969 etype = lang_hooks.types.unsigned_type (etype);
3970 high = fold_convert (etype, high);
3971 exp = fold_convert (etype, exp);
3973 return build_range_check (type, exp, 1, 0, high);
3976 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3977 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3979 unsigned HOST_WIDE_INT lo;
3980 HOST_WIDE_INT hi;
3981 int prec;
3983 prec = TYPE_PRECISION (etype);
3984 if (prec <= HOST_BITS_PER_WIDE_INT)
3986 hi = 0;
3987 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3989 else
3991 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3992 lo = (unsigned HOST_WIDE_INT) -1;
3995 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3997 if (TYPE_UNSIGNED (etype))
3999 etype = lang_hooks.types.signed_type (etype);
4000 exp = fold_convert (etype, exp);
4002 return fold_build2 (GT_EXPR, type, exp,
4003 fold_convert (etype, integer_zero_node));
4007 value = const_binop (MINUS_EXPR, high, low, 0);
4008 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
4010 tree utype, minv, maxv;
4012 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4013 for the type in question, as we rely on this here. */
4014 switch (TREE_CODE (etype))
4016 case INTEGER_TYPE:
4017 case ENUMERAL_TYPE:
4018 case CHAR_TYPE:
4019 utype = lang_hooks.types.unsigned_type (etype);
4020 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4021 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4022 integer_one_node, 1);
4023 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4024 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4025 minv, 1, maxv, 1)))
4027 etype = utype;
4028 high = fold_convert (etype, high);
4029 low = fold_convert (etype, low);
4030 exp = fold_convert (etype, exp);
4031 value = const_binop (MINUS_EXPR, high, low, 0);
4033 break;
4034 default:
4035 break;
4039 if (value != 0 && ! TREE_OVERFLOW (value))
4040 return build_range_check (type,
4041 fold_build2 (MINUS_EXPR, etype, exp, low),
4042 1, fold_convert (etype, integer_zero_node),
4043 value);
4045 return 0;
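/* A source-level sketch of the two-sided case handled above; the
   helper below is hypothetical and compiled out, kept only to show
   the shape of the emitted test.  */
#if 0
static int
digit_p (unsigned char c)
{
  /* The range test '0' <= c && c <= '9' becomes a single unsigned
     comparison, exactly the (exp - low) in [0, high - low] rewrite
     performed at the end of build_range_check.  */
  return (unsigned char) (c - '0') <= 9;
}
#endif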
4048 /* Given two ranges, see if we can merge them into one. Return 1 if we
4049 can, 0 if we can't. Set the output range into the specified parameters. */
4051 static int
4052 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4053 tree high0, int in1_p, tree low1, tree high1)
4055 int no_overlap;
4056 int subset;
4057 int temp;
4058 tree tem;
4059 int in_p;
4060 tree low, high;
4061 int lowequal = ((low0 == 0 && low1 == 0)
4062 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4063 low0, 0, low1, 0)));
4064 int highequal = ((high0 == 0 && high1 == 0)
4065 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4066 high0, 1, high1, 1)));
4068 /* Make range 0 be the range that starts first, or ends last if they
4069 start at the same value. Swap them if that isn't the case. */
4070 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4071 low0, 0, low1, 0))
4072 || (lowequal
4073 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4074 high1, 1, high0, 1))))
4076 temp = in0_p, in0_p = in1_p, in1_p = temp;
4077 tem = low0, low0 = low1, low1 = tem;
4078 tem = high0, high0 = high1, high1 = tem;
4081 /* Now flag two cases, whether the ranges are disjoint or whether the
4082 second range is totally subsumed in the first. Note that the tests
4083 below are simplified by the ones above. */
4084 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4085 high0, 1, low1, 0));
4086 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4087 high1, 1, high0, 1));
4089 /* We now have four cases, depending on whether we are including or
4090 excluding the two ranges. */
4091 if (in0_p && in1_p)
4093 /* If they don't overlap, the result is false. If the second range
4094 is a subset it is the result. Otherwise, the range is from the start
4095 of the second to the end of the first. */
4096 if (no_overlap)
4097 in_p = 0, low = high = 0;
4098 else if (subset)
4099 in_p = 1, low = low1, high = high1;
4100 else
4101 in_p = 1, low = low1, high = high0;
4104 else if (in0_p && ! in1_p)
4106 /* If they don't overlap, the result is the first range. If they are
4107 equal, the result is false. If the second range is a subset of the
4108 first, and the ranges begin at the same place, we go from just after
4109 the end of the first range to the end of the second. If the second
4110 range is not a subset of the first, or if it is a subset and both
4111 ranges end at the same place, the range starts at the start of the
4112 first range and ends just before the second range.
4113 Otherwise, we can't describe this as a single range. */
4114 if (no_overlap)
4115 in_p = 1, low = low0, high = high0;
4116 else if (lowequal && highequal)
4117 in_p = 0, low = high = 0;
4118 else if (subset && lowequal)
4120 in_p = 1, high = high0;
4121 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4122 integer_one_node, 0);
4124 else if (! subset || highequal)
4126 in_p = 1, low = low0;
4127 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4128 integer_one_node, 0);
4130 else
4131 return 0;
4134 else if (! in0_p && in1_p)
4136 /* If they don't overlap, the result is the second range. If the second
4137 is a subset of the first, the result is false. Otherwise,
4138 the range starts just after the first range and ends at the
4139 end of the second. */
4140 if (no_overlap)
4141 in_p = 1, low = low1, high = high1;
4142 else if (subset || highequal)
4143 in_p = 0, low = high = 0;
4144 else
4146 in_p = 1, high = high1;
4147 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4148 integer_one_node, 0);
4152 else
4154 /* The case where we are excluding both ranges. Here the complex case
4155 is if they don't overlap. In that case, the only time we have a
4156 range is if they are adjacent. If the second is a subset of the
4157 first, the result is the first. Otherwise, the range to exclude
4158 starts at the beginning of the first range and ends at the end of the
4159 second. */
4160 if (no_overlap)
4162 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4163 range_binop (PLUS_EXPR, NULL_TREE,
4164 high0, 1,
4165 integer_one_node, 1),
4166 1, low1, 0)))
4167 in_p = 0, low = low0, high = high1;
4168 else
4170 /* Canonicalize - [min, x] into - [-, x]. */
4171 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4172 switch (TREE_CODE (TREE_TYPE (low0)))
4174 case ENUMERAL_TYPE:
4175 if (TYPE_PRECISION (TREE_TYPE (low0))
4176 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4177 break;
4178 /* FALLTHROUGH */
4179 case INTEGER_TYPE:
4180 case CHAR_TYPE:
4181 if (tree_int_cst_equal (low0,
4182 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4183 low0 = 0;
4184 break;
4185 case POINTER_TYPE:
4186 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4187 && integer_zerop (low0))
4188 low0 = 0;
4189 break;
4190 default:
4191 break;
4194 /* Canonicalize - [x, max] into - [x, -]. */
4195 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4196 switch (TREE_CODE (TREE_TYPE (high1)))
4198 case ENUMERAL_TYPE:
4199 if (TYPE_PRECISION (TREE_TYPE (high1))
4200 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4201 break;
4202 /* FALLTHROUGH */
4203 case INTEGER_TYPE:
4204 case CHAR_TYPE:
4205 if (tree_int_cst_equal (high1,
4206 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4207 high1 = 0;
4208 break;
4209 case POINTER_TYPE:
4210 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4211 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4212 high1, 1,
4213 integer_one_node, 1)))
4214 high1 = 0;
4215 break;
4216 default:
4217 break;
4220 /* The ranges might also be adjacent between the maximum and
4221 minimum values of the given type. For
4222 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4223 return + [x + 1, y - 1]. */
4224 if (low0 == 0 && high1 == 0)
4226 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4227 integer_one_node, 1);
4228 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4229 integer_one_node, 0);
4230 if (low == 0 || high == 0)
4231 return 0;
4233 in_p = 1;
4235 else
4236 return 0;
4239 else if (subset)
4240 in_p = 0, low = low0, high = high0;
4241 else
4242 in_p = 0, low = low0, high = high1;
4245 *pin_p = in_p, *plow = low, *phigh = high;
4246 return 1;
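/* Illustration (hypothetical values): merging the two "in" ranges
   [48, -] (from c >= '0') and [-, 57] (from c <= '9') falls into the
   in0_p && in1_p case above; the ranges overlap and neither is a
   subset, so the result runs from the start of the second to the end
   of the first: the single "in" range [48, 57].  */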
4250 /* Subroutine of fold, looking inside expressions of the form
4251 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4252 of the COND_EXPR. This function is being used also to optimize
4253 A op B ? C : A, by reversing the comparison first.
4255 Return a folded expression whose code is not a COND_EXPR
4256 anymore, or NULL_TREE if no folding opportunity is found. */
4258 static tree
4259 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4261 enum tree_code comp_code = TREE_CODE (arg0);
4262 tree arg00 = TREE_OPERAND (arg0, 0);
4263 tree arg01 = TREE_OPERAND (arg0, 1);
4264 tree arg1_type = TREE_TYPE (arg1);
4265 tree tem;
4267 STRIP_NOPS (arg1);
4268 STRIP_NOPS (arg2);
4270 /* If we have A op 0 ? A : -A, consider applying the following
4271 transformations:
4273 A == 0? A : -A same as -A
4274 A != 0? A : -A same as A
4275 A >= 0? A : -A same as abs (A)
4276 A > 0? A : -A same as abs (A)
4277 A <= 0? A : -A same as -abs (A)
4278 A < 0? A : -A same as -abs (A)
4280 None of these transformations work for modes with signed
4281 zeros. If A is +/-0, the first two transformations will
4282 change the sign of the result (from +0 to -0, or vice
4283 versa). The last four will fix the sign of the result,
4284 even though the original expressions could be positive or
4285 negative, depending on the sign of A.
4287 Note that all these transformations are correct if A is
4288 NaN, since the two alternatives (A and -A) are also NaNs. */
4289 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4290 ? real_zerop (arg01)
4291 : integer_zerop (arg01))
4292 && ((TREE_CODE (arg2) == NEGATE_EXPR
4293 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4294 /* In the case that A is of the form X-Y, '-A' (arg2) may
4295 have already been folded to Y-X, check for that. */
4296 || (TREE_CODE (arg1) == MINUS_EXPR
4297 && TREE_CODE (arg2) == MINUS_EXPR
4298 && operand_equal_p (TREE_OPERAND (arg1, 0),
4299 TREE_OPERAND (arg2, 1), 0)
4300 && operand_equal_p (TREE_OPERAND (arg1, 1),
4301 TREE_OPERAND (arg2, 0), 0))))
4302 switch (comp_code)
4304 case EQ_EXPR:
4305 case UNEQ_EXPR:
4306 tem = fold_convert (arg1_type, arg1);
4307 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4308 case NE_EXPR:
4309 case LTGT_EXPR:
4310 return pedantic_non_lvalue (fold_convert (type, arg1));
4311 case UNGE_EXPR:
4312 case UNGT_EXPR:
4313 if (flag_trapping_math)
4314 break;
4315 /* Fall through. */
4316 case GE_EXPR:
4317 case GT_EXPR:
4318 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4319 arg1 = fold_convert (lang_hooks.types.signed_type
4320 (TREE_TYPE (arg1)), arg1);
4321 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4322 return pedantic_non_lvalue (fold_convert (type, tem));
4323 case UNLE_EXPR:
4324 case UNLT_EXPR:
4325 if (flag_trapping_math)
4326 break;
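/* Fall through. */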
4327 case LE_EXPR:
4328 case LT_EXPR:
4329 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4330 arg1 = fold_convert (lang_hooks.types.signed_type
4331 (TREE_TYPE (arg1)), arg1);
4332 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4333 return negate_expr (fold_convert (type, tem));
4334 default:
4335 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4336 break;
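/* Illustration (hypothetical): for X >= 0 ? X : -X the GE_EXPR arm
   above builds ABS_EXPR <X>.  With A = -0.0 the original expression
   evaluates to -0.0 while abs (A) is +0.0, which is exactly the
   signed-zero hazard described in the comment before the switch.  */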
4339 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4340 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4341 both transformations are correct when A is NaN: A != 0
4342 is then true, and A == 0 is false. */
4344 if (integer_zerop (arg01) && integer_zerop (arg2))
4346 if (comp_code == NE_EXPR)
4347 return pedantic_non_lvalue (fold_convert (type, arg1));
4348 else if (comp_code == EQ_EXPR)
4349 return fold_convert (type, integer_zero_node);
4352 /* Try some transformations of A op B ? A : B.
4354 A == B? A : B same as B
4355 A != B? A : B same as A
4356 A >= B? A : B same as max (A, B)
4357 A > B? A : B same as max (B, A)
4358 A <= B? A : B same as min (A, B)
4359 A < B? A : B same as min (B, A)
4361 As above, these transformations don't work in the presence
4362 of signed zeros. For example, if A and B are zeros of
4363 opposite sign, the first two transformations will change
4364 the sign of the result. In the last four, the original
4365 expressions give different results for (A=+0, B=-0) and
4366 (A=-0, B=+0), but the transformed expressions do not.
4368 The first two transformations are correct if either A or B
4369 is a NaN. In the first transformation, the condition will
4370 be false, and B will indeed be chosen. In the case of the
4371 second transformation, the condition A != B will be true,
4372 and A will be chosen.
4374 The conversions to max() and min() are not correct if B is
4375 a number and A is not. The conditions in the original
4376 expressions will be false, so all four give B. The min()
4377 and max() versions would give a NaN instead. */
4378 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4379 /* Avoid these transformations if the COND_EXPR may be used
4380 as an lvalue in the C++ front-end. PR c++/19199. */
4381 && (in_gimple_form
4382 || strcmp (lang_hooks.name, "GNU C++") != 0
4383 || ! maybe_lvalue_p (arg1)
4384 || ! maybe_lvalue_p (arg2)))
4386 tree comp_op0 = arg00;
4387 tree comp_op1 = arg01;
4388 tree comp_type = TREE_TYPE (comp_op0);
4390 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4391 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4393 comp_type = type;
4394 comp_op0 = arg1;
4395 comp_op1 = arg2;
4398 switch (comp_code)
4400 case EQ_EXPR:
4401 return pedantic_non_lvalue (fold_convert (type, arg2));
4402 case NE_EXPR:
4403 return pedantic_non_lvalue (fold_convert (type, arg1));
4404 case LE_EXPR:
4405 case LT_EXPR:
4406 case UNLE_EXPR:
4407 case UNLT_EXPR:
4408 /* In C++ a ?: expression can be an lvalue, so put the
4409 operand which will be used if they are equal first
4410 so that we can convert this back to the
4411 corresponding COND_EXPR. */
4412 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4414 comp_op0 = fold_convert (comp_type, comp_op0);
4415 comp_op1 = fold_convert (comp_type, comp_op1);
4416 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4417 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4418 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4419 return pedantic_non_lvalue (fold_convert (type, tem));
4421 break;
4422 case GE_EXPR:
4423 case GT_EXPR:
4424 case UNGE_EXPR:
4425 case UNGT_EXPR:
4426 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4428 comp_op0 = fold_convert (comp_type, comp_op0);
4429 comp_op1 = fold_convert (comp_type, comp_op1);
4430 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4431 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4432 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4433 return pedantic_non_lvalue (fold_convert (type, tem));
4435 break;
4436 case UNEQ_EXPR:
4437 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4438 return pedantic_non_lvalue (fold_convert (type, arg2));
4439 break;
4440 case LTGT_EXPR:
4441 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4442 return pedantic_non_lvalue (fold_convert (type, arg1));
4443 break;
4444 default:
4445 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4446 break;
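/* Illustration (hypothetical): A <= B ? A : B becomes MIN_EXPR <A, B>
   and A >= B ? A : B becomes MAX_EXPR <A, B>, but only when NaNs need
   not be honored: if A is a NaN, the comparison is false and the
   original expression picks B, whereas min/max would produce a NaN.  */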
4450 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4451 we might still be able to simplify this. For example,
4452 if C1 is one less or one more than C2, this might have started
4453 out as a MIN or MAX and been transformed by this function.
4454 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4456 if (INTEGRAL_TYPE_P (type)
4457 && TREE_CODE (arg01) == INTEGER_CST
4458 && TREE_CODE (arg2) == INTEGER_CST)
4459 switch (comp_code)
4461 case EQ_EXPR:
4462 /* We can replace A with C1 in this case. */
4463 arg1 = fold_convert (type, arg01);
4464 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4466 case LT_EXPR:
4467 /* If C1 is C2 + 1, this is min(A, C2). */
4468 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4469 OEP_ONLY_CONST)
4470 && operand_equal_p (arg01,
4471 const_binop (PLUS_EXPR, arg2,
4472 integer_one_node, 0),
4473 OEP_ONLY_CONST))
4474 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4475 type, arg1, arg2));
4476 break;
4478 case LE_EXPR:
4479 /* If C1 is C2 - 1, this is min(A, C2). */
4480 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4481 OEP_ONLY_CONST)
4482 && operand_equal_p (arg01,
4483 const_binop (MINUS_EXPR, arg2,
4484 integer_one_node, 0),
4485 OEP_ONLY_CONST))
4486 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4487 type, arg1, arg2));
4488 break;
4490 case GT_EXPR:
4491 /* If C1 is C2 - 1, this is max(A, C2). */
4492 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4493 OEP_ONLY_CONST)
4494 && operand_equal_p (arg01,
4495 const_binop (MINUS_EXPR, arg2,
4496 integer_one_node, 0),
4497 OEP_ONLY_CONST))
4498 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4499 type, arg1, arg2));
4500 break;
4502 case GE_EXPR:
4503 /* If C1 is C2 + 1, this is max(A, C2). */
4504 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4505 OEP_ONLY_CONST)
4506 && operand_equal_p (arg01,
4507 const_binop (PLUS_EXPR, arg2,
4508 integer_one_node, 0),
4509 OEP_ONLY_CONST))
4510 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4511 type, arg1, arg2));
4512 break;
4513 case NE_EXPR:
4514 break;
4515 default:
4516 gcc_unreachable ();
4519 return NULL_TREE;
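/* A compiled-out sketch of the constant case above (the helper is
   hypothetical): C1 == C2 + 1 lets the LT_EXPR arm rebuild a min,
   so both sides below are equal for every int X.  */
#if 0
static int
min_rebuild_demo (int x)
{
  return (x < 6 ? x : 5) == (x < 5 ? x : 5);
}
#endif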
4524 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4525 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4526 #endif
4528 /* EXP is some logical combination of boolean tests. See if we can
4529 merge it into some range test. Return the new tree if so. */
4531 static tree
4532 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4534 int or_op = (code == TRUTH_ORIF_EXPR
4535 || code == TRUTH_OR_EXPR);
4536 int in0_p, in1_p, in_p;
4537 tree low0, low1, low, high0, high1, high;
4538 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4539 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4540 tree tem;
4542 /* If this is an OR operation, invert both sides; we will invert
4543 again at the end. */
4544 if (or_op)
4545 in0_p = ! in0_p, in1_p = ! in1_p;
4547 /* If both expressions are the same, if we can merge the ranges, and we
4548 can build the range test, return it or its inversion. If one of the
4549 ranges is always true or always false, consider it to be the same
4550 expression as the other. */
4551 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4552 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4553 in1_p, low1, high1)
4554 && 0 != (tem = (build_range_check (type,
4555 lhs != 0 ? lhs
4556 : rhs != 0 ? rhs : integer_zero_node,
4557 in_p, low, high))))
4558 return or_op ? invert_truthvalue (tem) : tem;
4560 /* On machines where the branch cost is expensive, if this is a
4561 short-circuited branch and the underlying object on both sides
4562 is the same, make a non-short-circuit operation. */
4563 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4564 && lhs != 0 && rhs != 0
4565 && (code == TRUTH_ANDIF_EXPR
4566 || code == TRUTH_ORIF_EXPR)
4567 && operand_equal_p (lhs, rhs, 0))
4569 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4570 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4571 which cases we can't do this. */
4572 if (simple_operand_p (lhs))
4573 return build2 (code == TRUTH_ANDIF_EXPR
4574 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4575 type, op0, op1);
4577 else if (lang_hooks.decls.global_bindings_p () == 0
4578 && ! CONTAINS_PLACEHOLDER_P (lhs))
4580 tree common = save_expr (lhs);
4582 if (0 != (lhs = build_range_check (type, common,
4583 or_op ? ! in0_p : in0_p,
4584 low0, high0))
4585 && (0 != (rhs = build_range_check (type, common,
4586 or_op ? ! in1_p : in1_p,
4587 low1, high1))))
4588 return build2 (code == TRUTH_ANDIF_EXPR
4589 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4590 type, lhs, rhs);
4594 return 0;
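/* A compiled-out sketch (hypothetical helper) of the equivalence this
   function produces for "x == 2 || x == 3": the two single-point
   ranges [2, 2] and [3, 3] merge into [2, 3], and build_range_check
   then emits one unsigned comparison.  */
#if 0
static int
range_test_demo (unsigned int x)
{
  return (x == 2 || x == 3) == ((x - 2U) <= 1U);
}
#endif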
4597 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4598 bit value. Arrange things so the extra bits will be set to zero if and
4599 only if C is sign-extended to its full width. If MASK is nonzero,
4600 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4602 static tree
4603 unextend (tree c, int p, int unsignedp, tree mask)
4605 tree type = TREE_TYPE (c);
4606 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4607 tree temp;
4609 if (p == modesize || unsignedp)
4610 return c;
4612 /* We work by getting just the sign bit into the low-order bit, then
4613 into the high-order bit, then sign-extend. We then XOR that value
4614 with C. */
4615 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4616 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4618 /* We must use a signed type in order to get an arithmetic right shift.
4619 However, we must also avoid introducing accidental overflows, so that
4620 a subsequent call to integer_zerop will work. Hence we must
4621 do the type conversion here. At this point, the constant is either
4622 zero or one, and the conversion to a signed type can never overflow.
4623 We could get an overflow if this conversion is done anywhere else. */
4624 if (TYPE_UNSIGNED (type))
4625 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4627 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4628 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4629 if (mask != 0)
4630 temp = const_binop (BIT_AND_EXPR, temp,
4631 fold_convert (TREE_TYPE (c), mask), 0);
4632 /* If necessary, convert the type back to match the type of C. */
4633 if (TYPE_UNSIGNED (type))
4634 temp = fold_convert (type, temp);
4636 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
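/* Worked illustration (hypothetical 32-bit mode, P = 8, signed field):
   for C = 0x80 the code isolates the sign bit (0x80 >> 7 = 1), moves
   it to the top (1 << 31), arithmetic-shifts it back down by
   32 - 8 - 1 = 23 bits to get 0xFFFFFF00, and XORs that with C,
   producing 0xFFFFFF80 -- the sign extension of the 8-bit value -128.  */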
4639 /* Find ways of folding logical expressions of LHS and RHS:
4640 Try to merge two comparisons to the same innermost item.
4641 Look for range tests like "ch >= '0' && ch <= '9'".
4642 Look for combinations of simple terms on machines with expensive branches
4643 and evaluate the RHS unconditionally.
4645 For example, if we have p->a == 2 && p->b == 4 and we can make an
4646 object large enough to span both A and B, we can do this with a comparison
4647 against the object ANDed with the a mask.
4649 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4650 operations to do this with one comparison.
4652 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4653 function and the one above.
4655 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4656 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4658 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4659 two operands.
4661 We return the simplified tree or 0 if no optimization is possible. */
4663 static tree
4664 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4666 /* If this is the "or" of two comparisons, we can do something if
4667 the comparisons are NE_EXPR. If this is the "and", we can do something
4668 if the comparisons are EQ_EXPR. I.e.,
4669 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4671 WANTED_CODE is this operation code. For single bit fields, we can
4672 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4673 comparison for one-bit fields. */
4675 enum tree_code wanted_code;
4676 enum tree_code lcode, rcode;
4677 tree ll_arg, lr_arg, rl_arg, rr_arg;
4678 tree ll_inner, lr_inner, rl_inner, rr_inner;
4679 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4680 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4681 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4682 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4683 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4684 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4685 enum machine_mode lnmode, rnmode;
4686 tree ll_mask, lr_mask, rl_mask, rr_mask;
4687 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4688 tree l_const, r_const;
4689 tree lntype, rntype, result;
4690 int first_bit, end_bit;
4691 int volatilep;
4693 /* Start by getting the comparison codes. Fail if anything is volatile.
4694 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4695 it were surrounded with a NE_EXPR. */
4697 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4698 return 0;
4700 lcode = TREE_CODE (lhs);
4701 rcode = TREE_CODE (rhs);
4703 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4705 lhs = build2 (NE_EXPR, truth_type, lhs,
4706 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4707 lcode = NE_EXPR;
4710 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4712 rhs = build2 (NE_EXPR, truth_type, rhs,
4713 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4714 rcode = NE_EXPR;
4717 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4718 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4719 return 0;
4721 ll_arg = TREE_OPERAND (lhs, 0);
4722 lr_arg = TREE_OPERAND (lhs, 1);
4723 rl_arg = TREE_OPERAND (rhs, 0);
4724 rr_arg = TREE_OPERAND (rhs, 1);
4726 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4727 if (simple_operand_p (ll_arg)
4728 && simple_operand_p (lr_arg))
4730 tree result;
4731 if (operand_equal_p (ll_arg, rl_arg, 0)
4732 && operand_equal_p (lr_arg, rr_arg, 0))
4734 result = combine_comparisons (code, lcode, rcode,
4735 truth_type, ll_arg, lr_arg);
4736 if (result)
4737 return result;
4739 else if (operand_equal_p (ll_arg, rr_arg, 0)
4740 && operand_equal_p (lr_arg, rl_arg, 0))
4742 result = combine_comparisons (code, lcode,
4743 swap_tree_comparison (rcode),
4744 truth_type, ll_arg, lr_arg);
4745 if (result)
4746 return result;
4750 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4751 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4753 /* If the RHS can be evaluated unconditionally and its operands are
4754 simple, it wins to evaluate the RHS unconditionally on machines
4755 with expensive branches. In this case, this isn't a comparison
4756 that can be merged. Avoid doing this if the RHS is a floating-point
4757 comparison since those can trap. */
4759 if (BRANCH_COST >= 2
4760 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4761 && simple_operand_p (rl_arg)
4762 && simple_operand_p (rr_arg))
4764 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4765 if (code == TRUTH_OR_EXPR
4766 && lcode == NE_EXPR && integer_zerop (lr_arg)
4767 && rcode == NE_EXPR && integer_zerop (rr_arg)
4768 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4769 return build2 (NE_EXPR, truth_type,
4770 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4771 ll_arg, rl_arg),
4772 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4774 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4775 if (code == TRUTH_AND_EXPR
4776 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4777 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4778 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4779 return build2 (EQ_EXPR, truth_type,
4780 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4781 ll_arg, rl_arg),
4782 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4784 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4785 return build2 (code, truth_type, lhs, rhs);
4788 /* See if the comparisons can be merged. Then get all the parameters for
4789 each side. */
4791 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4792 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4793 return 0;
4795 volatilep = 0;
4796 ll_inner = decode_field_reference (ll_arg,
4797 &ll_bitsize, &ll_bitpos, &ll_mode,
4798 &ll_unsignedp, &volatilep, &ll_mask,
4799 &ll_and_mask);
4800 lr_inner = decode_field_reference (lr_arg,
4801 &lr_bitsize, &lr_bitpos, &lr_mode,
4802 &lr_unsignedp, &volatilep, &lr_mask,
4803 &lr_and_mask);
4804 rl_inner = decode_field_reference (rl_arg,
4805 &rl_bitsize, &rl_bitpos, &rl_mode,
4806 &rl_unsignedp, &volatilep, &rl_mask,
4807 &rl_and_mask);
4808 rr_inner = decode_field_reference (rr_arg,
4809 &rr_bitsize, &rr_bitpos, &rr_mode,
4810 &rr_unsignedp, &volatilep, &rr_mask,
4811 &rr_and_mask);
4813 /* The inner operation on the lhs of each comparison must be the
4814 same if we are to be able to do anything.
4815 Then see if we have constants. If not, the same must be true for
4816 the rhs's. */
4817 if (volatilep || ll_inner == 0 || rl_inner == 0
4818 || ! operand_equal_p (ll_inner, rl_inner, 0))
4819 return 0;
4821 if (TREE_CODE (lr_arg) == INTEGER_CST
4822 && TREE_CODE (rr_arg) == INTEGER_CST)
4823 l_const = lr_arg, r_const = rr_arg;
4824 else if (lr_inner == 0 || rr_inner == 0
4825 || ! operand_equal_p (lr_inner, rr_inner, 0))
4826 return 0;
4827 else
4828 l_const = r_const = 0;
4830 /* If either comparison code is not correct for our logical operation,
4831 fail. However, we can convert a one-bit comparison against zero into
4832 the opposite comparison against that bit being set in the field. */
4834 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4835 if (lcode != wanted_code)
4837 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4839 /* Make the left operand unsigned, since we are only interested
4840 in the value of one bit. Otherwise we are doing the wrong
4841 thing below. */
4842 ll_unsignedp = 1;
4843 l_const = ll_mask;
4845 else
4846 return 0;
4849 /* This is analogous to the code for l_const above. */
4850 if (rcode != wanted_code)
4852 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4854 rl_unsignedp = 1;
4855 r_const = rl_mask;
4857 else
4858 return 0;
4861 /* After this point all optimizations will generate bit-field
4862 references, which we might not want. */
4863 if (! lang_hooks.can_use_bit_fields_p ())
4864 return 0;
4866 /* See if we can find a mode that contains both fields being compared on
4867 the left. If we can't, fail. Otherwise, update all constants and masks
4868 to be relative to a field of that size. */
4869 first_bit = MIN (ll_bitpos, rl_bitpos);
4870 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4871 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4872 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4873 volatilep);
4874 if (lnmode == VOIDmode)
4875 return 0;
4877 lnbitsize = GET_MODE_BITSIZE (lnmode);
4878 lnbitpos = first_bit & ~ (lnbitsize - 1);
4879 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4880 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4882 if (BYTES_BIG_ENDIAN)
4884 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4885 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4888 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4889 size_int (xll_bitpos), 0);
4890 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4891 size_int (xrl_bitpos), 0);
4893 if (l_const)
4895 l_const = fold_convert (lntype, l_const);
4896 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4897 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4898 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4899 fold_build1 (BIT_NOT_EXPR,
4900 lntype, ll_mask),
4901 0)))
4903 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4905 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4908 if (r_const)
4910 r_const = fold_convert (lntype, r_const);
4911 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4912 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4913 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4914 fold_build1 (BIT_NOT_EXPR,
4915 lntype, rl_mask),
4916 0)))
4918 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4920 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4924 /* If the right sides are not constant, do the same for them. Also,
4925 disallow this optimization if a size or signedness mismatch occurs
4926 between the left and right sides. */
4927 if (l_const == 0)
4929 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4930 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4931 /* Make sure the two fields on the right
4932 correspond to the left without being swapped. */
4933 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4934 return 0;
4936 first_bit = MIN (lr_bitpos, rr_bitpos);
4937 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4938 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4939 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4940 volatilep);
4941 if (rnmode == VOIDmode)
4942 return 0;
4944 rnbitsize = GET_MODE_BITSIZE (rnmode);
4945 rnbitpos = first_bit & ~ (rnbitsize - 1);
4946 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4947 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4949 if (BYTES_BIG_ENDIAN)
4951 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4952 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4955 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4956 size_int (xlr_bitpos), 0);
4957 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4958 size_int (xrr_bitpos), 0);
4960 /* Make a mask that corresponds to both fields being compared.
4961 Do this for both items being compared. If the operands are the
4962 same size and the bits being compared are in the same position
4963 then we can do this by masking both and comparing the masked
4964 results. */
4965 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4966 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4967 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4969 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4970 ll_unsignedp || rl_unsignedp);
4971 if (! all_ones_mask_p (ll_mask, lnbitsize))
4972 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4974 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4975 lr_unsignedp || rr_unsignedp);
4976 if (! all_ones_mask_p (lr_mask, rnbitsize))
4977 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4979 return build2 (wanted_code, truth_type, lhs, rhs);
4982 /* There is still another way we can do something: If both pairs of
4983 fields being compared are adjacent, we may be able to make a wider
4984 field containing them both.
4986 Note that we still must mask the lhs/rhs expressions. Furthermore,
4987 the mask must be shifted to account for the shift done by
4988 make_bit_field_ref. */
4989 if ((ll_bitsize + ll_bitpos == rl_bitpos
4990 && lr_bitsize + lr_bitpos == rr_bitpos)
4991 || (ll_bitpos == rl_bitpos + rl_bitsize
4992 && lr_bitpos == rr_bitpos + rr_bitsize))
4994 tree type;
4996 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4997 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4998 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4999 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5001 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5002 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5003 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5004 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5006 /* Convert to the smaller type before masking out unwanted bits. */
5007 type = lntype;
5008 if (lntype != rntype)
5010 if (lnbitsize > rnbitsize)
5012 lhs = fold_convert (rntype, lhs);
5013 ll_mask = fold_convert (rntype, ll_mask);
5014 type = rntype;
5016 else if (lnbitsize < rnbitsize)
5018 rhs = fold_convert (lntype, rhs);
5019 lr_mask = fold_convert (lntype, lr_mask);
5020 type = lntype;
5024 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5025 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5027 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5028 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5030 return build2 (wanted_code, truth_type, lhs, rhs);
5033 return 0;
5036 /* Handle the case of comparisons with constants. If there is something in
5037 common between the masks, those bits of the constants must be the same.
5038 If not, the condition is always false. Test for this to avoid generating
5039 incorrect code below. */
5040 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5041 if (! integer_zerop (result)
5042 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5043 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5045 if (wanted_code == NE_EXPR)
5047 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5048 return constant_boolean_node (true, truth_type);
5050 else
5052 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5053 return constant_boolean_node (false, truth_type);
5057 /* Construct the expression we will return. First get the component
5058 reference we will make. Unless the mask is all ones across the width
5059 of that field, perform the mask operation. Then compare with the
5060 merged constant. */
5061 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5062 ll_unsignedp || rl_unsignedp);
5064 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5065 if (! all_ones_mask_p (ll_mask, lnbitsize))
5066 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5068 return build2 (wanted_code, truth_type, result,
5069 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
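/* Illustration (hypothetical struct with adjacent byte-sized fields):
   given struct s { unsigned char a, b; }, the test
   p->a == 2 && p->b == 4 can be merged by the code above into a single
   16-bit bit-field load compared against the combined constant, with
   the mask and merged constant laid out according to endianness.  */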
5072 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5073 constant. */
5075 static tree
5076 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5078 tree arg0 = op0;
5079 enum tree_code op_code;
5080 tree comp_const = op1;
5081 tree minmax_const;
5082 int consts_equal, consts_lt;
5083 tree inner;
5085 STRIP_SIGN_NOPS (arg0);
5087 op_code = TREE_CODE (arg0);
5088 minmax_const = TREE_OPERAND (arg0, 1);
5089 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5090 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5091 inner = TREE_OPERAND (arg0, 0);
5093 /* If something does not permit us to optimize, return the original tree. */
5094 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5095 || TREE_CODE (comp_const) != INTEGER_CST
5096 || TREE_CONSTANT_OVERFLOW (comp_const)
5097 || TREE_CODE (minmax_const) != INTEGER_CST
5098 || TREE_CONSTANT_OVERFLOW (minmax_const))
5099 return NULL_TREE;
5101 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5102 and GT_EXPR, doing the rest with recursive calls using logical
5103 simplifications. */
5104 switch (code)
5106 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5108 /* FIXME: We should be able to invert code without building a
5109 scratch tree node, but doing so would require us to
5110 duplicate a part of invert_truthvalue here. */
5111 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5112 tem = optimize_minmax_comparison (TREE_CODE (tem),
5113 TREE_TYPE (tem),
5114 TREE_OPERAND (tem, 0),
5115 TREE_OPERAND (tem, 1));
5116 return invert_truthvalue (tem);
5119 case GE_EXPR:
5120 return
5121 fold_build2 (TRUTH_ORIF_EXPR, type,
5122 optimize_minmax_comparison
5123 (EQ_EXPR, type, arg0, comp_const),
5124 optimize_minmax_comparison
5125 (GT_EXPR, type, arg0, comp_const));
5127 case EQ_EXPR:
5128 if (op_code == MAX_EXPR && consts_equal)
5129 /* MAX (X, 0) == 0 -> X <= 0 */
5130 return fold_build2 (LE_EXPR, type, inner, comp_const);
5132 else if (op_code == MAX_EXPR && consts_lt)
5133 /* MAX (X, 0) == 5 -> X == 5 */
5134 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5136 else if (op_code == MAX_EXPR)
5137 /* MAX (X, 0) == -1 -> false */
5138 return omit_one_operand (type, integer_zero_node, inner);
5140 else if (consts_equal)
5141 /* MIN (X, 0) == 0 -> X >= 0 */
5142 return fold_build2 (GE_EXPR, type, inner, comp_const);
5144 else if (consts_lt)
5145 /* MIN (X, 0) == 5 -> false */
5146 return omit_one_operand (type, integer_zero_node, inner);
5148 else
5149 /* MIN (X, 0) == -1 -> X == -1 */
5150 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5152 case GT_EXPR:
5153 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5154 /* MAX (X, 0) > 0 -> X > 0
5155 MAX (X, 0) > 5 -> X > 5 */
5156 return fold_build2 (GT_EXPR, type, inner, comp_const);
5158 else if (op_code == MAX_EXPR)
5159 /* MAX (X, 0) > -1 -> true */
5160 return omit_one_operand (type, integer_one_node, inner);
5162 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5163 /* MIN (X, 0) > 0 -> false
5164 MIN (X, 0) > 5 -> false */
5165 return omit_one_operand (type, integer_zero_node, inner);
5167 else
5168 /* MIN (X, 0) > -1 -> X > -1 */
5169 return fold_build2 (GT_EXPR, type, inner, comp_const);
5171 default:
5172 return NULL_TREE;
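/* Illustration (hypothetical signed int X): MAX_EXPR <X, 0> == 0
   matches the consts_equal EQ_EXPR arm and folds to X <= 0, while
   MIN_EXPR <X, 0> > 5 can never hold and folds to constant false,
   keeping X only for its side effects via omit_one_operand.  */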
5176 /* T is an integer expression that is being multiplied by, divided by, or
5177 reduced modulo a constant C (CODE says which operation and what kind of
5178 divide or modulus). See if we can eliminate that operation by folding it with
5179 other operations already in T. WIDE_TYPE, if non-null, is a type that
5180 should be used for the computation if wider than our type.
5182 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5183 (X * 2) + (Y * 4). We must, however, be assured that either the original
5184 expression would not overflow or that overflow is undefined for the type
5185 in the language in question.
5187 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5188 the machine has a multiply-accumulate insn or that this is part of an
5189 addressing calculation.
5191 If we return a non-null expression, it is an equivalent form of the
5192 original computation, but need not be in the original type. */
5194 static tree
5195 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5197 /* To avoid exponential search depth, refuse to allow recursion past
5198 three levels. Beyond that (1) it's highly unlikely that we'll find
5199 something interesting and (2) we've probably processed it before
5200 when we built the inner expression. */
5202 static int depth;
5203 tree ret;
5205 if (depth > 3)
5206 return NULL;
5208 depth++;
5209 ret = extract_muldiv_1 (t, c, code, wide_type);
5210 depth--;
5212 return ret;
5215 static tree
5216 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5218 tree type = TREE_TYPE (t);
5219 enum tree_code tcode = TREE_CODE (t);
5220 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5221 > GET_MODE_SIZE (TYPE_MODE (type)))
5222 ? wide_type : type);
5223 tree t1, t2;
5224 int same_p = tcode == code;
5225 tree op0 = NULL_TREE, op1 = NULL_TREE;
5227 /* Don't deal with constants of zero here; they confuse the code below. */
5228 if (integer_zerop (c))
5229 return NULL_TREE;
5231 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5232 op0 = TREE_OPERAND (t, 0);
5234 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5235 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5237 /* Note that we need not handle conditional operations here since fold
5238 already handles those cases. So just do arithmetic here. */
5239 switch (tcode)
5241 case INTEGER_CST:
5242 /* For a constant, we can always simplify if we are a multiply
5243 or (for divide and modulus) if it is a multiple of our constant. */
5244 if (code == MULT_EXPR
5245 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5246 return const_binop (code, fold_convert (ctype, t),
5247 fold_convert (ctype, c), 0);
5248 break;
5250 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5251 /* If op0 is an expression ... */
5252 if ((COMPARISON_CLASS_P (op0)
5253 || UNARY_CLASS_P (op0)
5254 || BINARY_CLASS_P (op0)
5255 || EXPRESSION_CLASS_P (op0))
5256 /* ... and is unsigned, and its type is smaller than ctype,
5257 then we cannot pass through as widening. */
5258 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5259 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5260 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5261 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5262 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5263 /* ... or this is a truncation (t is narrower than op0),
5264 then we cannot pass through this narrowing. */
5265 || (GET_MODE_SIZE (TYPE_MODE (type))
5266 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5267 /* ... or signedness changes for division or modulus,
5268 then we cannot pass through this conversion. */
5269 || (code != MULT_EXPR
5270 && (TYPE_UNSIGNED (ctype)
5271 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5272 break;
5274 /* Pass the constant down and see if we can make a simplification. If
5275 we can, replace this expression with the inner simplification for
5276 possible later conversion to our or some other type. */
5277 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5278 && TREE_CODE (t2) == INTEGER_CST
5279 && ! TREE_CONSTANT_OVERFLOW (t2)
5280 && (0 != (t1 = extract_muldiv (op0, t2, code,
5281 code == MULT_EXPR
5282 ? ctype : NULL_TREE))))
5283 return t1;
5284 break;
5286 case ABS_EXPR:
5287 /* If widening the type changes it from signed to unsigned, then we
5288 must avoid building ABS_EXPR itself as unsigned. */
5289 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5291 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5292 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5294 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5295 return fold_convert (ctype, t1);
5297 break;
5299 /* FALLTHROUGH */
5300 case NEGATE_EXPR:
5301 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5302 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5303 break;
5305 case MIN_EXPR: case MAX_EXPR:
5306 /* If widening the type changes the signedness, then we can't perform
5307 this optimization as that changes the result. */
5308 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5309 break;
5311 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5312 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5313 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5315 if (tree_int_cst_sgn (c) < 0)
5316 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5318 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5319 fold_convert (ctype, t2));
5321 break;
5323 case LSHIFT_EXPR: case RSHIFT_EXPR:
5324 /* If the second operand is constant, this is a multiplication
5325 or floor division, by a power of two, so we can treat it that
5326 way unless the multiplier or divisor overflows. Signed
5327 left-shift overflow is implementation-defined rather than
5328 undefined in C90, so do not convert signed left shift into
5329 multiplication. */
5330 if (TREE_CODE (op1) == INTEGER_CST
5331 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5332 /* const_binop may not detect overflow correctly,
5333 so check for it explicitly here. */
5334 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5335 && TREE_INT_CST_HIGH (op1) == 0
5336 && 0 != (t1 = fold_convert (ctype,
5337 const_binop (LSHIFT_EXPR,
5338 size_one_node,
5339 op1, 0)))
5340 && ! TREE_OVERFLOW (t1))
5341 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5342 ? MULT_EXPR : FLOOR_DIV_EXPR,
5343 ctype, fold_convert (ctype, op0), t1),
5344 c, code, wide_type);
5345 break;
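/* Illustration (hypothetical): X << 3 is rewritten here as X * 8
   (and X >> 3 as floor division by 8) so that the multiply/divide
   logic below can try to combine the power of two with the incoming
   constant C.  */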
5347 case PLUS_EXPR: case MINUS_EXPR:
5348 /* See if we can eliminate the operation on both sides. If we can, we
5349 can return a new PLUS or MINUS. If we can't, the only remaining
5350 cases where we can do anything are if the second operand is a
5351 constant. */
5352 t1 = extract_muldiv (op0, c, code, wide_type);
5353 t2 = extract_muldiv (op1, c, code, wide_type);
5354 if (t1 != 0 && t2 != 0
5355 && (code == MULT_EXPR
5356 /* If not multiplication, we can only do this if both operands
5357 are divisible by c. */
5358 || (multiple_of_p (ctype, op0, c)
5359 && multiple_of_p (ctype, op1, c))))
5360 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5361 fold_convert (ctype, t2));
5363 /* If this was a subtraction, negate OP1 and set it to be an addition.
5364 This simplifies the logic below. */
5365 if (tcode == MINUS_EXPR)
5366 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5368 if (TREE_CODE (op1) != INTEGER_CST)
5369 break;
5371 /* If either OP1 or C are negative, this optimization is not safe for
5372 some of the division and remainder types while for others we need
5373 to change the code. */
5374 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5376 if (code == CEIL_DIV_EXPR)
5377 code = FLOOR_DIV_EXPR;
5378 else if (code == FLOOR_DIV_EXPR)
5379 code = CEIL_DIV_EXPR;
5380 else if (code != MULT_EXPR
5381 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5382 break;
5385 /* If it's a multiply or a division/modulus operation of a multiple
5386 of our constant, do the operation and verify it doesn't overflow. */
5387 if (code == MULT_EXPR
5388 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5390 op1 = const_binop (code, fold_convert (ctype, op1),
5391 fold_convert (ctype, c), 0);
5392 /* We allow the constant to overflow with wrapping semantics. */
5393 if (op1 == 0
5394 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5395 break;
5397 else
5398 break;
5400 /* If we have an unsigned type that is not a sizetype, we cannot widen
5401 the operation since it will change the result if the original
5402 computation overflowed. */
5403 if (TYPE_UNSIGNED (ctype)
5404 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5405 && ctype != type)
5406 break;
5408 /* If we were able to eliminate our operation from the first side,
5409 apply our operation to the second side and reform the PLUS. */
5410 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5411 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5413 /* The last case is if we are a multiply. In that case, we can
5414 apply the distributive law to commute the multiply and addition
5415 if the multiplication of the constants doesn't overflow. */
5416 if (code == MULT_EXPR)
5417 return fold_build2 (tcode, ctype,
5418 fold_build2 (code, ctype,
5419 fold_convert (ctype, op0),
5420 fold_convert (ctype, c)),
5421 op1);
5423 break;
5425 case MULT_EXPR:
5426 /* We have a special case here if we are doing something like
5427 (C * 8) % 4 since we know that's zero. */
5428 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5429 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5430 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5431 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5432 return omit_one_operand (type, integer_zero_node, op0);
5434 /* ... fall through ... */
5436 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5437 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5438 /* If we can extract our operation from the LHS, do so and return a
5439 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5440 do something only if the second operand is a constant. */
5441 if (same_p
5442 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5443 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5444 fold_convert (ctype, op1));
5445 else if (tcode == MULT_EXPR && code == MULT_EXPR
5446 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5447 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5448 fold_convert (ctype, t1));
5449 else if (TREE_CODE (op1) != INTEGER_CST)
5450 return 0;
5452 /* If these are the same operation types, we can associate them
5453 assuming no overflow. */
5454 if (tcode == code
5455 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5456 fold_convert (ctype, c), 0))
5457 && ! TREE_OVERFLOW (t1))
5458 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5460 /* If these operations "cancel" each other, we have the main
5461 optimizations of this pass, which occur when either constant is a
5462 multiple of the other, in which case we replace this with an
5463 operation of either CODE or TCODE.
5465 If we have an unsigned type that is not a sizetype, we cannot do
5466 this since it will change the result if the original computation
5467 overflowed. */
5468 if ((! TYPE_UNSIGNED (ctype)
5469 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5470 && ! flag_wrapv
5471 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5472 || (tcode == MULT_EXPR
5473 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5474 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5476 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5477 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5478 fold_convert (ctype,
5479 const_binop (TRUNC_DIV_EXPR,
5480 op1, c, 0)));
5481 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5482 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5483 fold_convert (ctype,
5484 const_binop (TRUNC_DIV_EXPR,
5485 c, op1, 0)));
5487 break;
5489 default:
5490 break;
5493 return 0;
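/* Editorial sketch, not part of fold-const.c: the source-level identities
   the MULT_EXPR/DIV_EXPR "cancel" cases above rely on.  The function name
   is invented; the identities assume the inner multiply does not overflow.  */
static int
extract_muldiv_demo (int x)
{
  int cancel = ((x * 8) / 4 == x * 2);   /* 8 is a multiple of 4.  */
  int modzero = ((x * 8) % 4 == 0);      /* 4 divides the multiplier 8.  */
  return cancel && modzero;
}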
5496 /* Return a node which has the indicated constant VALUE (either 0 or
5497 1), and is of the indicated TYPE. */
5499 tree
5500 constant_boolean_node (int value, tree type)
5502 if (type == integer_type_node)
5503 return value ? integer_one_node : integer_zero_node;
5504 else if (type == boolean_type_node)
5505 return value ? boolean_true_node : boolean_false_node;
5506 else
5507 return build_int_cst (type, value);
5511 /* Return true if expr looks like an ARRAY_REF and set base and
5512 offset to the appropriate trees. If there is no offset,
5513 offset is set to NULL_TREE. Base will be canonicalized to
5514 something you can get the element type from using
5515 TREE_TYPE (TREE_TYPE (base)). */
5517 static bool
5518 extract_array_ref (tree expr, tree *base, tree *offset)
5520 /* One canonical form is a PLUS_EXPR with the first
5521 argument being an ADDR_EXPR with a possible NOP_EXPR
5522 attached. */
5523 if (TREE_CODE (expr) == PLUS_EXPR)
5525 tree op0 = TREE_OPERAND (expr, 0);
5526 tree inner_base, dummy1;
5527 /* Strip NOP_EXPRs here because the C frontends and/or
5528 folders may present us with (int *)&x.a + 4B. */
5529 STRIP_NOPS (op0);
5530 if (extract_array_ref (op0, &inner_base, &dummy1))
5532 *base = inner_base;
5533 if (dummy1 == NULL_TREE)
5534 *offset = TREE_OPERAND (expr, 1);
5535 else
5536 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5537 dummy1, TREE_OPERAND (expr, 1));
5538 return true;
5541 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5542 which we transform into an ADDR_EXPR with appropriate
5543 offset. For other arguments to the ADDR_EXPR we assume
5544 zero offset and as such do not care about the ADDR_EXPR
5545 type and strip possible nops from it. */
5546 else if (TREE_CODE (expr) == ADDR_EXPR)
5548 tree op0 = TREE_OPERAND (expr, 0);
5549 if (TREE_CODE (op0) == ARRAY_REF)
5551 *base = TREE_OPERAND (op0, 0);
5552 *offset = TREE_OPERAND (op0, 1);
5554 else
5556 /* Handle array-to-pointer decay as &a. */
5557 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5558 *base = TREE_OPERAND (expr, 0);
5559 else
5560 *base = expr;
5561 *offset = NULL_TREE;
5563 return true;
5565 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5566 else if (SSA_VAR_P (expr)
5567 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5569 *base = expr;
5570 *offset = NULL_TREE;
5571 return true;
5574 return false;
5578 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5579 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5580 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5581 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5582 COND is the first argument to CODE; otherwise (as in the example
5583 given here), it is the second argument. TYPE is the type of the
5584 original expression. Return NULL_TREE if no simplification is
5585 possible. */
5587 static tree
5588 fold_binary_op_with_conditional_arg (enum tree_code code,
5589 tree type, tree op0, tree op1,
5590 tree cond, tree arg, int cond_first_p)
5592 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5593 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5594 tree test, true_value, false_value;
5595 tree lhs = NULL_TREE;
5596 tree rhs = NULL_TREE;
5598 /* This transformation is only worthwhile if we don't have to wrap
5599 arg in a SAVE_EXPR, and the operation can be simplified on at least
5600 one of the branches once it's pushed inside the COND_EXPR. */
5601 if (!TREE_CONSTANT (arg))
5602 return NULL_TREE;
5604 if (TREE_CODE (cond) == COND_EXPR)
5606 test = TREE_OPERAND (cond, 0);
5607 true_value = TREE_OPERAND (cond, 1);
5608 false_value = TREE_OPERAND (cond, 2);
5609 /* If this operand throws an exception, then it does not make
5610 sense to try to perform a logical or arithmetic operation
5611 involving it. */
5612 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5613 lhs = true_value;
5614 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5615 rhs = false_value;
5617 else
5619 tree testtype = TREE_TYPE (cond);
5620 test = cond;
5621 true_value = constant_boolean_node (true, testtype);
5622 false_value = constant_boolean_node (false, testtype);
5625 arg = fold_convert (arg_type, arg);
5626 if (lhs == 0)
5628 true_value = fold_convert (cond_type, true_value);
5629 if (cond_first_p)
5630 lhs = fold_build2 (code, type, true_value, arg);
5631 else
5632 lhs = fold_build2 (code, type, arg, true_value);
5634 if (rhs == 0)
5636 false_value = fold_convert (cond_type, false_value);
5637 if (cond_first_p)
5638 rhs = fold_build2 (code, type, false_value, arg);
5639 else
5640 rhs = fold_build2 (code, type, arg, false_value);
5643 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5644 return fold_convert (type, test);
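/* Editorial sketch, not part of fold-const.c: the transformation above at
   the C level, with the constant 4 playing the role of ARG.  The function
   name is invented.  */
static int
cond_arg_demo (int b, int x, int y)
{
  /* 4 + (b ? x : y) is rewritten as b ? 4 + x : 4 + y, so each branch
     can fold further on its own.  */
  return (4 + (b ? x : y)) == (b ? 4 + x : 4 + y);  /* always 1 */
}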
5648 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5650 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5651 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5652 ADDEND is the same as X.
5654 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5655 and finite. The problematic cases are when X is zero, and its mode
5656 has signed zeros. In the case of rounding towards -infinity,
5657 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5658 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5660 static bool
5661 fold_real_zero_addition_p (tree type, tree addend, int negate)
5663 if (!real_zerop (addend))
5664 return false;
5666 /* Don't allow the fold with -fsignaling-nans. */
5667 if (HONOR_SNANS (TYPE_MODE (type)))
5668 return false;
5670 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5671 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5672 return true;
5674 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5675 if (TREE_CODE (addend) == REAL_CST
5676 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5677 negate = !negate;
5679 /* The mode has signed zeros, and we have to honor their sign.
5680 In this situation, there is only one case we can return true for.
5681 X - 0 is the same as X unless rounding towards -infinity is
5682 supported. */
5683 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
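/* Editorial sketch, not part of fold-const.c: why X + 0.0 cannot be folded
   to X when signed zeros are honored, while X - 0.0 can under the default
   rounding mode.  Assumes IEEE doubles; signbit is from <math.h>.  */
#include <math.h>
static int
real_zero_demo (void)
{
  double x = -0.0;
  /* Under round-to-nearest, -0.0 + 0.0 is +0.0 (the sign is lost),
     but -0.0 - 0.0 stays -0.0.  */
  return signbit (x + 0.0) == 0 && signbit (x - 0.0) != 0;
}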
5686 /* Subroutine of fold() that checks comparisons of built-in math
5687 functions against real constants.
5689 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5690 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5691 is the type of the result and ARG0 and ARG1 are the operands of the
5692 comparison. ARG1 must be a TREE_REAL_CST.
5694 The function returns the constant folded tree if a simplification
5695 can be made, and NULL_TREE otherwise. */
5697 static tree
5698 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5699 tree type, tree arg0, tree arg1)
5701 REAL_VALUE_TYPE c;
5703 if (BUILTIN_SQRT_P (fcode))
5705 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5706 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5708 c = TREE_REAL_CST (arg1);
5709 if (REAL_VALUE_NEGATIVE (c))
5711 /* sqrt(x) < y is always false, if y is negative. */
5712 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5713 return omit_one_operand (type, integer_zero_node, arg);
5715 /* sqrt(x) > y is always true, if y is negative and we
5716 don't care about NaNs, i.e. negative values of x. */
5717 if (code == NE_EXPR || !HONOR_NANS (mode))
5718 return omit_one_operand (type, integer_one_node, arg);
5720 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5721 return fold_build2 (GE_EXPR, type, arg,
5722 build_real (TREE_TYPE (arg), dconst0));
5724 else if (code == GT_EXPR || code == GE_EXPR)
5726 REAL_VALUE_TYPE c2;
5728 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5729 real_convert (&c2, mode, &c2);
5731 if (REAL_VALUE_ISINF (c2))
5733 /* sqrt(x) > y is x == +Inf, when y is very large. */
5734 if (HONOR_INFINITIES (mode))
5735 return fold_build2 (EQ_EXPR, type, arg,
5736 build_real (TREE_TYPE (arg), c2));
5738 /* sqrt(x) > y is always false, when y is very large
5739 and we don't care about infinities. */
5740 return omit_one_operand (type, integer_zero_node, arg);
5743 /* sqrt(x) > c is the same as x > c*c. */
5744 return fold_build2 (code, type, arg,
5745 build_real (TREE_TYPE (arg), c2));
5747 else if (code == LT_EXPR || code == LE_EXPR)
5749 REAL_VALUE_TYPE c2;
5751 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5752 real_convert (&c2, mode, &c2);
5754 if (REAL_VALUE_ISINF (c2))
5756 /* sqrt(x) < y is always true, when y is a very large
5757 value and we don't care about NaNs or Infinities. */
5758 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5759 return omit_one_operand (type, integer_one_node, arg);
5761 /* sqrt(x) < y is x != +Inf when y is very large and we
5762 don't care about NaNs. */
5763 if (! HONOR_NANS (mode))
5764 return fold_build2 (NE_EXPR, type, arg,
5765 build_real (TREE_TYPE (arg), c2));
5767 /* sqrt(x) < y is x >= 0 when y is very large and we
5768 don't care about Infinities. */
5769 if (! HONOR_INFINITIES (mode))
5770 return fold_build2 (GE_EXPR, type, arg,
5771 build_real (TREE_TYPE (arg), dconst0));
5773 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5774 if (lang_hooks.decls.global_bindings_p () != 0
5775 || CONTAINS_PLACEHOLDER_P (arg))
5776 return NULL_TREE;
5778 arg = save_expr (arg);
5779 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5780 fold_build2 (GE_EXPR, type, arg,
5781 build_real (TREE_TYPE (arg),
5782 dconst0)),
5783 fold_build2 (NE_EXPR, type, arg,
5784 build_real (TREE_TYPE (arg),
5785 c2)));
5788 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5789 if (! HONOR_NANS (mode))
5790 return fold_build2 (code, type, arg,
5791 build_real (TREE_TYPE (arg), c2));
5793 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5794 if (lang_hooks.decls.global_bindings_p () == 0
5795 && ! CONTAINS_PLACEHOLDER_P (arg))
5797 arg = save_expr (arg);
5798 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5799 fold_build2 (GE_EXPR, type, arg,
5800 build_real (TREE_TYPE (arg),
5801 dconst0)),
5802 fold_build2 (code, type, arg,
5803 build_real (TREE_TYPE (arg),
5804 c2)));
5809 return NULL_TREE;
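/* Editorial sketch, not part of fold-const.c: the sqrt comparisons folded
   above, written out in C.  Assumes IEEE doubles and sqrt from <math.h>;
   the function name is invented.  */
#include <math.h>
static int
sqrt_compare_demo (double x)
{
  /* sqrt(x) < 2.0 becomes x >= 0.0 && x < 4.0, i.e. c*c on the right.  */
  int lt = (sqrt (x) < 2.0) == (x >= 0.0 && x < 4.0);
  /* sqrt(x) > y for negative y becomes x >= 0.0, since sqrt is never
     negative and yields NaN for negative arguments.  */
  int gt = (sqrt (x) > -1.0) == (x >= 0.0);
  return lt && gt;  /* 1 for every x, NaN included */
}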
5812 /* Subroutine of fold() that optimizes comparisons against Infinities,
5813 either +Inf or -Inf.
5815 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5816 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5817 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5819 The function returns the constant folded tree if a simplification
5820 can be made, and NULL_TREE otherwise. */
5822 static tree
5823 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5825 enum machine_mode mode;
5826 REAL_VALUE_TYPE max;
5827 tree temp;
5828 bool neg;
5830 mode = TYPE_MODE (TREE_TYPE (arg0));
5832 /* For negative infinity swap the sense of the comparison. */
5833 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5834 if (neg)
5835 code = swap_tree_comparison (code);
5837 switch (code)
5839 case GT_EXPR:
5840 /* x > +Inf is always false, if we ignore sNaNs. */
5841 if (HONOR_SNANS (mode))
5842 return NULL_TREE;
5843 return omit_one_operand (type, integer_zero_node, arg0);
5845 case LE_EXPR:
5846 /* x <= +Inf is always true, if we don't care about NaNs. */
5847 if (! HONOR_NANS (mode))
5848 return omit_one_operand (type, integer_one_node, arg0);
5850 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5851 if (lang_hooks.decls.global_bindings_p () == 0
5852 && ! CONTAINS_PLACEHOLDER_P (arg0))
5854 arg0 = save_expr (arg0);
5855 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5857 break;
5859 case EQ_EXPR:
5860 case GE_EXPR:
5861 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5862 real_maxval (&max, neg, mode);
5863 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5864 arg0, build_real (TREE_TYPE (arg0), max));
5866 case LT_EXPR:
5867 /* x < +Inf is always equal to x <= DBL_MAX. */
5868 real_maxval (&max, neg, mode);
5869 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5870 arg0, build_real (TREE_TYPE (arg0), max));
5872 case NE_EXPR:
5873 /* x != +Inf is always equal to !(x > DBL_MAX). */
5874 real_maxval (&max, neg, mode);
5875 if (! HONOR_NANS (mode))
5876 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5877 arg0, build_real (TREE_TYPE (arg0), max));
5879 /* The transformation below creates non-gimple code and thus is
5880 not appropriate if we are in gimple form. */
5881 if (in_gimple_form)
5882 return NULL_TREE;
5884 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5885 arg0, build_real (TREE_TYPE (arg0), max));
5886 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5888 default:
5889 break;
5892 return NULL_TREE;
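/* Editorial sketch, not part of fold-const.c: the +Inf comparisons in C
   form.  Assumes IEEE doubles; INFINITY is C99 <math.h>, DBL_MAX is from
   <float.h>; the function name is invented.  */
#include <float.h>
#include <math.h>
static int
inf_compare_demo (double x)
{
  /* x <= +Inf folds to x == x, which is false only for NaN.  */
  int le = (x <= INFINITY) == (x == x);
  /* x == +Inf and x >= +Inf fold to x > DBL_MAX.  */
  int eq = (x == INFINITY) == (x > DBL_MAX);
  return le && eq;  /* 1 for every x */
}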
5895 /* Subroutine of fold() that optimizes comparisons of a division by
5896 a nonzero integer constant against an integer constant, i.e.
5897 X/C1 op C2.
5899 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5900 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5901 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5903 The function returns the constant folded tree if a simplification
5904 can be made, and NULL_TREE otherwise. */
5906 static tree
5907 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5909 tree prod, tmp, hi, lo;
5910 tree arg00 = TREE_OPERAND (arg0, 0);
5911 tree arg01 = TREE_OPERAND (arg0, 1);
5912 unsigned HOST_WIDE_INT lpart;
5913 HOST_WIDE_INT hpart;
5914 int overflow;
5916 /* We have to do this the hard way to detect unsigned overflow.
5917 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5918 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5919 TREE_INT_CST_HIGH (arg01),
5920 TREE_INT_CST_LOW (arg1),
5921 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5922 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5923 prod = force_fit_type (prod, -1, overflow, false);
5925 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5927 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5928 lo = prod;
5930 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5931 overflow = add_double (TREE_INT_CST_LOW (prod),
5932 TREE_INT_CST_HIGH (prod),
5933 TREE_INT_CST_LOW (tmp),
5934 TREE_INT_CST_HIGH (tmp),
5935 &lpart, &hpart);
5936 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5937 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5938 TREE_CONSTANT_OVERFLOW (prod));
5940 else if (tree_int_cst_sgn (arg01) >= 0)
5942 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5943 switch (tree_int_cst_sgn (arg1))
5945 case -1:
5946 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5947 hi = prod;
5948 break;
5950 case 0:
5951 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5952 hi = tmp;
5953 break;
5955 case 1:
5956 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5957 lo = prod;
5958 break;
5960 default:
5961 gcc_unreachable ();
5964 else
5966 /* A negative divisor reverses the relational operators. */
5967 code = swap_tree_comparison (code);
5969 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5970 switch (tree_int_cst_sgn (arg1))
5972 case -1:
5973 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5974 lo = prod;
5975 break;
5977 case 0:
5978 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5979 lo = tmp;
5980 break;
5982 case 1:
5983 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5984 hi = prod;
5985 break;
5987 default:
5988 gcc_unreachable ();
5992 switch (code)
5994 case EQ_EXPR:
5995 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5996 return omit_one_operand (type, integer_zero_node, arg00);
5997 if (TREE_OVERFLOW (hi))
5998 return fold_build2 (GE_EXPR, type, arg00, lo);
5999 if (TREE_OVERFLOW (lo))
6000 return fold_build2 (LE_EXPR, type, arg00, hi);
6001 return build_range_check (type, arg00, 1, lo, hi);
6003 case NE_EXPR:
6004 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6005 return omit_one_operand (type, integer_one_node, arg00);
6006 if (TREE_OVERFLOW (hi))
6007 return fold_build2 (LT_EXPR, type, arg00, lo);
6008 if (TREE_OVERFLOW (lo))
6009 return fold_build2 (GT_EXPR, type, arg00, hi);
6010 return build_range_check (type, arg00, 0, lo, hi);
6012 case LT_EXPR:
6013 if (TREE_OVERFLOW (lo))
6014 return omit_one_operand (type, integer_zero_node, arg00);
6015 return fold_build2 (LT_EXPR, type, arg00, lo);
6017 case LE_EXPR:
6018 if (TREE_OVERFLOW (hi))
6019 return omit_one_operand (type, integer_one_node, arg00);
6020 return fold_build2 (LE_EXPR, type, arg00, hi);
6022 case GT_EXPR:
6023 if (TREE_OVERFLOW (hi))
6024 return omit_one_operand (type, integer_zero_node, arg00);
6025 return fold_build2 (GT_EXPR, type, arg00, hi);
6027 case GE_EXPR:
6028 if (TREE_OVERFLOW (lo))
6029 return omit_one_operand (type, integer_one_node, arg00);
6030 return fold_build2 (GE_EXPR, type, arg00, lo);
6032 default:
6033 break;
6036 return NULL_TREE;
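/* Editorial sketch, not part of fold-const.c: the X/C1 op C2 range rewrite
   in C form.  C division truncates toward zero; the function name is
   invented.  */
static int
div_compare_demo (int x)
{
  /* x / 4 == 3 holds exactly on the range [12, 15].  */
  int eq = (x / 4 == 3) == (x >= 12 && x <= 15);
  /* For nonnegative x, x / 4 < 3 becomes x < 12.  */
  int lt = x < 0 || ((x / 4 < 3) == (x < 12));
  return eq && lt;  /* 1 for every x */
}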
6040 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6041 equality/inequality test, then return a simplified form of the test
6042 using a sign test. Otherwise return NULL. TYPE is the desired
6043 result type. */
6045 static tree
6046 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6047 tree result_type)
6049 /* If this is testing a single bit, we can optimize the test. */
6050 if ((code == NE_EXPR || code == EQ_EXPR)
6051 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6052 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6054 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6055 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6056 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6058 if (arg00 != NULL_TREE
6059 /* This is only a win if casting to a signed type is cheap,
6060 i.e. when arg00's type is not a partial mode. */
6061 && TYPE_PRECISION (TREE_TYPE (arg00))
6062 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6064 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6065 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6066 result_type, fold_convert (stype, arg00),
6067 fold_convert (stype, integer_zero_node));
6071 return NULL_TREE;
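/* Editorial sketch, not part of fold-const.c: the sign test in C form.
   Assumes 32-bit two's complement int, so INT_MIN from <limits.h> is the
   mask with only the sign bit set; the function name is invented.  */
#include <limits.h>
static int
sign_test_demo (int a)
{
  /* (a & sign-bit) != 0 becomes a < 0; likewise == 0 becomes a >= 0.  */
  return ((a & INT_MIN) != 0) == (a < 0);  /* 1 for every a */
}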
6074 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6075 equality/inequality test, then return a simplified form of
6076 the test using shifts and logical operations. Otherwise return
6077 NULL. TYPE is the desired result type. */
6079 tree
6080 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6081 tree result_type)
6083 /* If this is testing a single bit, we can optimize the test. */
6084 if ((code == NE_EXPR || code == EQ_EXPR)
6085 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6086 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6088 tree inner = TREE_OPERAND (arg0, 0);
6089 tree type = TREE_TYPE (arg0);
6090 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6091 enum machine_mode operand_mode = TYPE_MODE (type);
6092 int ops_unsigned;
6093 tree signed_type, unsigned_type, intermediate_type;
6094 tree tem;
6096 /* First, see if we can fold the single bit test into a sign-bit
6097 test. */
6098 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6099 result_type);
6100 if (tem)
6101 return tem;
6103 /* Otherwise we have (A & C) != 0 where C is a single bit,
6104 convert that into ((A >> C2) & 1), where C2 = log2(C).
6105 Similarly for (A & C) == 0. */
6107 /* If INNER is a right shift of a constant and it plus BITNUM does
6108 not overflow, adjust BITNUM and INNER. */
6109 if (TREE_CODE (inner) == RSHIFT_EXPR
6110 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6111 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6112 && bitnum < TYPE_PRECISION (type)
6113 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6114 bitnum - TYPE_PRECISION (type)))
6116 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6117 inner = TREE_OPERAND (inner, 0);
6120 /* If we are going to be able to omit the AND below, we must do our
6121 operations as unsigned. If we must use the AND, we have a choice.
6122 Normally unsigned is faster, but for some machines signed is. */
6123 #ifdef LOAD_EXTEND_OP
6124 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6125 && !flag_syntax_only) ? 0 : 1;
6126 #else
6127 ops_unsigned = 1;
6128 #endif
6130 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6131 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6132 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6133 inner = fold_convert (intermediate_type, inner);
6135 if (bitnum != 0)
6136 inner = build2 (RSHIFT_EXPR, intermediate_type,
6137 inner, size_int (bitnum));
6139 if (code == EQ_EXPR)
6140 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6141 inner, integer_one_node);
6143 /* Put the AND last so it can combine with more things. */
6144 inner = build2 (BIT_AND_EXPR, intermediate_type,
6145 inner, integer_one_node);
6147 /* Make sure to return the proper type. */
6148 inner = fold_convert (result_type, inner);
6150 return inner;
6152 return NULL_TREE;
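/* Editorial sketch, not part of fold-const.c: the single-bit rewrite in C
   form for the bit with value 8, i.e. C2 = 3.  The function name is
   invented.  */
static int
single_bit_demo (unsigned a)
{
  /* (a & 8) != 0 becomes (a >> 3) & 1; the == 0 form XORs with 1.  */
  int ne = ((a & 8) != 0) == ((a >> 3) & 1);
  int eq = ((a & 8) == 0) == (((a >> 3) & 1) ^ 1);
  return ne && eq;  /* 1 for every a */
}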
6155 /* Check whether we are allowed to reorder operands arg0 and arg1,
6156 such that the evaluation of arg1 occurs before arg0. */
6158 static bool
6159 reorder_operands_p (tree arg0, tree arg1)
6161 if (! flag_evaluation_order)
6162 return true;
6163 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6164 return true;
6165 return ! TREE_SIDE_EFFECTS (arg0)
6166 && ! TREE_SIDE_EFFECTS (arg1);
6169 /* Test whether it is preferable to swap two operands, ARG0 and
6170 ARG1, for example because ARG0 is an integer constant and ARG1
6171 isn't. If REORDER is true, only recommend swapping if we can
6172 evaluate the operands in reverse order. */
6174 bool
6175 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6177 STRIP_SIGN_NOPS (arg0);
6178 STRIP_SIGN_NOPS (arg1);
6180 if (TREE_CODE (arg1) == INTEGER_CST)
6181 return 0;
6182 if (TREE_CODE (arg0) == INTEGER_CST)
6183 return 1;
6185 if (TREE_CODE (arg1) == REAL_CST)
6186 return 0;
6187 if (TREE_CODE (arg0) == REAL_CST)
6188 return 1;
6190 if (TREE_CODE (arg1) == COMPLEX_CST)
6191 return 0;
6192 if (TREE_CODE (arg0) == COMPLEX_CST)
6193 return 1;
6195 if (TREE_CONSTANT (arg1))
6196 return 0;
6197 if (TREE_CONSTANT (arg0))
6198 return 1;
6200 if (optimize_size)
6201 return 0;
6203 if (reorder && flag_evaluation_order
6204 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6205 return 0;
6207 if (DECL_P (arg1))
6208 return 0;
6209 if (DECL_P (arg0))
6210 return 1;
6212 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6213 for commutative and comparison operators. Ensuring a canonical
6214 form allows the optimizers to find additional redundancies without
6215 having to explicitly check for both orderings. */
6216 if (TREE_CODE (arg0) == SSA_NAME
6217 && TREE_CODE (arg1) == SSA_NAME
6218 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6219 return 1;
6221 return 0;
6224 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6225 ARG0 is extended to a wider type. */
6227 static tree
6228 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6230 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6231 tree arg1_unw;
6232 tree shorter_type, outer_type;
6233 tree min, max;
6234 bool above, below;
6236 if (arg0_unw == arg0)
6237 return NULL_TREE;
6238 shorter_type = TREE_TYPE (arg0_unw);
6240 #ifdef HAVE_canonicalize_funcptr_for_compare
6241 /* Disable this optimization if we're casting a function pointer
6242 type on targets that require function pointer canonicalization. */
6243 if (HAVE_canonicalize_funcptr_for_compare
6244 && TREE_CODE (shorter_type) == POINTER_TYPE
6245 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6246 return NULL_TREE;
6247 #endif
6249 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6250 return NULL_TREE;
6252 arg1_unw = get_unwidened (arg1, shorter_type);
6253 if (!arg1_unw)
6254 return NULL_TREE;
6256 /* If possible, express the comparison in the shorter mode. */
6257 if ((code == EQ_EXPR || code == NE_EXPR
6258 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6259 && (TREE_TYPE (arg1_unw) == shorter_type
6260 || (TREE_CODE (arg1_unw) == INTEGER_CST
6261 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6262 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6263 && int_fits_type_p (arg1_unw, shorter_type))))
6264 return fold_build2 (code, type, arg0_unw,
6265 fold_convert (shorter_type, arg1_unw));
6267 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6268 return NULL_TREE;
6270 /* If we are comparing with an integer that does not fit into the range
6271 of the shorter type, the result is known. */
6272 outer_type = TREE_TYPE (arg1_unw);
6273 min = lower_bound_in_type (outer_type, shorter_type);
6274 max = upper_bound_in_type (outer_type, shorter_type);
6276 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6277 max, arg1_unw));
6278 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6279 arg1_unw, min));
6281 switch (code)
6283 case EQ_EXPR:
6284 if (above || below)
6285 return omit_one_operand (type, integer_zero_node, arg0);
6286 break;
6288 case NE_EXPR:
6289 if (above || below)
6290 return omit_one_operand (type, integer_one_node, arg0);
6291 break;
6293 case LT_EXPR:
6294 case LE_EXPR:
6295 if (above)
6296 return omit_one_operand (type, integer_one_node, arg0);
6297 else if (below)
6298 return omit_one_operand (type, integer_zero_node, arg0);
6300 case GT_EXPR:
6301 case GE_EXPR:
6302 if (above)
6303 return omit_one_operand (type, integer_zero_node, arg0);
6304 else if (below)
6305 return omit_one_operand (type, integer_one_node, arg0);
6307 default:
6308 break;
6311 return NULL_TREE;
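/* Editorial sketch, not part of fold-const.c: a comparison whose constant
   lies outside the narrower type's range has a known result.  Assumes
   16-bit short; the function name is invented.  */
static int
widened_compare_demo (short s)
{
  /* (int) s < 100000 is always true, since no short exceeds 32767; the
     fold reduces such comparisons to a constant.  */
  return (int) s < 100000;  /* 1 for every s */
}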
6314 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6315 ARG0 just the signedness is changed. */
6317 static tree
6318 fold_sign_changed_comparison (enum tree_code code, tree type,
6319 tree arg0, tree arg1)
6321 tree arg0_inner, tmp;
6322 tree inner_type, outer_type;
6324 if (TREE_CODE (arg0) != NOP_EXPR
6325 && TREE_CODE (arg0) != CONVERT_EXPR)
6326 return NULL_TREE;
6328 outer_type = TREE_TYPE (arg0);
6329 arg0_inner = TREE_OPERAND (arg0, 0);
6330 inner_type = TREE_TYPE (arg0_inner);
6332 #ifdef HAVE_canonicalize_funcptr_for_compare
6333 /* Disable this optimization if we're casting a function pointer
6334 type on targets that require function pointer canonicalization. */
6335 if (HAVE_canonicalize_funcptr_for_compare
6336 && TREE_CODE (inner_type) == POINTER_TYPE
6337 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6338 return NULL_TREE;
6339 #endif
6341 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6342 return NULL_TREE;
6344 if (TREE_CODE (arg1) != INTEGER_CST
6345 && !((TREE_CODE (arg1) == NOP_EXPR
6346 || TREE_CODE (arg1) == CONVERT_EXPR)
6347 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6348 return NULL_TREE;
6350 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6351 && code != NE_EXPR
6352 && code != EQ_EXPR)
6353 return NULL_TREE;
6355 if (TREE_CODE (arg1) == INTEGER_CST)
6357 tmp = build_int_cst_wide (inner_type,
6358 TREE_INT_CST_LOW (arg1),
6359 TREE_INT_CST_HIGH (arg1));
6360 arg1 = force_fit_type (tmp, 0,
6361 TREE_OVERFLOW (arg1),
6362 TREE_CONSTANT_OVERFLOW (arg1));
6364 else
6365 arg1 = fold_convert (inner_type, arg1);
6367 return fold_build2 (code, type, arg0_inner, arg1);
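/* Editorial sketch, not part of fold-const.c: an equality comparison
   survives a cast that changes only the signedness, so the cast can be
   dropped and the constant converted instead.  Assumes 32-bit int; the
   function name is invented.  */
static int
sign_changed_demo (int a)
{
  /* (unsigned) a == 5u becomes a == 5.  */
  return ((unsigned) a == 5u) == (a == 5);  /* 1 for every a */
}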
6370 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6371 the step of the array. Reconstructs s and delta in the case of s * delta
6372 being an integer constant (and thus already folded).
6373 ADDR is the address. OP1 is the multiplicative expression.
6374 If the function succeeds, the new address expression is returned. Otherwise
6375 NULL_TREE is returned. */
6377 static tree
6378 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6380 tree s, delta, step;
6381 tree ref = TREE_OPERAND (addr, 0), pref;
6382 tree ret, pos;
6383 tree itype;
6385 /* Canonicalize op1 into a possibly non-constant delta
6386 and an INTEGER_CST s. */
6387 if (TREE_CODE (op1) == MULT_EXPR)
6389 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6391 STRIP_NOPS (arg0);
6392 STRIP_NOPS (arg1);
6394 if (TREE_CODE (arg0) == INTEGER_CST)
6396 s = arg0;
6397 delta = arg1;
6399 else if (TREE_CODE (arg1) == INTEGER_CST)
6401 s = arg1;
6402 delta = arg0;
6404 else
6405 return NULL_TREE;
6407 else if (TREE_CODE (op1) == INTEGER_CST)
6409 delta = op1;
6410 s = NULL_TREE;
6412 else
6414 /* Treat op1 as delta * 1. */
6415 delta = op1;
6416 s = integer_one_node;
6419 for (;; ref = TREE_OPERAND (ref, 0))
6421 if (TREE_CODE (ref) == ARRAY_REF)
6423 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6424 if (! itype)
6425 continue;
6427 step = array_ref_element_size (ref);
6428 if (TREE_CODE (step) != INTEGER_CST)
6429 continue;
6431 if (s)
6433 if (! tree_int_cst_equal (step, s))
6434 continue;
6436 else
6438 /* Check whether delta is a multiple of step. */
6439 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6440 if (! tmp)
6441 continue;
6442 delta = tmp;
6445 break;
6448 if (!handled_component_p (ref))
6449 return NULL_TREE;
6452 /* We found a suitable array reference. So copy everything up to it,
6453 and replace the index. */
6455 pref = TREE_OPERAND (addr, 0);
6456 ret = copy_node (pref);
6457 pos = ret;
6459 while (pref != ref)
6461 pref = TREE_OPERAND (pref, 0);
6462 TREE_OPERAND (pos, 0) = copy_node (pref);
6463 pos = TREE_OPERAND (pos, 0);
6466 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6467 fold_convert (itype,
6468 TREE_OPERAND (pos, 1)),
6469 fold_convert (itype, delta));
6471 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
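/* Editorial sketch, not part of fold-const.c: the (implicit) multiply by
   the element size is moved into the array index.  By C pointer
   arithmetic both expressions denote the same address; the function name
   is invented and the index is assumed in bounds.  */
static int *
move_mult_demo (int *a, int i, int delta)
{
  /* &a[i] + delta is folded to &a[i + delta].  */
  return &a[i] + delta;
}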
6475 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6476 means A >= Y && A != MAX, but in this case we know that
6477 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6479 static tree
6480 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6482 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6484 if (TREE_CODE (bound) == LT_EXPR)
6485 a = TREE_OPERAND (bound, 0);
6486 else if (TREE_CODE (bound) == GT_EXPR)
6487 a = TREE_OPERAND (bound, 1);
6488 else
6489 return NULL_TREE;
6491 typea = TREE_TYPE (a);
6492 if (!INTEGRAL_TYPE_P (typea)
6493 && !POINTER_TYPE_P (typea))
6494 return NULL_TREE;
6496 if (TREE_CODE (ineq) == LT_EXPR)
6498 a1 = TREE_OPERAND (ineq, 1);
6499 y = TREE_OPERAND (ineq, 0);
6501 else if (TREE_CODE (ineq) == GT_EXPR)
6503 a1 = TREE_OPERAND (ineq, 0);
6504 y = TREE_OPERAND (ineq, 1);
6506 else
6507 return NULL_TREE;
6509 if (TREE_TYPE (a1) != typea)
6510 return NULL_TREE;
6512 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6513 if (!integer_onep (diff))
6514 return NULL_TREE;
6516 return fold_build2 (GE_EXPR, type, a, y);
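/* Editorial sketch, not part of fold-const.c: once a < x holds, a + 1
   cannot overflow, so the sharp bound relaxes as described above.  The
   function name is invented.  */
static int
nonsharp_ineq_demo (int a, int x, int y)
{
  /* a < x && a + 1 > y folds to a < x && a >= y.  */
  if (!(a < x))
    return 1;  /* bound fails; both forms are false */
  return (a + 1 > y) == (a >= y);  /* 1 whenever a < x */
}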
6519 /* Fold a unary expression of code CODE and type TYPE with operand
6520 OP0. Return the folded expression if folding is successful.
6521 Otherwise, return NULL_TREE. */
6523 tree
6524 fold_unary (enum tree_code code, tree type, tree op0)
6526 tree tem;
6527 tree arg0;
6528 enum tree_code_class kind = TREE_CODE_CLASS (code);
6530 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6531 && TREE_CODE_LENGTH (code) == 1);
6533 arg0 = op0;
6534 if (arg0)
6536 if (code == NOP_EXPR || code == CONVERT_EXPR
6537 || code == FLOAT_EXPR || code == ABS_EXPR)
6539 /* Don't use STRIP_NOPS, because signedness of argument type
6540 matters. */
6541 STRIP_SIGN_NOPS (arg0);
6543 else
6545 /* Strip any conversions that don't change the mode. This
6546 is safe for every expression, except for a comparison
6547 expression because its signedness is derived from its
6548 operands.
6550 Note that this is done as an internal manipulation within
6551 the constant folder, in order to find the simplest
6552 representation of the arguments so that their form can be
6553 studied. In any case, the appropriate type conversions
6554 should be put back in the tree that will get out of the
6555 constant folder. */
6556 STRIP_NOPS (arg0);
6560 if (TREE_CODE_CLASS (code) == tcc_unary)
6562 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6563 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6564 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6565 else if (TREE_CODE (arg0) == COND_EXPR)
6567 tree arg01 = TREE_OPERAND (arg0, 1);
6568 tree arg02 = TREE_OPERAND (arg0, 2);
6569 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6570 arg01 = fold_build1 (code, type, arg01);
6571 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6572 arg02 = fold_build1 (code, type, arg02);
6573 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6574 arg01, arg02);
6576 /* If this was a conversion, and all we did was to move it
6577 inside the COND_EXPR, bring it back out. But leave it if
6578 it is a conversion from integer to integer and the
6579 result precision is no wider than a word since such a
6580 conversion is cheap and may be optimized away by combine,
6581 while it couldn't if it were outside the COND_EXPR. Then return
6582 so we don't get into an infinite recursion loop taking the
6583 conversion out and then back in. */
6585 if ((code == NOP_EXPR || code == CONVERT_EXPR
6586 || code == NON_LVALUE_EXPR)
6587 && TREE_CODE (tem) == COND_EXPR
6588 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6589 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6590 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6591 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6592 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6593 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6594 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6595 && (INTEGRAL_TYPE_P
6596 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6597 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6598 || flag_syntax_only))
6599 tem = build1 (code, type,
6600 build3 (COND_EXPR,
6601 TREE_TYPE (TREE_OPERAND
6602 (TREE_OPERAND (tem, 1), 0)),
6603 TREE_OPERAND (tem, 0),
6604 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6605 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6606 return tem;
6608 else if (COMPARISON_CLASS_P (arg0))
6610 if (TREE_CODE (type) == BOOLEAN_TYPE)
6612 arg0 = copy_node (arg0);
6613 TREE_TYPE (arg0) = type;
6614 return arg0;
6616 else if (TREE_CODE (type) != INTEGER_TYPE)
6617 return fold_build3 (COND_EXPR, type, arg0,
6618 fold_build1 (code, type,
6619 integer_one_node),
6620 fold_build1 (code, type,
6621 integer_zero_node));
6625 switch (code)
6627 case NOP_EXPR:
6628 case FLOAT_EXPR:
6629 case CONVERT_EXPR:
6630 case FIX_TRUNC_EXPR:
6631 case FIX_CEIL_EXPR:
6632 case FIX_FLOOR_EXPR:
6633 case FIX_ROUND_EXPR:
6634 if (TREE_TYPE (op0) == type)
6635 return op0;
6637 /* Handle cases of two conversions in a row. */
6638 if (TREE_CODE (op0) == NOP_EXPR
6639 || TREE_CODE (op0) == CONVERT_EXPR)
6641 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6642 tree inter_type = TREE_TYPE (op0);
6643 int inside_int = INTEGRAL_TYPE_P (inside_type);
6644 int inside_ptr = POINTER_TYPE_P (inside_type);
6645 int inside_float = FLOAT_TYPE_P (inside_type);
6646 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6647 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6648 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6649 int inter_int = INTEGRAL_TYPE_P (inter_type);
6650 int inter_ptr = POINTER_TYPE_P (inter_type);
6651 int inter_float = FLOAT_TYPE_P (inter_type);
6652 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6653 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6654 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6655 int final_int = INTEGRAL_TYPE_P (type);
6656 int final_ptr = POINTER_TYPE_P (type);
6657 int final_float = FLOAT_TYPE_P (type);
6658 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6659 unsigned int final_prec = TYPE_PRECISION (type);
6660 int final_unsignedp = TYPE_UNSIGNED (type);
6662 /* In addition to the cases of two conversions in a row
6663 handled below, if we are converting something to its own
6664 type via an object of identical or wider precision, neither
6665 conversion is needed. */
6666 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6667 && ((inter_int && final_int) || (inter_float && final_float))
6668 && inter_prec >= final_prec)
6669 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6671 /* Likewise, if the intermediate and final types are either both
6672 float or both integer, we don't need the middle conversion if
6673 it is wider than the final type and doesn't change the signedness
6674 (for integers). Avoid this if the final type is a pointer
6675 since then we sometimes need the inner conversion. Likewise if
6676 the outer has a precision not equal to the size of its mode. */
6677 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6678 || (inter_float && inside_float)
6679 || (inter_vec && inside_vec))
6680 && inter_prec >= inside_prec
6681 && (inter_float || inter_vec
6682 || inter_unsignedp == inside_unsignedp)
6683 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6684 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6685 && ! final_ptr
6686 && (! final_vec || inter_prec == inside_prec))
6687 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6689 /* If we have a sign-extension of a zero-extended value, we can
6690 replace that by a single zero-extension. */
6691 if (inside_int && inter_int && final_int
6692 && inside_prec < inter_prec && inter_prec < final_prec
6693 && inside_unsignedp && !inter_unsignedp)
6694 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6696 /* Two conversions in a row are not needed unless:
6697 - some conversion is floating-point (overstrict for now), or
6698 - some conversion is a vector (overstrict for now), or
6699 - the intermediate type is narrower than both initial and
6700 final, or
6701 - the intermediate type and innermost type differ in signedness,
6702 and the outermost type is wider than the intermediate, or
6703 - the initial type is a pointer type and the precisions of the
6704 intermediate and final types differ, or
6705 - the final type is a pointer type and the precisions of the
6706 initial and intermediate types differ. */
6707 if (! inside_float && ! inter_float && ! final_float
6708 && ! inside_vec && ! inter_vec && ! final_vec
6709 && (inter_prec > inside_prec || inter_prec > final_prec)
6710 && ! (inside_int && inter_int
6711 && inter_unsignedp != inside_unsignedp
6712 && inter_prec < final_prec)
6713 && ((inter_unsignedp && inter_prec > inside_prec)
6714 == (final_unsignedp && final_prec > inter_prec))
6715 && ! (inside_ptr && inter_prec != final_prec)
6716 && ! (final_ptr && inside_prec != inter_prec)
6717 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6718 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6719 && ! final_ptr)
6720 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6723 /* Handle (T *)&A.B.C for A being of type T and B and C
6724 living at offset zero. This occurs frequently in
6725 C++ upcasting and then accessing the base. */
6726 if (TREE_CODE (op0) == ADDR_EXPR
6727 && POINTER_TYPE_P (type)
6728 && handled_component_p (TREE_OPERAND (op0, 0)))
6730 HOST_WIDE_INT bitsize, bitpos;
6731 tree offset;
6732 enum machine_mode mode;
6733 int unsignedp, volatilep;
6734 tree base = TREE_OPERAND (op0, 0);
6735 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6736 &mode, &unsignedp, &volatilep, false);
6737 /* If the reference was to a (constant) zero offset, we can use
6738 the address of the base if it has the same base type
6739 as the result type. */
6740 if (! offset && bitpos == 0
6741 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6742 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6743 return fold_convert (type, build_fold_addr_expr (base));
6746 if (TREE_CODE (op0) == MODIFY_EXPR
6747 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6748 /* Detect assigning a bitfield. */
6749 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6750 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6752 /* Don't leave an assignment inside a conversion
6753 unless assigning a bitfield. */
6754 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6755 /* First do the assignment, then return converted constant. */
6756 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6757 TREE_NO_WARNING (tem) = 1;
6758 TREE_USED (tem) = 1;
6759 return tem;
6762 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6763 constant (if x has signed type, the sign bit cannot be set
6764 in c). This folds extension into the BIT_AND_EXPR. */
6765 if (INTEGRAL_TYPE_P (type)
6766 && TREE_CODE (type) != BOOLEAN_TYPE
6767 && TREE_CODE (op0) == BIT_AND_EXPR
6768 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6770 tree and = op0;
6771 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6772 int change = 0;
6774 if (TYPE_UNSIGNED (TREE_TYPE (and))
6775 || (TYPE_PRECISION (type)
6776 <= TYPE_PRECISION (TREE_TYPE (and))))
6777 change = 1;
6778 else if (TYPE_PRECISION (TREE_TYPE (and1))
6779 <= HOST_BITS_PER_WIDE_INT
6780 && host_integerp (and1, 1))
6782 unsigned HOST_WIDE_INT cst;
6784 cst = tree_low_cst (and1, 1);
6785 cst &= (HOST_WIDE_INT) -1
6786 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6787 change = (cst == 0);
6788 #ifdef LOAD_EXTEND_OP
6789 if (change
6790 && !flag_syntax_only
6791 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6792 == ZERO_EXTEND))
6794 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6795 and0 = fold_convert (uns, and0);
6796 and1 = fold_convert (uns, and1);
6798 #endif
6800 if (change)
6802 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6803 TREE_INT_CST_HIGH (and1));
6804 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6805 TREE_CONSTANT_OVERFLOW (and1));
6806 return fold_build2 (BIT_AND_EXPR, type,
6807 fold_convert (type, and0), tem);
6811 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6812 T2 being pointers to types of the same size. */
6813 if (POINTER_TYPE_P (type)
6814 && BINARY_CLASS_P (arg0)
6815 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6816 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6818 tree arg00 = TREE_OPERAND (arg0, 0);
6819 tree t0 = type;
6820 tree t1 = TREE_TYPE (arg00);
6821 tree tt0 = TREE_TYPE (t0);
6822 tree tt1 = TREE_TYPE (t1);
6823 tree s0 = TYPE_SIZE (tt0);
6824 tree s1 = TYPE_SIZE (tt1);
6826 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6827 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6828 TREE_OPERAND (arg0, 1));
6831 tem = fold_convert_const (code, type, arg0);
6832 return tem ? tem : NULL_TREE;
6834 case VIEW_CONVERT_EXPR:
6835 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6836 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6837 return NULL_TREE;
6839 case NEGATE_EXPR:
6840 if (negate_expr_p (arg0))
6841 return fold_convert (type, negate_expr (arg0));
6842 /* Convert - (~A) to A + 1. */
6843 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6844 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6845 build_int_cst (type, 1));
6846 return NULL_TREE;
6848 case ABS_EXPR:
6849 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6850 return fold_abs_const (arg0, type);
6851 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6852 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6853 /* Convert fabs((double)float) into (double)fabsf(float). */
6854 else if (TREE_CODE (arg0) == NOP_EXPR
6855 && TREE_CODE (type) == REAL_TYPE)
6857 tree targ0 = strip_float_extensions (arg0);
6858 if (targ0 != arg0)
6859 return fold_convert (type, fold_build1 (ABS_EXPR,
6860 TREE_TYPE (targ0),
6861 targ0));
6863 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6864 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6865 return arg0;
6867 /* Strip sign ops from argument. */
6868 if (TREE_CODE (type) == REAL_TYPE)
6870 tem = fold_strip_sign_ops (arg0);
6871 if (tem)
6872 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6874 return NULL_TREE;
6876 case CONJ_EXPR:
6877 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6878 return fold_convert (type, arg0);
6879 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6880 return build2 (COMPLEX_EXPR, type,
6881 TREE_OPERAND (arg0, 0),
6882 negate_expr (TREE_OPERAND (arg0, 1)));
6883 else if (TREE_CODE (arg0) == COMPLEX_CST)
6884 return build_complex (type, TREE_REALPART (arg0),
6885 negate_expr (TREE_IMAGPART (arg0)));
6886 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6887 return fold_build2 (TREE_CODE (arg0), type,
6888 fold_build1 (CONJ_EXPR, type,
6889 TREE_OPERAND (arg0, 0)),
6890 fold_build1 (CONJ_EXPR, type,
6891 TREE_OPERAND (arg0, 1)));
6892 else if (TREE_CODE (arg0) == CONJ_EXPR)
6893 return TREE_OPERAND (arg0, 0);
6894 return NULL_TREE;
6896 case BIT_NOT_EXPR:
6897 if (TREE_CODE (arg0) == INTEGER_CST)
6898 return fold_not_const (arg0, type);
6899 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6900 return TREE_OPERAND (arg0, 0);
6901 /* Convert ~ (-A) to A - 1. */
6902 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6903 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6904 build_int_cst (type, 1));
6905 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6906 else if (INTEGRAL_TYPE_P (type)
6907 && ((TREE_CODE (arg0) == MINUS_EXPR
6908 && integer_onep (TREE_OPERAND (arg0, 1)))
6909 || (TREE_CODE (arg0) == PLUS_EXPR
6910 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6911 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6912 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6913 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6914 && (tem = fold_unary (BIT_NOT_EXPR, type,
6915 fold_convert (type,
6916 TREE_OPERAND (arg0, 0)))))
6917 return fold_build2 (BIT_XOR_EXPR, type, tem,
6918 fold_convert (type, TREE_OPERAND (arg0, 1)));
6919 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6920 && (tem = fold_unary (BIT_NOT_EXPR, type,
6921 fold_convert (type,
6922 TREE_OPERAND (arg0, 1)))))
6923 return fold_build2 (BIT_XOR_EXPR, type,
6924 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6926 return NULL_TREE;
6928 case TRUTH_NOT_EXPR:
6929 /* The argument to invert_truthvalue must have Boolean type. */
6930 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6931 arg0 = fold_convert (boolean_type_node, arg0);
6933 /* Note that the operand of this must be an int
6934 and its values must be 0 or 1.
6935 ("true" is a fixed value perhaps depending on the language,
6936 but we don't handle values other than 1 correctly yet.) */
6937 tem = invert_truthvalue (arg0);
6938 /* Avoid infinite recursion. */
6939 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6940 return NULL_TREE;
6941 return fold_convert (type, tem);
6943 case REALPART_EXPR:
6944 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6945 return NULL_TREE;
6946 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6947 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6948 TREE_OPERAND (arg0, 1));
6949 else if (TREE_CODE (arg0) == COMPLEX_CST)
6950 return TREE_REALPART (arg0);
6951 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6952 return fold_build2 (TREE_CODE (arg0), type,
6953 fold_build1 (REALPART_EXPR, type,
6954 TREE_OPERAND (arg0, 0)),
6955 fold_build1 (REALPART_EXPR, type,
6956 TREE_OPERAND (arg0, 1)));
6957 return NULL_TREE;
6959 case IMAGPART_EXPR:
6960 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6961 return fold_convert (type, integer_zero_node);
6962 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6963 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6964 TREE_OPERAND (arg0, 0));
6965 else if (TREE_CODE (arg0) == COMPLEX_CST)
6966 return TREE_IMAGPART (arg0);
6967 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6968 return fold_build2 (TREE_CODE (arg0), type,
6969 fold_build1 (IMAGPART_EXPR, type,
6970 TREE_OPERAND (arg0, 0)),
6971 fold_build1 (IMAGPART_EXPR, type,
6972 TREE_OPERAND (arg0, 1)));
6973 return NULL_TREE;
6975 default:
6976 return NULL_TREE;
6977 } /* switch (code) */
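/* Editorial sketch, not part of fold-const.c: a few fold_unary identities
   in C form.  Assumes two's complement with 8-bit char and 16-bit short;
   the function name is invented.  */
static int
fold_unary_demo (unsigned a, unsigned char c)
{
  /* A sign extension of a zero-extended value is a single zero
     extension: (int)(short) c equals (int) c for any unsigned char.  */
  int conv = ((int)(short) c) == (int) c;
  /* - (~A) folds to A + 1 and ~ (-A) to A - 1; unsigned arithmetic keeps
     the wraparound well defined.  */
  int neg = (-~a == a + 1u) && (~-a == a - 1u);
  return conv && neg;  /* 1 for every a and c */
}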
6980 /* Fold a binary expression of code CODE and type TYPE with operands
6981 OP0 and OP1. Return the folded expression if folding is
6982 successful. Otherwise, return NULL_TREE. */
6984 tree
6985 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
6987 tree t1 = NULL_TREE;
6988 tree tem;
6989 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6990 enum tree_code_class kind = TREE_CODE_CLASS (code);
6992 /* WINS will be nonzero when the switch is done
6993 if all operands are constant. */
6994 int wins = 1;
6996 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6997 && TREE_CODE_LENGTH (code) == 2);
6999 arg0 = op0;
7000 arg1 = op1;
7002 if (arg0)
7004 tree subop;
7006 /* Strip any conversions that don't change the mode. This is
7007 safe for every expression, except for a comparison expression
7008 because its signedness is derived from its operands. So, in
7009 the latter case, only strip conversions that don't change the
7010 signedness.
7012 Note that this is done as an internal manipulation within the
7013 constant folder, in order to find the simplest representation
7014 of the arguments so that their form can be studied. In any
7015 case, the appropriate type conversions should be put back in
7016 the tree that will get out of the constant folder. */
7017 if (kind == tcc_comparison)
7018 STRIP_SIGN_NOPS (arg0);
7019 else
7020 STRIP_NOPS (arg0);
7022 if (TREE_CODE (arg0) == COMPLEX_CST)
7023 subop = TREE_REALPART (arg0);
7024 else
7025 subop = arg0;
7027 if (TREE_CODE (subop) != INTEGER_CST
7028 && TREE_CODE (subop) != REAL_CST)
7029 /* Note that TREE_CONSTANT isn't enough:
7030 static var addresses are constant but we can't
7031 do arithmetic on them. */
7032 wins = 0;
7035 if (arg1)
7037 tree subop;
7039 /* Strip any conversions that don't change the mode. This is
7040 safe for every expression, except for a comparison expression
7041 because its signedness is derived from its operands. So, in
7042 the latter case, only strip conversions that don't change the
7043 signedness.
7045 Note that this is done as an internal manipulation within the
7046 constant folder, in order to find the simplest representation
7047 of the arguments so that their form can be studied. In any
7048 case, the appropriate type conversions should be put back in
7049 the tree that will get out of the constant folder. */
7050 if (kind == tcc_comparison)
7051 STRIP_SIGN_NOPS (arg1);
7052 else
7053 STRIP_NOPS (arg1);
7055 if (TREE_CODE (arg1) == COMPLEX_CST)
7056 subop = TREE_REALPART (arg1);
7057 else
7058 subop = arg1;
7060 if (TREE_CODE (subop) != INTEGER_CST
7061 && TREE_CODE (subop) != REAL_CST)
7062 /* Note that TREE_CONSTANT isn't enough:
7063 static var addresses are constant but we can't
7064 do arithmetic on them. */
7065 wins = 0;
7068 /* If this is a commutative operation, and ARG0 is a constant, move it
7069 to ARG1 to reduce the number of tests below. */
7070 if (commutative_tree_code (code)
7071 && tree_swap_operands_p (arg0, arg1, true))
7072 return fold_build2 (code, type, op1, op0);
7074 /* Now WINS is set as described above,
7075 ARG0 is the first operand of EXPR,
7076 and ARG1 is the second operand (if it has more than one operand).
7078 First check for cases where an arithmetic operation is applied to a
7079 compound, conditional, or comparison operation. Push the arithmetic
7080 operation inside the compound or conditional to see if any folding
7081 can then be done. Convert comparison to conditional for this purpose.
7082 This also optimizes non-constant cases that used to be done in
7083 expand_expr.
7085 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7086 where one of the operands is a truth value and the other is a truth
7087 value or a BIT_AND_EXPR with the constant 1. In that case, the
7088 code below would make the expression more complex. Change it to a
7089 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7090 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7092 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7093 || code == EQ_EXPR || code == NE_EXPR)
7094 && ((truth_value_p (TREE_CODE (arg0))
7095 && (truth_value_p (TREE_CODE (arg1))
7096 || (TREE_CODE (arg1) == BIT_AND_EXPR
7097 && integer_onep (TREE_OPERAND (arg1, 1)))))
7098 || (truth_value_p (TREE_CODE (arg1))
7099 && (truth_value_p (TREE_CODE (arg0))
7100 || (TREE_CODE (arg0) == BIT_AND_EXPR
7101 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7103 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7104 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7105 : TRUTH_XOR_EXPR,
7106 boolean_type_node,
7107 fold_convert (boolean_type_node, arg0),
7108 fold_convert (boolean_type_node, arg1));
7110 if (code == EQ_EXPR)
7111 tem = invert_truthvalue (tem);
7113 return fold_convert (type, tem);
7116 if (TREE_CODE_CLASS (code) == tcc_comparison
7117 && TREE_CODE (arg0) == COMPOUND_EXPR)
7118 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7119 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7120 else if (TREE_CODE_CLASS (code) == tcc_comparison
7121 && TREE_CODE (arg1) == COMPOUND_EXPR)
7122 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7123 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7124 else if (TREE_CODE_CLASS (code) == tcc_binary
7125 || TREE_CODE_CLASS (code) == tcc_comparison)
7127 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7128 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7129 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7130 arg1));
7131 if (TREE_CODE (arg1) == COMPOUND_EXPR
7132 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7133 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7134 fold_build2 (code, type,
7135 arg0, TREE_OPERAND (arg1, 1)));
7137 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7139 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7140 arg0, arg1,
7141 /*cond_first_p=*/1);
7142 if (tem != NULL_TREE)
7143 return tem;
7146 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7148 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7149 arg1, arg0,
7150 /*cond_first_p=*/0);
7151 if (tem != NULL_TREE)
7152 return tem;
7156 switch (code)
7158 case PLUS_EXPR:
7159 /* A + (-B) -> A - B */
7160 if (TREE_CODE (arg1) == NEGATE_EXPR)
7161 return fold_build2 (MINUS_EXPR, type,
7162 fold_convert (type, arg0),
7163 fold_convert (type, TREE_OPERAND (arg1, 0)));
7164 /* (-A) + B -> B - A */
7165 if (TREE_CODE (arg0) == NEGATE_EXPR
7166 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7167 return fold_build2 (MINUS_EXPR, type,
7168 fold_convert (type, arg1),
7169 fold_convert (type, TREE_OPERAND (arg0, 0)));
7170 /* Convert ~A + 1 to -A. */
7171 if (INTEGRAL_TYPE_P (type)
7172 && TREE_CODE (arg0) == BIT_NOT_EXPR
7173 && integer_onep (arg1))
7174 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7176 if (! FLOAT_TYPE_P (type))
7178 if (integer_zerop (arg1))
7179 return non_lvalue (fold_convert (type, arg0));
7181 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7182 with a constant, and the two constants have no bits in common,
7183 we should treat this as a BIT_IOR_EXPR since this may produce more
7184 simplifications. */
7185 if (TREE_CODE (arg0) == BIT_AND_EXPR
7186 && TREE_CODE (arg1) == BIT_AND_EXPR
7187 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7188 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7189 && integer_zerop (const_binop (BIT_AND_EXPR,
7190 TREE_OPERAND (arg0, 1),
7191 TREE_OPERAND (arg1, 1), 0)))
7193 code = BIT_IOR_EXPR;
7194 goto bit_ior;
7197 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7198 (plus (plus (mult) (mult)) (foo)) so that we can
7199 take advantage of the factoring cases below. */
7200 if (((TREE_CODE (arg0) == PLUS_EXPR
7201 || TREE_CODE (arg0) == MINUS_EXPR)
7202 && TREE_CODE (arg1) == MULT_EXPR)
7203 || ((TREE_CODE (arg1) == PLUS_EXPR
7204 || TREE_CODE (arg1) == MINUS_EXPR)
7205 && TREE_CODE (arg0) == MULT_EXPR))
7207 tree parg0, parg1, parg, marg;
7208 enum tree_code pcode;
7210 if (TREE_CODE (arg1) == MULT_EXPR)
7211 parg = arg0, marg = arg1;
7212 else
7213 parg = arg1, marg = arg0;
7214 pcode = TREE_CODE (parg);
7215 parg0 = TREE_OPERAND (parg, 0);
7216 parg1 = TREE_OPERAND (parg, 1);
7217 STRIP_NOPS (parg0);
7218 STRIP_NOPS (parg1);
7220 if (TREE_CODE (parg0) == MULT_EXPR
7221 && TREE_CODE (parg1) != MULT_EXPR)
7222 return fold_build2 (pcode, type,
7223 fold_build2 (PLUS_EXPR, type,
7224 fold_convert (type, parg0),
7225 fold_convert (type, marg)),
7226 fold_convert (type, parg1));
7227 if (TREE_CODE (parg0) != MULT_EXPR
7228 && TREE_CODE (parg1) == MULT_EXPR)
7229 return fold_build2 (PLUS_EXPR, type,
7230 fold_convert (type, parg0),
7231 fold_build2 (pcode, type,
7232 fold_convert (type, marg),
7233 fold_convert (type,
7234 parg1)));
7237 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7239 tree arg00, arg01, arg10, arg11;
7240 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7242 /* (A * C) + (B * C) -> (A+B) * C.
7243 We are most concerned about the case where C is a constant,
7244 but other combinations show up during loop reduction. Since
7245 it is not difficult, try all four possibilities. */
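/* E.g. X*3 + Y*3 folds to (X + Y)*3, and the commuted forms
   such as 3*X + Y*3 are caught as well. */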
7247 arg00 = TREE_OPERAND (arg0, 0);
7248 arg01 = TREE_OPERAND (arg0, 1);
7249 arg10 = TREE_OPERAND (arg1, 0);
7250 arg11 = TREE_OPERAND (arg1, 1);
7251 same = NULL_TREE;
7253 if (operand_equal_p (arg01, arg11, 0))
7254 same = arg01, alt0 = arg00, alt1 = arg10;
7255 else if (operand_equal_p (arg00, arg10, 0))
7256 same = arg00, alt0 = arg01, alt1 = arg11;
7257 else if (operand_equal_p (arg00, arg11, 0))
7258 same = arg00, alt0 = arg01, alt1 = arg10;
7259 else if (operand_equal_p (arg01, arg10, 0))
7260 same = arg01, alt0 = arg00, alt1 = arg11;
7262 /* No identical multiplicands; see if we can find a common
7263 power-of-two factor in non-power-of-two multiplies. This
7264 can help in multi-dimensional array access. */
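/* E.g. I*12 + J*4 becomes (I*3 + J)*4, exposing the common
   element-size factor of the two multiplies. */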
7265 else if (TREE_CODE (arg01) == INTEGER_CST
7266 && TREE_CODE (arg11) == INTEGER_CST
7267 && TREE_INT_CST_HIGH (arg01) == 0
7268 && TREE_INT_CST_HIGH (arg11) == 0)
7270 HOST_WIDE_INT int01, int11, tmp;
7271 int01 = TREE_INT_CST_LOW (arg01);
7272 int11 = TREE_INT_CST_LOW (arg11);
7274 /* Move min of absolute values to int11. */
7275 if ((int01 >= 0 ? int01 : -int01)
7276 < (int11 >= 0 ? int11 : -int11))
7278 tmp = int01, int01 = int11, int11 = tmp;
7279 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7280 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7283 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7285 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7286 build_int_cst (NULL_TREE,
7287 int01 / int11));
7288 alt1 = arg10;
7289 same = arg11;
7293 if (same)
7294 return fold_build2 (MULT_EXPR, type,
7295 fold_build2 (PLUS_EXPR, type,
7296 fold_convert (type, alt0),
7297 fold_convert (type, alt1)),
7298 fold_convert (type, same));
7301 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7302 of the array. The loop optimizer sometimes produces this type of
7303 expression. */
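/* Illustrative example, assuming 4-byte elements: &a[i] + j*4
   can become &a[i + j]. */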
7304 if (TREE_CODE (arg0) == ADDR_EXPR)
7306 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7307 if (tem)
7308 return fold_convert (type, tem);
7310 else if (TREE_CODE (arg1) == ADDR_EXPR)
7312 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7313 if (tem)
7314 return fold_convert (type, tem);
7317 else
7319 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7320 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7321 return non_lvalue (fold_convert (type, arg0));
7323 /* Likewise if the operands are reversed. */
7324 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7325 return non_lvalue (fold_convert (type, arg1));
7327 /* Convert X + -C into X - C. */
7328 if (TREE_CODE (arg1) == REAL_CST
7329 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7331 tem = fold_negate_const (arg1, type);
7332 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7333 return fold_build2 (MINUS_EXPR, type,
7334 fold_convert (type, arg0),
7335 fold_convert (type, tem));
7338 if (flag_unsafe_math_optimizations
7339 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7340 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7341 && (tem = distribute_real_division (code, type, arg0, arg1)))
7342 return tem;
7344 /* Convert x+x into x*2.0. */
7345 if (operand_equal_p (arg0, arg1, 0)
7346 && SCALAR_FLOAT_TYPE_P (type))
7347 return fold_build2 (MULT_EXPR, type, arg0,
7348 build_real (type, dconst2));
7350 /* Convert x*c+x into x*(c+1). */
7351 if (flag_unsafe_math_optimizations
7352 && TREE_CODE (arg0) == MULT_EXPR
7353 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7354 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7355 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7357 REAL_VALUE_TYPE c;
7359 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7360 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7361 return fold_build2 (MULT_EXPR, type, arg1,
7362 build_real (type, c));
7365 /* Convert x+x*c into x*(c+1). */
7366 if (flag_unsafe_math_optimizations
7367 && TREE_CODE (arg1) == MULT_EXPR
7368 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7369 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7370 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7372 REAL_VALUE_TYPE c;
7374 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7375 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7376 return fold_build2 (MULT_EXPR, type, arg0,
7377 build_real (type, c));
7380 /* Convert x*c1+x*c2 into x*(c1+c2). */
7381 if (flag_unsafe_math_optimizations
7382 && TREE_CODE (arg0) == MULT_EXPR
7383 && TREE_CODE (arg1) == MULT_EXPR
7384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7385 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7386 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7387 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7388 && operand_equal_p (TREE_OPERAND (arg0, 0),
7389 TREE_OPERAND (arg1, 0), 0))
7391 REAL_VALUE_TYPE c1, c2;
7393 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7394 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7395 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7396 return fold_build2 (MULT_EXPR, type,
7397 TREE_OPERAND (arg0, 0),
7398 build_real (type, c1));
7400 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7401 if (flag_unsafe_math_optimizations
7402 && TREE_CODE (arg1) == PLUS_EXPR
7403 && TREE_CODE (arg0) != MULT_EXPR)
7405 tree tree10 = TREE_OPERAND (arg1, 0);
7406 tree tree11 = TREE_OPERAND (arg1, 1);
7407 if (TREE_CODE (tree11) == MULT_EXPR
7408 && TREE_CODE (tree10) == MULT_EXPR)
7410 tree tree0;
7411 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7412 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7415 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7416 if (flag_unsafe_math_optimizations
7417 && TREE_CODE (arg0) == PLUS_EXPR
7418 && TREE_CODE (arg1) != MULT_EXPR)
7420 tree tree00 = TREE_OPERAND (arg0, 0);
7421 tree tree01 = TREE_OPERAND (arg0, 1);
7422 if (TREE_CODE (tree01) == MULT_EXPR
7423 && TREE_CODE (tree00) == MULT_EXPR)
7425 tree tree0;
7426 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7427 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7432 bit_rotate:
7433 /* If A is unsigned and C1+C2 is the size of A, then
7434 (A << C1) + (A >> C2) is a rotate of A by C1 bits. */
7435 /* Likewise, if A is unsigned and Z is the size of A, then
7436 (A << B) + (A >> (Z - B)) is a rotate of A by B bits. */
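/* E.g. for a 32-bit unsigned A, (A << 8) + (A >> 24) is A
   rotated left by 8 bits. */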
7438 enum tree_code code0, code1;
7439 code0 = TREE_CODE (arg0);
7440 code1 = TREE_CODE (arg1);
7441 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7442 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7443 && operand_equal_p (TREE_OPERAND (arg0, 0),
7444 TREE_OPERAND (arg1, 0), 0)
7445 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7447 tree tree01, tree11;
7448 enum tree_code code01, code11;
7450 tree01 = TREE_OPERAND (arg0, 1);
7451 tree11 = TREE_OPERAND (arg1, 1);
7452 STRIP_NOPS (tree01);
7453 STRIP_NOPS (tree11);
7454 code01 = TREE_CODE (tree01);
7455 code11 = TREE_CODE (tree11);
7456 if (code01 == INTEGER_CST
7457 && code11 == INTEGER_CST
7458 && TREE_INT_CST_HIGH (tree01) == 0
7459 && TREE_INT_CST_HIGH (tree11) == 0
7460 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7461 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7462 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7463 code0 == LSHIFT_EXPR ? tree01 : tree11);
7464 else if (code11 == MINUS_EXPR)
7466 tree tree110, tree111;
7467 tree110 = TREE_OPERAND (tree11, 0);
7468 tree111 = TREE_OPERAND (tree11, 1);
7469 STRIP_NOPS (tree110);
7470 STRIP_NOPS (tree111);
7471 if (TREE_CODE (tree110) == INTEGER_CST
7472 && 0 == compare_tree_int (tree110,
7473 TYPE_PRECISION
7474 (TREE_TYPE (TREE_OPERAND
7475 (arg0, 0))))
7476 && operand_equal_p (tree01, tree111, 0))
7477 return build2 ((code0 == LSHIFT_EXPR
7478 ? LROTATE_EXPR
7479 : RROTATE_EXPR),
7480 type, TREE_OPERAND (arg0, 0), tree01);
7482 else if (code01 == MINUS_EXPR)
7484 tree tree010, tree011;
7485 tree010 = TREE_OPERAND (tree01, 0);
7486 tree011 = TREE_OPERAND (tree01, 1);
7487 STRIP_NOPS (tree010);
7488 STRIP_NOPS (tree011);
7489 if (TREE_CODE (tree010) == INTEGER_CST
7490 && 0 == compare_tree_int (tree010,
7491 TYPE_PRECISION
7492 (TREE_TYPE (TREE_OPERAND
7493 (arg0, 0))))
7494 && operand_equal_p (tree11, tree011, 0))
7495 return build2 ((code0 != LSHIFT_EXPR
7496 ? LROTATE_EXPR
7497 : RROTATE_EXPR),
7498 type, TREE_OPERAND (arg0, 0), tree11);
7503 associate:
7504 /* In most languages, we can't associate operations on floats through
7505 parentheses. Rather than remember where the parentheses were, we
7506 don't associate floats at all, unless the user has specified
7507 -funsafe-math-optimizations. */
7509 if (! wins
7510 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7512 tree var0, con0, lit0, minus_lit0;
7513 tree var1, con1, lit1, minus_lit1;
7515 /* Split both trees into variables, constants, and literals. Then
7516 associate each group together, the constants with literals,
7517 then the result with variables. This increases the chances of
7518 literals being recombined later and of generating relocatable
7519 expressions for the sum of a constant and literal. */
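/* E.g. (X + 3) + (Y + 4) regroups to (X + Y) + 7, letting the
   two literals fold into one. */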
7520 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7521 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7522 code == MINUS_EXPR);
7524 /* Only do something if we found more than two objects. Otherwise,
7525 nothing has changed and we risk infinite recursion. */
7526 if (2 < ((var0 != 0) + (var1 != 0)
7527 + (con0 != 0) + (con1 != 0)
7528 + (lit0 != 0) + (lit1 != 0)
7529 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7531 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7532 if (code == MINUS_EXPR)
7533 code = PLUS_EXPR;
7535 var0 = associate_trees (var0, var1, code, type);
7536 con0 = associate_trees (con0, con1, code, type);
7537 lit0 = associate_trees (lit0, lit1, code, type);
7538 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7540 /* Preserve the MINUS_EXPR if the negative part of the literal is
7541 greater than the positive part. Otherwise, the multiplicative
7542 folding code (i.e. extract_muldiv) may be fooled when
7543 unsigned constants are subtracted, as in the following
7544 example: ((X*2 + 4) - 8U)/2. */
7545 if (minus_lit0 && lit0)
7547 if (TREE_CODE (lit0) == INTEGER_CST
7548 && TREE_CODE (minus_lit0) == INTEGER_CST
7549 && tree_int_cst_lt (lit0, minus_lit0))
7551 minus_lit0 = associate_trees (minus_lit0, lit0,
7552 MINUS_EXPR, type);
7553 lit0 = 0;
7555 else
7557 lit0 = associate_trees (lit0, minus_lit0,
7558 MINUS_EXPR, type);
7559 minus_lit0 = 0;
7562 if (minus_lit0)
7564 if (con0 == 0)
7565 return fold_convert (type,
7566 associate_trees (var0, minus_lit0,
7567 MINUS_EXPR, type));
7568 else
7570 con0 = associate_trees (con0, minus_lit0,
7571 MINUS_EXPR, type);
7572 return fold_convert (type,
7573 associate_trees (var0, con0,
7574 PLUS_EXPR, type));
7578 con0 = associate_trees (con0, lit0, code, type);
7579 return fold_convert (type, associate_trees (var0, con0,
7580 code, type));
7584 binary:
7585 if (wins)
7586 t1 = const_binop (code, arg0, arg1, 0);
7587 if (t1 != NULL_TREE)
7589 /* The return value should always have
7590 the same type as the original expression. */
7591 if (TREE_TYPE (t1) != type)
7592 t1 = fold_convert (type, t1);
7594 return t1;
7596 return NULL_TREE;
7598 case MINUS_EXPR:
7599 /* A - (-B) -> A + B */
7600 if (TREE_CODE (arg1) == NEGATE_EXPR)
7601 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7602 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7603 if (TREE_CODE (arg0) == NEGATE_EXPR
7604 && (FLOAT_TYPE_P (type)
7605 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7606 && negate_expr_p (arg1)
7607 && reorder_operands_p (arg0, arg1))
7608 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7609 TREE_OPERAND (arg0, 0));
7610 /* Convert -A - 1 to ~A. */
7611 if (INTEGRAL_TYPE_P (type)
7612 && TREE_CODE (arg0) == NEGATE_EXPR
7613 && integer_onep (arg1))
7614 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7616 /* Convert -1 - A to ~A. */
7617 if (INTEGRAL_TYPE_P (type)
7618 && integer_all_onesp (arg0))
7619 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7621 if (! FLOAT_TYPE_P (type))
7623 if (! wins && integer_zerop (arg0))
7624 return negate_expr (fold_convert (type, arg1));
7625 if (integer_zerop (arg1))
7626 return non_lvalue (fold_convert (type, arg0));
7628 /* Fold A - (A & B) into ~B & A. */
7629 if (!TREE_SIDE_EFFECTS (arg0)
7630 && TREE_CODE (arg1) == BIT_AND_EXPR)
7632 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7633 return fold_build2 (BIT_AND_EXPR, type,
7634 fold_build1 (BIT_NOT_EXPR, type,
7635 TREE_OPERAND (arg1, 0)),
7636 arg0);
7637 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7638 return fold_build2 (BIT_AND_EXPR, type,
7639 fold_build1 (BIT_NOT_EXPR, type,
7640 TREE_OPERAND (arg1, 1)),
7641 arg0);
7644 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7645 any power of 2 minus 1. */
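/* E.g. for A == 13 and B == 7: (13 & ~7) - (13 & 7) == 8 - 5 == 3,
   and (13 ^ 7) - 7 == 10 - 7 == 3. */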
7646 if (TREE_CODE (arg0) == BIT_AND_EXPR
7647 && TREE_CODE (arg1) == BIT_AND_EXPR
7648 && operand_equal_p (TREE_OPERAND (arg0, 0),
7649 TREE_OPERAND (arg1, 0), 0))
7651 tree mask0 = TREE_OPERAND (arg0, 1);
7652 tree mask1 = TREE_OPERAND (arg1, 1);
7653 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7655 if (operand_equal_p (tem, mask1, 0))
7657 tem = fold_build2 (BIT_XOR_EXPR, type,
7658 TREE_OPERAND (arg0, 0), mask1);
7659 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7664 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7665 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7666 return non_lvalue (fold_convert (type, arg0));
7668 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7669 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7670 (-ARG1 + ARG0) reduces to -ARG1. */
7671 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7672 return negate_expr (fold_convert (type, arg1));
7674 /* Fold &x - &x. This can happen from &x.foo - &x.
7675 This is unsafe for certain floats even in non-IEEE formats.
7676 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7677 Also note that operand_equal_p is always false if an operand
7678 is volatile. */
7680 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7681 && operand_equal_p (arg0, arg1, 0))
7682 return fold_convert (type, integer_zero_node);
7684 /* A - B -> A + (-B) if B is easily negatable. */
7685 if (!wins && negate_expr_p (arg1)
7686 && ((FLOAT_TYPE_P (type)
7687 /* Avoid this transformation if B is a positive REAL_CST. */
7688 && (TREE_CODE (arg1) != REAL_CST
7689 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7690 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7691 return fold_build2 (PLUS_EXPR, type,
7692 fold_convert (type, arg0),
7693 fold_convert (type, negate_expr (arg1)));
7695 /* Try folding difference of addresses. */
7697 HOST_WIDE_INT diff;
7699 if ((TREE_CODE (arg0) == ADDR_EXPR
7700 || TREE_CODE (arg1) == ADDR_EXPR)
7701 && ptr_difference_const (arg0, arg1, &diff))
7702 return build_int_cst_type (type, diff);
7705 /* Fold &a[i] - &a[j] to i-j. */
7706 if (TREE_CODE (arg0) == ADDR_EXPR
7707 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7708 && TREE_CODE (arg1) == ADDR_EXPR
7709 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7711 tree aref0 = TREE_OPERAND (arg0, 0);
7712 tree aref1 = TREE_OPERAND (arg1, 0);
7713 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7714 TREE_OPERAND (aref1, 0), 0))
7716 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7717 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7718 tree esz = array_ref_element_size (aref0);
7719 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7720 return fold_build2 (MULT_EXPR, type, diff,
7721 fold_convert (type, esz));
7726 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7727 of the array. The loop optimizer sometimes produces this type of
7728 expression. */
7729 if (TREE_CODE (arg0) == ADDR_EXPR)
7731 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7732 if (tem)
7733 return fold_convert (type, tem);
7736 if (flag_unsafe_math_optimizations
7737 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7738 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7739 && (tem = distribute_real_division (code, type, arg0, arg1)))
7740 return tem;
7742 if (TREE_CODE (arg0) == MULT_EXPR
7743 && TREE_CODE (arg1) == MULT_EXPR
7744 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7746 /* (A * C) - (B * C) -> (A-B) * C. */
7747 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7748 TREE_OPERAND (arg1, 1), 0))
7749 return fold_build2 (MULT_EXPR, type,
7750 fold_build2 (MINUS_EXPR, type,
7751 TREE_OPERAND (arg0, 0),
7752 TREE_OPERAND (arg1, 0)),
7753 TREE_OPERAND (arg0, 1));
7754 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7755 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7756 TREE_OPERAND (arg1, 0), 0))
7757 return fold_build2 (MULT_EXPR, type,
7758 TREE_OPERAND (arg0, 0),
7759 fold_build2 (MINUS_EXPR, type,
7760 TREE_OPERAND (arg0, 1),
7761 TREE_OPERAND (arg1, 1)));
7764 goto associate;
7766 case MULT_EXPR:
7767 /* (-A) * (-B) -> A * B */
7768 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7769 return fold_build2 (MULT_EXPR, type,
7770 TREE_OPERAND (arg0, 0),
7771 negate_expr (arg1));
7772 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7773 return fold_build2 (MULT_EXPR, type,
7774 negate_expr (arg0),
7775 TREE_OPERAND (arg1, 0));
7777 if (! FLOAT_TYPE_P (type))
7779 if (integer_zerop (arg1))
7780 return omit_one_operand (type, arg1, arg0);
7781 if (integer_onep (arg1))
7782 return non_lvalue (fold_convert (type, arg0));
7783 /* Transform x * -1 into -x. */
7784 if (integer_all_onesp (arg1))
7785 return fold_convert (type, negate_expr (arg0));
7787 /* (a * (1 << b)) is (a << b) */
7788 if (TREE_CODE (arg1) == LSHIFT_EXPR
7789 && integer_onep (TREE_OPERAND (arg1, 0)))
7790 return fold_build2 (LSHIFT_EXPR, type, arg0,
7791 TREE_OPERAND (arg1, 1));
7792 if (TREE_CODE (arg0) == LSHIFT_EXPR
7793 && integer_onep (TREE_OPERAND (arg0, 0)))
7794 return fold_build2 (LSHIFT_EXPR, type, arg1,
7795 TREE_OPERAND (arg0, 1));
7797 if (TREE_CODE (arg1) == INTEGER_CST
7798 && 0 != (tem = extract_muldiv (op0,
7799 fold_convert (type, arg1),
7800 code, NULL_TREE)))
7801 return fold_convert (type, tem);
7804 else
7806 /* Maybe fold x * 0 to 0. The expressions aren't the same
7807 when x is NaN, since x * 0 is also NaN. Nor are they the
7808 same in modes with signed zeros, since multiplying a
7809 negative value by 0 gives -0, not +0. */
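/* E.g. under IEEE rules, -3.0 * 0.0 yields -0.0 and NaN * 0.0
   yields NaN, so both properties must be checked. */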
7810 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7811 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7812 && real_zerop (arg1))
7813 return omit_one_operand (type, arg1, arg0);
7814 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7815 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7816 && real_onep (arg1))
7817 return non_lvalue (fold_convert (type, arg0));
7819 /* Transform x * -1.0 into -x. */
7820 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7821 && real_minus_onep (arg1))
7822 return fold_convert (type, negate_expr (arg0));
7824 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7825 if (flag_unsafe_math_optimizations
7826 && TREE_CODE (arg0) == RDIV_EXPR
7827 && TREE_CODE (arg1) == REAL_CST
7828 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7830 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7831 arg1, 0);
7832 if (tem)
7833 return fold_build2 (RDIV_EXPR, type, tem,
7834 TREE_OPERAND (arg0, 1));
7837 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7838 if (operand_equal_p (arg0, arg1, 0))
7840 tree tem = fold_strip_sign_ops (arg0);
7841 if (tem != NULL_TREE)
7843 tem = fold_convert (type, tem);
7844 return fold_build2 (MULT_EXPR, type, tem, tem);
7848 if (flag_unsafe_math_optimizations)
7850 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7851 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7853 /* Optimizations of root(...)*root(...). */
7854 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7856 tree rootfn, arg, arglist;
7857 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7858 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7860 /* Optimize sqrt(x)*sqrt(x) as x. */
7861 if (BUILTIN_SQRT_P (fcode0)
7862 && operand_equal_p (arg00, arg10, 0)
7863 && ! HONOR_SNANS (TYPE_MODE (type)))
7864 return arg00;
7866 /* Optimize root(x)*root(y) as root(x*y). */
7867 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7868 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7869 arglist = build_tree_list (NULL_TREE, arg);
7870 return build_function_call_expr (rootfn, arglist);
7873 /* Optimize expN(x)*expN(y) as expN(x+y). */
7874 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7876 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7877 tree arg = fold_build2 (PLUS_EXPR, type,
7878 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7879 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7880 tree arglist = build_tree_list (NULL_TREE, arg);
7881 return build_function_call_expr (expfn, arglist);
7884 /* Optimizations of pow(...)*pow(...). */
7885 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7886 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7887 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7889 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7890 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7891 1)));
7892 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7893 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7894 1)));
7896 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7897 if (operand_equal_p (arg01, arg11, 0))
7899 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7900 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7901 tree arglist = tree_cons (NULL_TREE, arg,
7902 build_tree_list (NULL_TREE,
7903 arg01));
7904 return build_function_call_expr (powfn, arglist);
7907 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7908 if (operand_equal_p (arg00, arg10, 0))
7910 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7911 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7912 tree arglist = tree_cons (NULL_TREE, arg00,
7913 build_tree_list (NULL_TREE,
7914 arg));
7915 return build_function_call_expr (powfn, arglist);
7919 /* Optimize tan(x)*cos(x) as sin(x). */
7920 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7921 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7922 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7923 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7924 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7925 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7926 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7927 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7929 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7931 if (sinfn != NULL_TREE)
7932 return build_function_call_expr (sinfn,
7933 TREE_OPERAND (arg0, 1));
7936 /* Optimize x*pow(x,c) as pow(x,c+1). */
7937 if (fcode1 == BUILT_IN_POW
7938 || fcode1 == BUILT_IN_POWF
7939 || fcode1 == BUILT_IN_POWL)
7941 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7942 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7943 1)));
7944 if (TREE_CODE (arg11) == REAL_CST
7945 && ! TREE_CONSTANT_OVERFLOW (arg11)
7946 && operand_equal_p (arg0, arg10, 0))
7948 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7949 REAL_VALUE_TYPE c;
7950 tree arg, arglist;
7952 c = TREE_REAL_CST (arg11);
7953 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7954 arg = build_real (type, c);
7955 arglist = build_tree_list (NULL_TREE, arg);
7956 arglist = tree_cons (NULL_TREE, arg0, arglist);
7957 return build_function_call_expr (powfn, arglist);
7961 /* Optimize pow(x,c)*x as pow(x,c+1). */
7962 if (fcode0 == BUILT_IN_POW
7963 || fcode0 == BUILT_IN_POWF
7964 || fcode0 == BUILT_IN_POWL)
7966 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7967 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7968 1)));
7969 if (TREE_CODE (arg01) == REAL_CST
7970 && ! TREE_CONSTANT_OVERFLOW (arg01)
7971 && operand_equal_p (arg1, arg00, 0))
7973 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7974 REAL_VALUE_TYPE c;
7975 tree arg, arglist;
7977 c = TREE_REAL_CST (arg01);
7978 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7979 arg = build_real (type, c);
7980 arglist = build_tree_list (NULL_TREE, arg);
7981 arglist = tree_cons (NULL_TREE, arg1, arglist);
7982 return build_function_call_expr (powfn, arglist);
7986 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7987 if (! optimize_size
7988 && operand_equal_p (arg0, arg1, 0))
7990 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7992 if (powfn)
7994 tree arg = build_real (type, dconst2);
7995 tree arglist = build_tree_list (NULL_TREE, arg);
7996 arglist = tree_cons (NULL_TREE, arg0, arglist);
7997 return build_function_call_expr (powfn, arglist);
8002 goto associate;
8004 case BIT_IOR_EXPR:
8005 bit_ior:
8006 if (integer_all_onesp (arg1))
8007 return omit_one_operand (type, arg1, arg0);
8008 if (integer_zerop (arg1))
8009 return non_lvalue (fold_convert (type, arg0));
8010 if (operand_equal_p (arg0, arg1, 0))
8011 return non_lvalue (fold_convert (type, arg0));
8013 /* ~X | X is -1. */
8014 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8015 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8017 t1 = build_int_cst (type, -1);
8018 t1 = force_fit_type (t1, 0, false, false);
8019 return omit_one_operand (type, t1, arg1);
8022 /* X | ~X is -1. */
8023 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8024 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8026 t1 = build_int_cst (type, -1);
8027 t1 = force_fit_type (t1, 0, false, false);
8028 return omit_one_operand (type, t1, arg0);
8031 t1 = distribute_bit_expr (code, type, arg0, arg1);
8032 if (t1 != NULL_TREE)
8033 return t1;
8035 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8037 This results in more efficient code for machines without a NAND
8038 instruction. Combine will canonicalize to the first form
8039 which will allow use of NAND instructions provided by the
8040 backend if they exist. */
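/* This is De Morgan's law: ~A | ~B == ~(A & B). */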
8041 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8042 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8044 return fold_build1 (BIT_NOT_EXPR, type,
8045 build2 (BIT_AND_EXPR, type,
8046 TREE_OPERAND (arg0, 0),
8047 TREE_OPERAND (arg1, 0)));
8050 /* See if this can be simplified into a rotate first. If that
8051 is unsuccessful continue in the association code. */
8052 goto bit_rotate;
8054 case BIT_XOR_EXPR:
8055 if (integer_zerop (arg1))
8056 return non_lvalue (fold_convert (type, arg0));
8057 if (integer_all_onesp (arg1))
8058 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8059 if (operand_equal_p (arg0, arg1, 0))
8060 return omit_one_operand (type, integer_zero_node, arg0);
8062 /* ~X ^ X is -1. */
8063 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8064 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8066 t1 = build_int_cst (type, -1);
8067 t1 = force_fit_type (t1, 0, false, false);
8068 return omit_one_operand (type, t1, arg1);
8071 /* X ^ ~X is -1. */
8072 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8073 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8075 t1 = build_int_cst (type, -1);
8076 t1 = force_fit_type (t1, 0, false, false);
8077 return omit_one_operand (type, t1, arg0);
8080 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8081 with a constant, and the two constants have no bits in common,
8082 we should treat this as a BIT_IOR_EXPR since this may produce more
8083 simplifications. */
8084 if (TREE_CODE (arg0) == BIT_AND_EXPR
8085 && TREE_CODE (arg1) == BIT_AND_EXPR
8086 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8087 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8088 && integer_zerop (const_binop (BIT_AND_EXPR,
8089 TREE_OPERAND (arg0, 1),
8090 TREE_OPERAND (arg1, 1), 0)))
8092 code = BIT_IOR_EXPR;
8093 goto bit_ior;
8096 /* (X | Y) ^ X -> Y & ~X. */
8097 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8098 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8100 tree t2 = TREE_OPERAND (arg0, 1);
8101 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8102 arg1);
8103 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8104 fold_convert (type, t1));
8105 return t1;
8108 /* (Y | X) ^ X -> Y & ~X. */
8109 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8110 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8112 tree t2 = TREE_OPERAND (arg0, 0);
8113 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8114 arg1);
8115 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8116 fold_convert (type, t1));
8117 return t1;
8120 /* X ^ (X | Y) -> Y & ~X. */
8121 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8122 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8124 tree t2 = TREE_OPERAND (arg1, 1);
8125 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8126 arg0);
8127 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8128 fold_convert (type, t1));
8129 return t1;
8132 /* X ^ (Y | X) -> Y & ~X. */
8133 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8134 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8136 tree t2 = TREE_OPERAND (arg1, 0);
8137 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8138 arg0);
8139 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8140 fold_convert (type, t1));
8141 return t1;
8144 /* Convert ~X ^ ~Y to X ^ Y. */
8145 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8146 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8147 return fold_build2 (code, type,
8148 fold_convert (type, TREE_OPERAND (arg0, 0)),
8149 fold_convert (type, TREE_OPERAND (arg1, 0)));
8151 /* See if this can be simplified into a rotate first. If that
8152 is unsuccessful continue in the association code. */
8153 goto bit_rotate;
8155 case BIT_AND_EXPR:
8156 if (integer_all_onesp (arg1))
8157 return non_lvalue (fold_convert (type, arg0));
8158 if (integer_zerop (arg1))
8159 return omit_one_operand (type, arg1, arg0);
8160 if (operand_equal_p (arg0, arg1, 0))
8161 return non_lvalue (fold_convert (type, arg0));
8163 /* ~X & X is always zero. */
8164 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8165 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8166 return omit_one_operand (type, integer_zero_node, arg1);
8168 /* X & ~X is always zero. */
8169 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8170 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8171 return omit_one_operand (type, integer_zero_node, arg0);
8173 t1 = distribute_bit_expr (code, type, arg0, arg1);
8174 if (t1 != NULL_TREE)
8175 return t1;
8176 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8177 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8178 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8180 unsigned int prec
8181 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8183 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8184 && (~TREE_INT_CST_LOW (arg1)
8185 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8186 return fold_convert (type, TREE_OPERAND (arg0, 0));
8189 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8191 This results in more efficient code for machines without a NOR
8192 instruction. Combine will canonicalize to the first form
8193 which will allow use of NOR instructions provided by the
8194 backend if they exist. */
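/* This is the dual De Morgan law: ~A & ~B == ~(A | B). */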
8195 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8196 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8198 return fold_build1 (BIT_NOT_EXPR, type,
8199 build2 (BIT_IOR_EXPR, type,
8200 TREE_OPERAND (arg0, 0),
8201 TREE_OPERAND (arg1, 0)));
8204 goto associate;
8206 case RDIV_EXPR:
8207 /* Don't touch a floating-point divide by zero unless the mode
8208 of the constant can represent infinity. */
8209 if (TREE_CODE (arg1) == REAL_CST
8210 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8211 && real_zerop (arg1))
8212 return NULL_TREE;
8214 /* (-A) / (-B) -> A / B */
8215 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8216 return fold_build2 (RDIV_EXPR, type,
8217 TREE_OPERAND (arg0, 0),
8218 negate_expr (arg1));
8219 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8220 return fold_build2 (RDIV_EXPR, type,
8221 negate_expr (arg0),
8222 TREE_OPERAND (arg1, 0));
8224 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8225 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8226 && real_onep (arg1))
8227 return non_lvalue (fold_convert (type, arg0));
8229 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8230 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8231 && real_minus_onep (arg1))
8232 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8234 /* If ARG1 is a constant, we can convert this to a multiply by the
8235 reciprocal. This does not have the same rounding properties,
8236 so only do this if -funsafe-math-optimizations. We can actually
8237 always safely do it if ARG1 is a power of two, but it's hard to
8238 tell if it is or not in a portable manner. */
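/* E.g. X / 4.0 becomes X * 0.25, which is exact because 4.0 is
   a power of two; the exact_real_inverse check below catches
   such cases even without -funsafe-math-optimizations. */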
8239 if (TREE_CODE (arg1) == REAL_CST)
8241 if (flag_unsafe_math_optimizations
8242 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8243 arg1, 0)))
8244 return fold_build2 (MULT_EXPR, type, arg0, tem);
8245 /* Find the reciprocal if optimizing and the result is exact. */
8246 if (optimize)
8248 REAL_VALUE_TYPE r;
8249 r = TREE_REAL_CST (arg1);
8250 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8252 tem = build_real (type, r);
8253 return fold_build2 (MULT_EXPR, type,
8254 fold_convert (type, arg0), tem);
8258 /* Convert A/B/C to A/(B*C). */
8259 if (flag_unsafe_math_optimizations
8260 && TREE_CODE (arg0) == RDIV_EXPR)
8261 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8262 fold_build2 (MULT_EXPR, type,
8263 TREE_OPERAND (arg0, 1), arg1));
8265 /* Convert A/(B/C) to (A/B)*C. */
8266 if (flag_unsafe_math_optimizations
8267 && TREE_CODE (arg1) == RDIV_EXPR)
8268 return fold_build2 (MULT_EXPR, type,
8269 fold_build2 (RDIV_EXPR, type, arg0,
8270 TREE_OPERAND (arg1, 0)),
8271 TREE_OPERAND (arg1, 1));
8273 /* Convert C1/(X*C2) into (C1/C2)/X. */
8274 if (flag_unsafe_math_optimizations
8275 && TREE_CODE (arg1) == MULT_EXPR
8276 && TREE_CODE (arg0) == REAL_CST
8277 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8279 tree tem = const_binop (RDIV_EXPR, arg0,
8280 TREE_OPERAND (arg1, 1), 0);
8281 if (tem)
8282 return fold_build2 (RDIV_EXPR, type, tem,
8283 TREE_OPERAND (arg1, 0));
8286 if (flag_unsafe_math_optimizations)
8288 enum built_in_function fcode = builtin_mathfn_code (arg1);
8289 /* Optimize x/expN(y) into x*expN(-y). */
8290 if (BUILTIN_EXPONENT_P (fcode))
8292 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8293 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8294 tree arglist = build_tree_list (NULL_TREE,
8295 fold_convert (type, arg));
8296 arg1 = build_function_call_expr (expfn, arglist);
8297 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8300 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8301 if (fcode == BUILT_IN_POW
8302 || fcode == BUILT_IN_POWF
8303 || fcode == BUILT_IN_POWL)
8305 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8306 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8307 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8308 tree neg11 = fold_convert (type, negate_expr (arg11));
8309 tree arglist = tree_cons (NULL_TREE, arg10,
8310 build_tree_list (NULL_TREE, neg11));
8311 arg1 = build_function_call_expr (powfn, arglist);
8312 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8316 if (flag_unsafe_math_optimizations)
8318 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8319 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8321 /* Optimize sin(x)/cos(x) as tan(x). */
8322 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8323 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8324 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8325 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8326 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8328 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8330 if (tanfn != NULL_TREE)
8331 return build_function_call_expr (tanfn,
8332 TREE_OPERAND (arg0, 1));
8335 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8336 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8337 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8338 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8339 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8340 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8342 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8344 if (tanfn != NULL_TREE)
8346 tree tmp = TREE_OPERAND (arg0, 1);
8347 tmp = build_function_call_expr (tanfn, tmp);
8348 return fold_build2 (RDIV_EXPR, type,
8349 build_real (type, dconst1), tmp);
8353 /* Optimize pow(x,c)/x as pow(x,c-1). */
8354 if (fcode0 == BUILT_IN_POW
8355 || fcode0 == BUILT_IN_POWF
8356 || fcode0 == BUILT_IN_POWL)
8358 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8359 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8360 if (TREE_CODE (arg01) == REAL_CST
8361 && ! TREE_CONSTANT_OVERFLOW (arg01)
8362 && operand_equal_p (arg1, arg00, 0))
8364 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8365 REAL_VALUE_TYPE c;
8366 tree arg, arglist;
8368 c = TREE_REAL_CST (arg01);
8369 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8370 arg = build_real (type, c);
8371 arglist = build_tree_list (NULL_TREE, arg);
8372 arglist = tree_cons (NULL_TREE, arg1, arglist);
8373 return build_function_call_expr (powfn, arglist);
8377 goto binary;
8379 case TRUNC_DIV_EXPR:
8380 case ROUND_DIV_EXPR:
8381 case FLOOR_DIV_EXPR:
8382 case CEIL_DIV_EXPR:
8383 case EXACT_DIV_EXPR:
8384 if (integer_onep (arg1))
8385 return non_lvalue (fold_convert (type, arg0));
8386 if (integer_zerop (arg1))
8387 return NULL_TREE;
8388 /* X / -1 is -X. */
8389 if (!TYPE_UNSIGNED (type)
8390 && TREE_CODE (arg1) == INTEGER_CST
8391 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8392 && TREE_INT_CST_HIGH (arg1) == -1)
8393 return fold_convert (type, negate_expr (arg0));
8395 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8396 operation, EXACT_DIV_EXPR.
8398 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8399 At one time others generated faster code; it's not clear whether they
8400 do after the last round of changes to the DIV code in expmed.c. */
8401 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8402 && multiple_of_p (type, arg0, arg1))
8403 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8405 if (TREE_CODE (arg1) == INTEGER_CST
8406 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8407 return fold_convert (type, tem);
8409 goto binary;
8411 case CEIL_MOD_EXPR:
8412 case FLOOR_MOD_EXPR:
8413 case ROUND_MOD_EXPR:
8414 case TRUNC_MOD_EXPR:
8415 /* X % 1 is always zero, but be sure to preserve any side
8416 effects in X. */
8417 if (integer_onep (arg1))
8418 return omit_one_operand (type, integer_zero_node, arg0);
8420 /* For X % 0, return the expression unchanged so that we get the
8421 proper warnings and errors. */
8422 if (integer_zerop (arg1))
8423 return NULL_TREE;
8425 /* 0 % X is always zero, but be sure to preserve any side
8426 effects in X. Place this after checking for X == 0. */
8427 if (integer_zerop (arg0))
8428 return omit_one_operand (type, integer_zero_node, arg1);
8430 /* X % -1 is zero. */
8431 if (!TYPE_UNSIGNED (type)
8432 && TREE_CODE (arg1) == INTEGER_CST
8433 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8434 && TREE_INT_CST_HIGH (arg1) == -1)
8435 return omit_one_operand (type, integer_zero_node, arg0);
8437 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, i.e.
8438 "X % C" into "X & C2" where C2 == C - 1, if X and C are positive. */
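/* E.g. X % 8 folds to X & 7 when X is known non-negative. */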
8439 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8440 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8441 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8443 unsigned HOST_WIDE_INT high, low;
8444 tree mask;
8445 int l;
8447 l = tree_log2 (arg1);
8448 if (l >= HOST_BITS_PER_WIDE_INT)
8450 high = ((unsigned HOST_WIDE_INT) 1
8451 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8452 low = -1;
8454 else
8456 high = 0;
8457 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8460 mask = build_int_cst_wide (type, low, high);
8461 return fold_build2 (BIT_AND_EXPR, type,
8462 fold_convert (type, arg0), mask);
8465 /* X % -C is the same as X % C. */
8466 if (code == TRUNC_MOD_EXPR
8467 && !TYPE_UNSIGNED (type)
8468 && TREE_CODE (arg1) == INTEGER_CST
8469 && !TREE_CONSTANT_OVERFLOW (arg1)
8470 && TREE_INT_CST_HIGH (arg1) < 0
8471 && !flag_trapv
8472 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8473 && !sign_bit_p (arg1, arg1))
8474 return fold_build2 (code, type, fold_convert (type, arg0),
8475 fold_convert (type, negate_expr (arg1)));
8477 /* X % -Y is the same as X % Y. */
8478 if (code == TRUNC_MOD_EXPR
8479 && !TYPE_UNSIGNED (type)
8480 && TREE_CODE (arg1) == NEGATE_EXPR
8481 && !flag_trapv)
8482 return fold_build2 (code, type, fold_convert (type, arg0),
8483 fold_convert (type, TREE_OPERAND (arg1, 0)));
8485 if (TREE_CODE (arg1) == INTEGER_CST
8486 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8487 return fold_convert (type, tem);
8489 goto binary;
8491 case LROTATE_EXPR:
8492 case RROTATE_EXPR:
8493 if (integer_all_onesp (arg0))
8494 return omit_one_operand (type, arg0, arg1);
8495 goto shift;
8497 case RSHIFT_EXPR:
8498 /* Optimize -1 >> x for arithmetic right shifts. */
8499 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8500 return omit_one_operand (type, arg0, arg1);
8501 /* ... fall through ... */
8503 case LSHIFT_EXPR:
8504 shift:
8505 if (integer_zerop (arg1))
8506 return non_lvalue (fold_convert (type, arg0));
8507 if (integer_zerop (arg0))
8508 return omit_one_operand (type, arg0, arg1);
8510 /* Since a negative shift count is not well-defined,
8511 don't try to compute the result in the compiler. */
8512 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8513 return NULL_TREE;
8515 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
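/* E.g. (X << 3) << 5 becomes X << 8, subject to the precision
   checks below. */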
8516 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8517 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8518 && host_integerp (TREE_OPERAND (arg0, 1), false)
8519 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8521 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8522 + TREE_INT_CST_LOW (arg1));
8524 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8525 being well defined. */
8526 if (low >= TYPE_PRECISION (type))
8528 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8529 low = low % TYPE_PRECISION (type);
8530 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8531 return build_int_cst (type, 0);
8532 else
8533 low = TYPE_PRECISION (type) - 1;
8536 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8537 build_int_cst (type, low));
8540 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8541 into x & ((unsigned)-1 >> c) for unsigned types. */
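/* E.g. (X >> 4) << 4 clears the low four bits, i.e. X & ~15. */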
8542 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8543 || (TYPE_UNSIGNED (type)
8544 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8545 && host_integerp (arg1, false)
8546 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8547 && host_integerp (TREE_OPERAND (arg0, 1), false)
8548 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8550 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8551 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8552 tree lshift;
8553 tree arg00;
8555 if (low0 == low1)
8557 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8559 lshift = build_int_cst (type, -1);
8560 lshift = int_const_binop (code, lshift, arg1, 0);
8562 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8566 /* Rewrite an LROTATE_EXPR by a constant into an
8567 RROTATE_EXPR by a new constant. */
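/* E.g. in a 32-bit mode, rotating left by 8 is the same as
   rotating right by 24. */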
8568 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8570 tree tem = build_int_cst (NULL_TREE,
8571 GET_MODE_BITSIZE (TYPE_MODE (type)));
8572 tem = fold_convert (TREE_TYPE (arg1), tem);
8573 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8574 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8577 /* If we have a rotate of a bit operation with the rotate count and
8578 the second operand of the bit operation both constant,
8579 permute the two operations. */
8580 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8581 && (TREE_CODE (arg0) == BIT_AND_EXPR
8582 || TREE_CODE (arg0) == BIT_IOR_EXPR
8583 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8585 return fold_build2 (TREE_CODE (arg0), type,
8586 fold_build2 (code, type,
8587 TREE_OPERAND (arg0, 0), arg1),
8588 fold_build2 (code, type,
8589 TREE_OPERAND (arg0, 1), arg1));
8591 /* Two consecutive rotates adding up to the width of the mode can
8592 be ignored. */
8593 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8594 && TREE_CODE (arg0) == RROTATE_EXPR
8595 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8596 && TREE_INT_CST_HIGH (arg1) == 0
8597 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8598 && ((TREE_INT_CST_LOW (arg1)
8599 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8600 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8601 return TREE_OPERAND (arg0, 0);
8603 goto binary;
8605 case MIN_EXPR:
8606 if (operand_equal_p (arg0, arg1, 0))
8607 return omit_one_operand (type, arg0, arg1);
8608 if (INTEGRAL_TYPE_P (type)
8609 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8610 return omit_one_operand (type, arg1, arg0);
8611 goto associate;
8613 case MAX_EXPR:
8614 if (operand_equal_p (arg0, arg1, 0))
8615 return omit_one_operand (type, arg0, arg1);
8616 if (INTEGRAL_TYPE_P (type)
8617 && TYPE_MAX_VALUE (type)
8618 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8619 return omit_one_operand (type, arg1, arg0);
8620 goto associate;
8622 case TRUTH_ANDIF_EXPR:
8623 /* Note that the operands of this must be ints
8624 and their values must be 0 or 1.
8625 ("true" is a fixed value perhaps depending on the language.) */
8626 /* If first arg is constant zero, return it. */
8627 if (integer_zerop (arg0))
8628 return fold_convert (type, arg0);
8629 case TRUTH_AND_EXPR:
8630 /* If either arg is constant true, drop it. */
8631 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8632 return non_lvalue (fold_convert (type, arg1));
8633 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8634 /* Preserve sequence points. */
8635 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8636 return non_lvalue (fold_convert (type, arg0));
8637 /* If second arg is constant zero, result is zero, but first arg
8638 must be evaluated. */
8639 if (integer_zerop (arg1))
8640 return omit_one_operand (type, arg1, arg0);
8641 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8642 case will be handled here. */
8643 if (integer_zerop (arg0))
8644 return omit_one_operand (type, arg0, arg1);
8646 /* !X && X is always false. */
8647 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8648 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8649 return omit_one_operand (type, integer_zero_node, arg1);
8650 /* X && !X is always false. */
8651 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8652 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8653 return omit_one_operand (type, integer_zero_node, arg0);
8655 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8656 means A >= Y && A != MAX, but in this case we know that
8657 A < X <= MAX. */
8659 if (!TREE_SIDE_EFFECTS (arg0)
8660 && !TREE_SIDE_EFFECTS (arg1))
8662 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8663 if (tem && !operand_equal_p (tem, arg0, 0))
8664 return fold_build2 (code, type, tem, arg1);
8666 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8667 if (tem && !operand_equal_p (tem, arg1, 0))
8668 return fold_build2 (code, type, arg0, tem);
8671 truth_andor:
8672 /* We only do these simplifications if we are optimizing. */
8673 if (!optimize)
8674 return NULL_TREE;
8676 /* Check for things like (A || B) && (A || C). We can convert this
8677 to A || (B && C). Note that either operator can be any of the four
8678 truth and/or operations and the transformation will still be
8679 valid. Also note that we only care about order for the
8680 ANDIF and ORIF operators: if B contains side effects, reordering
8681 might change the truth-value of A. */
8682 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8683 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8684 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8685 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8686 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8687 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8689 tree a00 = TREE_OPERAND (arg0, 0);
8690 tree a01 = TREE_OPERAND (arg0, 1);
8691 tree a10 = TREE_OPERAND (arg1, 0);
8692 tree a11 = TREE_OPERAND (arg1, 1);
8693 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8694 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8695 && (code == TRUTH_AND_EXPR
8696 || code == TRUTH_OR_EXPR));
8698 if (operand_equal_p (a00, a10, 0))
8699 return fold_build2 (TREE_CODE (arg0), type, a00,
8700 fold_build2 (code, type, a01, a11));
8701 else if (commutative && operand_equal_p (a00, a11, 0))
8702 return fold_build2 (TREE_CODE (arg0), type, a00,
8703 fold_build2 (code, type, a01, a10));
8704 else if (commutative && operand_equal_p (a01, a10, 0))
8705 return fold_build2 (TREE_CODE (arg0), type, a01,
8706 fold_build2 (code, type, a00, a11));
8708 /* This case is tricky because we must either have commutative
8709 operators or else A10 must not have side-effects. */
8711 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8712 && operand_equal_p (a01, a11, 0))
8713 return fold_build2 (TREE_CODE (arg0), type,
8714 fold_build2 (code, type, a00, a10),
8715 a01);
8718 /* See if we can build a range comparison. */
8719 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8720 return tem;
8722 /* Check for the possibility of merging component references. If our
8723 lhs is another similar operation, try to merge its rhs with our
8724 rhs. Then try to merge our lhs and rhs. */
8725 if (TREE_CODE (arg0) == code
8726 && 0 != (tem = fold_truthop (code, type,
8727 TREE_OPERAND (arg0, 1), arg1)))
8728 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8730 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8731 return tem;
8733 return NULL_TREE;
8735 case TRUTH_ORIF_EXPR:
8736 /* Note that the operands of this must be ints
8737 and their values must be 0 or true.
8738 ("true" is a fixed value perhaps depending on the language.) */
8739 /* If first arg is constant true, return it. */
8740 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8741 return fold_convert (type, arg0);
8742 case TRUTH_OR_EXPR:
8743 /* If either arg is constant zero, drop it. */
8744 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8745 return non_lvalue (fold_convert (type, arg1));
8746 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8747 /* Preserve sequence points. */
8748 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8749 return non_lvalue (fold_convert (type, arg0));
8750 /* If second arg is constant true, result is true, but we must
8751 evaluate first arg. */
8752 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8753 return omit_one_operand (type, arg1, arg0);
8754 /* Likewise for first arg, but note this only occurs here for
8755 TRUTH_OR_EXPR. */
8756 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8757 return omit_one_operand (type, arg0, arg1);
8759 /* !X || X is always true. */
8760 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8761 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8762 return omit_one_operand (type, integer_one_node, arg1);
8763 /* X || !X is always true. */
8764 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8765 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8766 return omit_one_operand (type, integer_one_node, arg0);
8768 goto truth_andor;
8770 case TRUTH_XOR_EXPR:
8771 /* If the second arg is constant zero, drop it. */
8772 if (integer_zerop (arg1))
8773 return non_lvalue (fold_convert (type, arg0));
8774 /* If the second arg is constant true, this is a logical inversion. */
8775 if (integer_onep (arg1))
8777 /* Only call invert_truthvalue if operand is a truth value. */
8778 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8779 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8780 else
8781 tem = invert_truthvalue (arg0);
8782 return non_lvalue (fold_convert (type, tem));
8784 /* Identical arguments cancel to zero. */
8785 if (operand_equal_p (arg0, arg1, 0))
8786 return omit_one_operand (type, integer_zero_node, arg0);
8788 /* !X ^ X is always true. */
8789 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8790 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8791 return omit_one_operand (type, integer_one_node, arg1);
8793 /* X ^ !X is always true. */
8794 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8795 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8796 return omit_one_operand (type, integer_one_node, arg0);
8798 return NULL_TREE;
8800 case EQ_EXPR:
8801 case NE_EXPR:
8802 case LT_EXPR:
8803 case GT_EXPR:
8804 case LE_EXPR:
8805 case GE_EXPR:
8806 /* If one arg is a real or integer constant, put it last. */
8807 if (tree_swap_operands_p (arg0, arg1, true))
8808 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8810 /* bool_var != 0 becomes bool_var. */
8811 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8812 && code == NE_EXPR)
8813 return non_lvalue (fold_convert (type, arg0));
8815 /* bool_var == 1 becomes bool_var. */
8816 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8817 && code == EQ_EXPR)
8818 return non_lvalue (fold_convert (type, arg0));
8820 /* If this is an equality comparison of the address of a non-weak
8821 object against zero, then we know the result. */
8822 if ((code == EQ_EXPR || code == NE_EXPR)
8823 && TREE_CODE (arg0) == ADDR_EXPR
8824 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8825 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8826 && integer_zerop (arg1))
8827 return constant_boolean_node (code != EQ_EXPR, type);
8829 /* If this is an equality comparison of the address of two non-weak,
8830 unaliased symbols neither of which are extern (since we do not
8831 have access to attributes for externs), then we know the result. */
8832 if ((code == EQ_EXPR || code == NE_EXPR)
8833 && TREE_CODE (arg0) == ADDR_EXPR
8834 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8835 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8836 && ! lookup_attribute ("alias",
8837 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8838 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8839 && TREE_CODE (arg1) == ADDR_EXPR
8840 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8841 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8842 && ! lookup_attribute ("alias",
8843 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8844 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8846 /* We know that we're looking at the address of two
8847 non-weak, unaliased, static _DECL nodes.
8849 It is both wasteful and incorrect to call operand_equal_p
8850 to compare the two ADDR_EXPR nodes. It is wasteful in that
8851 all we need to do is test pointer equality for the arguments
8852 to the two ADDR_EXPR nodes. It is incorrect to use
8853 operand_equal_p as that function is NOT equivalent to a
8854 C equality test. It can in fact return false for two
8855 objects which would test as equal using the C equality
8856 operator. */
8857 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8858 return constant_boolean_node (equal
8859 ? code == EQ_EXPR : code != EQ_EXPR,
8860 type);
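/* Illustrative sketch (hypothetical example): distinct non-weak,
   unaliased statics cannot share an address, so both comparisons
   below fold to constants.  */
#if 0
static int x, y;
int f (void) { return &x == &y; }   /* folds to: return 0; */
int g (void) { return &x == 0; }    /* folds to: return 0; */
#endif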
8863 /* If this is a comparison of two exprs that look like an
8864 ARRAY_REF of the same object, then we can fold this to a
8865 comparison of the two offsets. */
8866 if (TREE_CODE_CLASS (code) == tcc_comparison)
8868 tree base0, offset0, base1, offset1;
8870 if (extract_array_ref (arg0, &base0, &offset0)
8871 && extract_array_ref (arg1, &base1, &offset1)
8872 && operand_equal_p (base0, base1, 0))
8874 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
8875 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
8876 offset0 = NULL_TREE;
8877 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
8878 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
8879 offset1 = NULL_TREE;
8880 if (offset0 == NULL_TREE
8881 && offset1 == NULL_TREE)
8883 offset0 = integer_zero_node;
8884 offset1 = integer_zero_node;
8886 else if (offset0 == NULL_TREE)
8887 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8888 else if (offset1 == NULL_TREE)
8889 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8891 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8892 return fold_build2 (code, type, offset0, offset1);
8896 /* Transform comparisons of the form X +- C CMP X. */
8897 if ((code != EQ_EXPR && code != NE_EXPR)
8898 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8899 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8900 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8901 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8902 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8903 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8904 && !(flag_wrapv || flag_trapv))))
8906 tree arg01 = TREE_OPERAND (arg0, 1);
8907 enum tree_code code0 = TREE_CODE (arg0);
8908 int is_positive;
8910 if (TREE_CODE (arg01) == REAL_CST)
8911 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8912 else
8913 is_positive = tree_int_cst_sgn (arg01);
8915 /* (X - c) > X becomes false. */
8916 if (code == GT_EXPR
8917 && ((code0 == MINUS_EXPR && is_positive >= 0)
8918 || (code0 == PLUS_EXPR && is_positive <= 0)))
8919 return constant_boolean_node (0, type);
8921 /* Likewise (X + c) < X becomes false. */
8922 if (code == LT_EXPR
8923 && ((code0 == PLUS_EXPR && is_positive >= 0)
8924 || (code0 == MINUS_EXPR && is_positive <= 0)))
8925 return constant_boolean_node (0, type);
8927 /* Convert (X - c) <= X to true. */
8928 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8929 && code == LE_EXPR
8930 && ((code0 == MINUS_EXPR && is_positive >= 0)
8931 || (code0 == PLUS_EXPR && is_positive <= 0)))
8932 return constant_boolean_node (1, type);
8934 /* Convert (X + c) >= X to true. */
8935 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8936 && code == GE_EXPR
8937 && ((code0 == PLUS_EXPR && is_positive >= 0)
8938 || (code0 == MINUS_EXPR && is_positive <= 0)))
8939 return constant_boolean_node (1, type);
8941 if (TREE_CODE (arg01) == INTEGER_CST)
8943 /* Convert X + c > X and X - c < X to true for integers. */
8944 if (code == GT_EXPR
8945 && ((code0 == PLUS_EXPR && is_positive > 0)
8946 || (code0 == MINUS_EXPR && is_positive < 0)))
8947 return constant_boolean_node (1, type);
8949 if (code == LT_EXPR
8950 && ((code0 == MINUS_EXPR && is_positive > 0)
8951 || (code0 == PLUS_EXPR && is_positive < 0)))
8952 return constant_boolean_node (1, type);
8954 /* Convert X + c <= X and X - c >= X to false for integers. */
8955 if (code == LE_EXPR
8956 && ((code0 == PLUS_EXPR && is_positive > 0)
8957 || (code0 == MINUS_EXPR && is_positive < 0)))
8958 return constant_boolean_node (0, type);
8960 if (code == GE_EXPR
8961 && ((code0 == MINUS_EXPR && is_positive > 0)
8962 || (code0 == PLUS_EXPR && is_positive < 0)))
8963 return constant_boolean_node (0, type);
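/* Illustrative sketch (hypothetical example, assuming signed
   arithmetic without -fwrapv or -ftrapv): the X +- C CMP X cases
   above fold to constants.  */
#if 0
int f (int x) { return x - 5 > x; }   /* folds to: return 0; */
int g (int x) { return x + 5 > x; }   /* folds to: return 1; */
#endif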
8967 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8968 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8969 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8970 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8971 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8972 && !(flag_wrapv || flag_trapv))
8973 && (TREE_CODE (arg1) == INTEGER_CST
8974 && !TREE_OVERFLOW (arg1)))
8976 tree const1 = TREE_OPERAND (arg0, 1);
8977 tree const2 = arg1;
8978 tree variable = TREE_OPERAND (arg0, 0);
8979 tree lhs;
8980 int lhs_add;
8981 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8983 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8984 TREE_TYPE (arg1), const2, const1);
8985 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8986 && (TREE_CODE (lhs) != INTEGER_CST
8987 || !TREE_OVERFLOW (lhs)))
8988 return fold_build2 (code, type, variable, lhs);
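/* Illustrative sketch (hypothetical example): moving the constant
   to the right-hand side as described above.  */
#if 0
int f (int x) { return x + 3 < 10; }   /* folds to: return x < 7; */
#endif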
8991 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8993 tree targ0 = strip_float_extensions (arg0);
8994 tree targ1 = strip_float_extensions (arg1);
8995 tree newtype = TREE_TYPE (targ0);
8997 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8998 newtype = TREE_TYPE (targ1);
9000 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9001 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9002 return fold_build2 (code, type, fold_convert (newtype, targ0),
9003 fold_convert (newtype, targ1));
9005 /* (-a) CMP (-b) -> b CMP a */
9006 if (TREE_CODE (arg0) == NEGATE_EXPR
9007 && TREE_CODE (arg1) == NEGATE_EXPR)
9008 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9009 TREE_OPERAND (arg0, 0));
9011 if (TREE_CODE (arg1) == REAL_CST)
9013 REAL_VALUE_TYPE cst;
9014 cst = TREE_REAL_CST (arg1);
9016 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9017 if (TREE_CODE (arg0) == NEGATE_EXPR)
9018 return
9019 fold_build2 (swap_tree_comparison (code), type,
9020 TREE_OPERAND (arg0, 0),
9021 build_real (TREE_TYPE (arg1),
9022 REAL_VALUE_NEGATE (cst)));
9024 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9025 /* a CMP (-0) -> a CMP 0 */
9026 if (REAL_VALUE_MINUS_ZERO (cst))
9027 return fold_build2 (code, type, arg0,
9028 build_real (TREE_TYPE (arg1), dconst0));
9030 /* x != NaN is always true, other ops are always false. */
9031 if (REAL_VALUE_ISNAN (cst)
9032 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9034 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9035 return omit_one_operand (type, tem, arg0);
9038 /* Fold comparisons against infinity. */
9039 if (REAL_VALUE_ISINF (cst))
9041 tem = fold_inf_compare (code, type, arg0, arg1);
9042 if (tem != NULL_TREE)
9043 return tem;
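/* Illustrative sketch (hypothetical examples of the REAL_CST cases
   above).  */
#if 0
int f (double a) { return a == -0.0; }   /* compared as a == 0.0 */
int g (double a) { return -a > 2.0; }    /* folds to: a < -2.0 */
#endif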
9047 /* If this is a comparison of a real constant with a PLUS_EXPR
9048 or a MINUS_EXPR of a real constant, we can convert it into a
9049 comparison with a revised real constant as long as no overflow
9050 occurs when unsafe_math_optimizations are enabled. */
9051 if (flag_unsafe_math_optimizations
9052 && TREE_CODE (arg1) == REAL_CST
9053 && (TREE_CODE (arg0) == PLUS_EXPR
9054 || TREE_CODE (arg0) == MINUS_EXPR)
9055 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9056 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9057 ? MINUS_EXPR : PLUS_EXPR,
9058 arg1, TREE_OPERAND (arg0, 1), 0))
9059 && ! TREE_CONSTANT_OVERFLOW (tem))
9060 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9062 /* Likewise, we can simplify a comparison of a real constant with
9063 a MINUS_EXPR whose first operand is also a real constant, i.e.
9064 (c1 - x) < c2 becomes x > c1-c2. */
9065 if (flag_unsafe_math_optimizations
9066 && TREE_CODE (arg1) == REAL_CST
9067 && TREE_CODE (arg0) == MINUS_EXPR
9068 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9069 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9070 arg1, 0))
9071 && ! TREE_CONSTANT_OVERFLOW (tem))
9072 return fold_build2 (swap_tree_comparison (code), type,
9073 TREE_OPERAND (arg0, 1), tem);
9075 /* Fold comparisons against built-in math functions. */
9076 if (TREE_CODE (arg1) == REAL_CST
9077 && flag_unsafe_math_optimizations
9078 && ! flag_errno_math)
9080 enum built_in_function fcode = builtin_mathfn_code (arg0);
9082 if (fcode != END_BUILTINS)
9084 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9085 if (tem != NULL_TREE)
9086 return tem;
9091 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9092 if (TREE_CONSTANT (arg1)
9093 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9094 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9095 /* This optimization is invalid for ordered comparisons
9096 if CONST+INCR overflows or if foo+incr might overflow.
9097 This optimization is invalid for floating point due to rounding.
9098 For pointer types we assume overflow doesn't happen. */
9099 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9100 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9101 && (code == EQ_EXPR || code == NE_EXPR))))
9103 tree varop, newconst;
9105 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9107 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9108 arg1, TREE_OPERAND (arg0, 1));
9109 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9110 TREE_OPERAND (arg0, 0),
9111 TREE_OPERAND (arg0, 1));
9113 else
9115 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9116 arg1, TREE_OPERAND (arg0, 1));
9117 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9118 TREE_OPERAND (arg0, 0),
9119 TREE_OPERAND (arg0, 1));
9123 /* If VAROP is a reference to a bitfield, we must mask
9124 the constant by the width of the field. */
9125 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9126 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9127 && host_integerp (DECL_SIZE (TREE_OPERAND
9128 (TREE_OPERAND (varop, 0), 1)), 1))
9130 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9131 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9132 tree folded_compare, shift;
9134 /* First check whether the comparison would come out
9135 always the same. If we don't do that we would
9136 change the meaning with the masking. */
9137 folded_compare = fold_build2 (code, type,
9138 TREE_OPERAND (varop, 0), arg1);
9139 if (integer_zerop (folded_compare)
9140 || integer_onep (folded_compare))
9141 return omit_one_operand (type, folded_compare, varop);
9143 shift = build_int_cst (NULL_TREE,
9144 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9145 shift = fold_convert (TREE_TYPE (varop), shift);
9146 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9147 newconst, shift);
9148 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9149 newconst, shift);
9152 return fold_build2 (code, type, varop, newconst);
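/* Illustrative sketch (hypothetical example, assuming the front end
   hands fold a POSTINCREMENT_EXPR): the post-increment is rewritten
   in pre-increment form with an adjusted constant.  */
#if 0
int f (int i) { return i++ == 3; }   /* compared as ++i == 4 */
#endif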
9155 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9156 This transformation affects the cases which are handled in later
9157 optimizations involving comparisons with non-negative constants. */
9158 if (TREE_CODE (arg1) == INTEGER_CST
9159 && TREE_CODE (arg0) != INTEGER_CST
9160 && tree_int_cst_sgn (arg1) > 0)
9162 switch (code)
9164 case GE_EXPR:
9165 arg1 = const_binop (MINUS_EXPR, arg1,
9166 build_int_cst (TREE_TYPE (arg1), 1), 0);
9167 return fold_build2 (GT_EXPR, type, arg0,
9168 fold_convert (TREE_TYPE (arg0), arg1));
9170 case LT_EXPR:
9171 arg1 = const_binop (MINUS_EXPR, arg1,
9172 build_int_cst (TREE_TYPE (arg1), 1), 0);
9173 return fold_build2 (LE_EXPR, type, arg0,
9174 fold_convert (TREE_TYPE (arg0), arg1));
9176 default:
9177 break;
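/* Illustrative sketch (hypothetical example): canonicalizing >= and
   < against a positive constant.  */
#if 0
int f (int x) { return x >= 4; }   /* folds to: return x > 3; */
int g (int x) { return x < 4; }    /* folds to: return x <= 3; */
#endif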
9181 /* Comparisons with the highest or lowest possible integer of
9182 the specified size will have known values. */
9184 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9186 if (TREE_CODE (arg1) == INTEGER_CST
9187 && ! TREE_CONSTANT_OVERFLOW (arg1)
9188 && width <= 2 * HOST_BITS_PER_WIDE_INT
9189 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9190 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9192 HOST_WIDE_INT signed_max_hi;
9193 unsigned HOST_WIDE_INT signed_max_lo;
9194 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9196 if (width <= HOST_BITS_PER_WIDE_INT)
9198 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9199 - 1;
9200 signed_max_hi = 0;
9201 max_hi = 0;
9203 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9205 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9206 min_lo = 0;
9207 min_hi = 0;
9209 else
9211 max_lo = signed_max_lo;
9212 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9213 min_hi = -1;
9216 else
9218 width -= HOST_BITS_PER_WIDE_INT;
9219 signed_max_lo = -1;
9220 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9221 - 1;
9222 max_lo = -1;
9223 min_lo = 0;
9225 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9227 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9228 min_hi = 0;
9230 else
9232 max_hi = signed_max_hi;
9233 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9237 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9238 && TREE_INT_CST_LOW (arg1) == max_lo)
9239 switch (code)
9241 case GT_EXPR:
9242 return omit_one_operand (type, integer_zero_node, arg0);
9244 case GE_EXPR:
9245 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9247 case LE_EXPR:
9248 return omit_one_operand (type, integer_one_node, arg0);
9250 case LT_EXPR:
9251 return fold_build2 (NE_EXPR, type, arg0, arg1);
9253 /* The GE_EXPR and LT_EXPR cases above are not normally
9254 reached because of previous transformations. */
9256 default:
9257 break;
9259 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9260 == max_hi
9261 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9262 switch (code)
9264 case GT_EXPR:
9265 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9266 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9267 case LE_EXPR:
9268 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9269 return fold_build2 (NE_EXPR, type, arg0, arg1);
9270 default:
9271 break;
9273 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9274 == min_hi
9275 && TREE_INT_CST_LOW (arg1) == min_lo)
9276 switch (code)
9278 case LT_EXPR:
9279 return omit_one_operand (type, integer_zero_node, arg0);
9281 case LE_EXPR:
9282 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9284 case GE_EXPR:
9285 return omit_one_operand (type, integer_one_node, arg0);
9287 case GT_EXPR:
9288 return fold_build2 (NE_EXPR, type, op0, op1);
9290 default:
9291 break;
9293 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9294 == min_hi
9295 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9296 switch (code)
9298 case GE_EXPR:
9299 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9300 return fold_build2 (NE_EXPR, type, arg0, arg1);
9301 case LT_EXPR:
9302 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9303 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9304 default:
9305 break;
9308 else if (!in_gimple_form
9309 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9310 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9311 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9312 /* signed_type does not work on pointer types. */
9313 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9315 /* The following case also applies to X < signed_max+1
9316 and X >= signed_max+1 because of previous transformations. */
9317 if (code == LE_EXPR || code == GT_EXPR)
9319 tree st0, st1;
9320 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9321 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9322 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9323 type, fold_convert (st0, arg0),
9324 build_int_cst (st1, 0));
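/* Illustrative sketch (hypothetical example, assuming a 32-bit
   unsigned int): comparisons against the extreme values of the type
   have known results.  */
#if 0
int f (unsigned x) { return x <= 0xffffffffu; }   /* folds to: 1 */
int g (unsigned x) { return x > 0xffffffffu; }    /* folds to: 0 */
#endif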
9330 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9331 a MINUS_EXPR of a constant, we can convert it into a comparison with
9332 a revised constant as long as no overflow occurs. */
9333 if ((code == EQ_EXPR || code == NE_EXPR)
9334 && TREE_CODE (arg1) == INTEGER_CST
9335 && (TREE_CODE (arg0) == PLUS_EXPR
9336 || TREE_CODE (arg0) == MINUS_EXPR)
9337 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9338 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9339 ? MINUS_EXPR : PLUS_EXPR,
9340 arg1, TREE_OPERAND (arg0, 1), 0))
9341 && ! TREE_CONSTANT_OVERFLOW (tem))
9342 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9344 /* Similarly for a NEGATE_EXPR. */
9345 else if ((code == EQ_EXPR || code == NE_EXPR)
9346 && TREE_CODE (arg0) == NEGATE_EXPR
9347 && TREE_CODE (arg1) == INTEGER_CST
9348 && 0 != (tem = negate_expr (arg1))
9349 && TREE_CODE (tem) == INTEGER_CST
9350 && ! TREE_CONSTANT_OVERFLOW (tem))
9351 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9353 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9354 for !=. Don't do this for ordered comparisons due to overflow. */
9355 else if ((code == NE_EXPR || code == EQ_EXPR)
9356 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9357 return fold_build2 (code, type,
9358 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
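/* Illustrative sketch (hypothetical examples of the EQ/NE rewrites
   above).  */
#if 0
int f (int x, int y) { return x - y == 0; }   /* folds to: x == y */
int g (int x) { return -x == 5; }             /* folds to: x == -5 */
#endif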
9360 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9361 && (TREE_CODE (arg0) == NOP_EXPR
9362 || TREE_CODE (arg0) == CONVERT_EXPR))
9364 /* If we are widening one operand of an integer comparison,
9365 see if the other operand is similarly being widened. Perhaps we
9366 can do the comparison in the narrower type. */
9367 tem = fold_widened_comparison (code, type, arg0, arg1);
9368 if (tem)
9369 return tem;
9371 /* Or if we are changing signedness. */
9372 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9373 if (tem)
9374 return tem;
9377 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9378 constant, we can simplify it. */
9379 else if (TREE_CODE (arg1) == INTEGER_CST
9380 && (TREE_CODE (arg0) == MIN_EXPR
9381 || TREE_CODE (arg0) == MAX_EXPR)
9382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9384 tem = optimize_minmax_comparison (code, type, op0, op1);
9385 if (tem)
9386 return tem;
9388 return NULL_TREE;
9391 /* If we are comparing an ABS_EXPR with a constant, we can
9392 convert all the cases into explicit comparisons, but they may
9393 well not be faster than doing the ABS and one comparison.
9394 But ABS (X) <= C is a range comparison, which becomes a subtraction
9395 and a comparison, and is probably faster. */
9396 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9397 && TREE_CODE (arg0) == ABS_EXPR
9398 && ! TREE_SIDE_EFFECTS (arg0)
9399 && (0 != (tem = negate_expr (arg1)))
9400 && TREE_CODE (tem) == INTEGER_CST
9401 && ! TREE_CONSTANT_OVERFLOW (tem))
9402 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9403 build2 (GE_EXPR, type,
9404 TREE_OPERAND (arg0, 0), tem),
9405 build2 (LE_EXPR, type,
9406 TREE_OPERAND (arg0, 0), arg1));
9408 /* Convert ABS_EXPR<x> >= 0 to true. */
9409 else if (code == GE_EXPR
9410 && tree_expr_nonnegative_p (arg0)
9411 && (integer_zerop (arg1)
9412 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9413 && real_zerop (arg1))))
9414 return omit_one_operand (type, integer_one_node, arg0);
9416 /* Convert ABS_EXPR<x> < 0 to false. */
9417 else if (code == LT_EXPR
9418 && tree_expr_nonnegative_p (arg0)
9419 && (integer_zerop (arg1) || real_zerop (arg1)))
9420 return omit_one_operand (type, integer_zero_node, arg0);
9422 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9423 else if ((code == EQ_EXPR || code == NE_EXPR)
9424 && TREE_CODE (arg0) == ABS_EXPR
9425 && (integer_zerop (arg1) || real_zerop (arg1)))
9426 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9428 /* If this is an EQ or NE comparison with zero and ARG0 is
9429 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9430 two operations, but the latter can be done in one less insn
9431 on machines that have only two-operand insns or on which a
9432 constant cannot be the first operand. */
9433 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9434 && TREE_CODE (arg0) == BIT_AND_EXPR)
9436 tree arg00 = TREE_OPERAND (arg0, 0);
9437 tree arg01 = TREE_OPERAND (arg0, 1);
9438 if (TREE_CODE (arg00) == LSHIFT_EXPR
9439 && integer_onep (TREE_OPERAND (arg00, 0)))
9440 return
9441 fold_build2 (code, type,
9442 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9443 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9444 arg01, TREE_OPERAND (arg00, 1)),
9445 fold_convert (TREE_TYPE (arg0),
9446 integer_one_node)),
9447 arg1);
9448 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9449 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9450 return
9451 fold_build2 (code, type,
9452 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9453 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9454 arg00, TREE_OPERAND (arg01, 1)),
9455 fold_convert (TREE_TYPE (arg0),
9456 integer_one_node)),
9457 arg1);
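/* Illustrative sketch (hypothetical example): shifting the variable
   instead of the constant 1, per the comment above.  */
#if 0
int f (int bar, int foo) { return ((1 << foo) & bar) != 0; }
/* compared as ((bar >> foo) & 1) != 0 */
#endif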
9460 /* If this is an NE or EQ comparison of zero against the result of a
9461 signed MOD operation whose second operand is a power of 2, make
9462 the MOD operation unsigned since it is simpler and equivalent. */
9463 if ((code == NE_EXPR || code == EQ_EXPR)
9464 && integer_zerop (arg1)
9465 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9466 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9467 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9468 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9469 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9470 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9472 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9473 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9474 fold_convert (newtype,
9475 TREE_OPERAND (arg0, 0)),
9476 fold_convert (newtype,
9477 TREE_OPERAND (arg0, 1)));
9479 return fold_build2 (code, type, newmod,
9480 fold_convert (newtype, arg1));
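/* Illustrative sketch (hypothetical example): for an EQ/NE test
   against zero, a signed modulus by a power of two can be done
   unsigned.  */
#if 0
int f (int x) { return x % 4 == 0; }
/* compared as (unsigned) x % 4u == 0 */
#endif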
9483 /* If this is an NE comparison of zero with an AND of one, remove the
9484 comparison since the AND will give the correct value. */
9485 if (code == NE_EXPR && integer_zerop (arg1)
9486 && TREE_CODE (arg0) == BIT_AND_EXPR
9487 && integer_onep (TREE_OPERAND (arg0, 1)))
9488 return fold_convert (type, arg0);
9490 /* If we have (A & C) == C where C is a power of 2, convert this into
9491 (A & C) != 0. Similarly for NE_EXPR. */
9492 if ((code == EQ_EXPR || code == NE_EXPR)
9493 && TREE_CODE (arg0) == BIT_AND_EXPR
9494 && integer_pow2p (TREE_OPERAND (arg0, 1))
9495 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9496 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9497 arg0, fold_convert (TREE_TYPE (arg0),
9498 integer_zero_node));
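/* Illustrative sketch (hypothetical example): testing a single bit
   against itself becomes a test against zero.  */
#if 0
int f (int a) { return (a & 8) == 8; }   /* folds to: (a & 8) != 0 */
#endif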
9500 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9501 bit, then fold the expression into A < 0 or A >= 0. */
9502 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9503 if (tem)
9504 return tem;
9506 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9507 Similarly for NE_EXPR. */
9508 if ((code == EQ_EXPR || code == NE_EXPR)
9509 && TREE_CODE (arg0) == BIT_AND_EXPR
9510 && TREE_CODE (arg1) == INTEGER_CST
9511 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9513 tree notc = fold_build1 (BIT_NOT_EXPR,
9514 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9515 TREE_OPERAND (arg0, 1));
9516 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9517 arg1, notc);
9518 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9519 if (integer_nonzerop (dandnotc))
9520 return omit_one_operand (type, rslt, arg0);
9523 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9524 Similarly for NE_EXPR. */
9525 if ((code == EQ_EXPR || code == NE_EXPR)
9526 && TREE_CODE (arg0) == BIT_IOR_EXPR
9527 && TREE_CODE (arg1) == INTEGER_CST
9528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9530 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9531 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9532 TREE_OPERAND (arg0, 1), notd);
9533 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9534 if (integer_nonzerop (candnotd))
9535 return omit_one_operand (type, rslt, arg0);
9538 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9539 and similarly for >= into !=. */
9540 if ((code == LT_EXPR || code == GE_EXPR)
9541 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9542 && TREE_CODE (arg1) == LSHIFT_EXPR
9543 && integer_onep (TREE_OPERAND (arg1, 0)))
9544 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9545 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9546 TREE_OPERAND (arg1, 1)),
9547 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9549 else if ((code == LT_EXPR || code == GE_EXPR)
9550 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9551 && (TREE_CODE (arg1) == NOP_EXPR
9552 || TREE_CODE (arg1) == CONVERT_EXPR)
9553 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9554 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9555 return
9556 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9557 fold_convert (TREE_TYPE (arg0),
9558 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9559 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9560 1))),
9561 fold_convert (TREE_TYPE (arg0), integer_zero_node));
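/* Illustrative sketch (hypothetical example, X unsigned).  */
#if 0
int f (unsigned x, int y) { return x < (1u << y); }
/* folds to: (x >> y) == 0; likewise >= becomes != 0.  */
#endif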
9563 /* Simplify comparison of something with itself. (For IEEE
9564 floating-point, we can only do some of these simplifications.) */
9565 if (operand_equal_p (arg0, arg1, 0))
9567 switch (code)
9569 case EQ_EXPR:
9570 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9571 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9572 return constant_boolean_node (1, type);
9573 break;
9575 case GE_EXPR:
9576 case LE_EXPR:
9577 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9578 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9579 return constant_boolean_node (1, type);
9580 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9582 case NE_EXPR:
9583 /* For NE, we can only do this simplification if the operands
9584 are integral or we don't honor IEEE floating point NaNs. */
9585 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9586 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9587 break;
9588 /* ... fall through ... */
9589 case GT_EXPR:
9590 case LT_EXPR:
9591 return constant_boolean_node (0, type);
9592 default:
9593 gcc_unreachable ();
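/* Illustrative sketch (hypothetical examples): self-comparisons fold
   for integers, but NaNs block some floating-point cases.  */
#if 0
int f (int x) { return x == x; }      /* folds to: 1 */
int g (double d) { return d == d; }   /* kept when NaNs are honored;
                                         d might be NaN */
#endif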
9597 /* If we are comparing an expression that just has comparisons
9598 of two integer values, arithmetic expressions of those comparisons,
9599 and constants, we can simplify it. There are only three cases
9600 to check: the two values can either be equal, the first can be
9601 greater, or the second can be greater. Fold the expression for
9602 those three values. Since each value must be 0 or 1, we have
9603 eight possibilities, each of which corresponds to the constant 0
9604 or 1 or one of the six possible comparisons.
9606 This handles common cases like (a > b) == 0 but also handles
9607 expressions like ((x > y) - (y > x)) > 0, which supposedly
9608 occur in macroized code. */
9610 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9612 tree cval1 = 0, cval2 = 0;
9613 int save_p = 0;
9615 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9616 /* Don't handle degenerate cases here; they should already
9617 have been handled anyway. */
9618 && cval1 != 0 && cval2 != 0
9619 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9620 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9621 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9622 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9623 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9624 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9625 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9627 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9628 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9630 /* We can't just pass T to eval_subst in case cval1 or cval2
9631 was the same as ARG1. */
9633 tree high_result
9634 = fold_build2 (code, type,
9635 eval_subst (arg0, cval1, maxval,
9636 cval2, minval),
9637 arg1);
9638 tree equal_result
9639 = fold_build2 (code, type,
9640 eval_subst (arg0, cval1, maxval,
9641 cval2, maxval),
9642 arg1);
9643 tree low_result
9644 = fold_build2 (code, type,
9645 eval_subst (arg0, cval1, minval,
9646 cval2, maxval),
9647 arg1);
9649 /* All three of these results should be 0 or 1. Confirm they
9650 are. Then use those values to select the proper code
9651 to use. */
9653 if ((integer_zerop (high_result)
9654 || integer_onep (high_result))
9655 && (integer_zerop (equal_result)
9656 || integer_onep (equal_result))
9657 && (integer_zerop (low_result)
9658 || integer_onep (low_result)))
9660 /* Make a 3-bit mask with the high-order bit being the
9661 value for `>', the next for `=', and the low for `<'. */
9662 switch ((integer_onep (high_result) * 4)
9663 + (integer_onep (equal_result) * 2)
9664 + integer_onep (low_result))
9666 case 0:
9667 /* Always false. */
9668 return omit_one_operand (type, integer_zero_node, arg0);
9669 case 1:
9670 code = LT_EXPR;
9671 break;
9672 case 2:
9673 code = EQ_EXPR;
9674 break;
9675 case 3:
9676 code = LE_EXPR;
9677 break;
9678 case 4:
9679 code = GT_EXPR;
9680 break;
9681 case 5:
9682 code = NE_EXPR;
9683 break;
9684 case 6:
9685 code = GE_EXPR;
9686 break;
9687 case 7:
9688 /* Always true. */
9689 return omit_one_operand (type, integer_one_node, arg0);
9692 if (save_p)
9693 return save_expr (build2 (code, type, cval1, cval2));
9694 else
9695 return fold_build2 (code, type, cval1, cval2);
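/* Illustrative sketch (hypothetical example): evaluating the
   comparison for the three orderings of A and B yields the 3-bit
   mask 011, i.e. LE_EXPR.  */
#if 0
int f (int a, int b) { return (a > b) == 0; }   /* folds to: a <= b */
#endif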
9700 /* If this is a comparison of a field, we may be able to simplify it. */
9701 if (((TREE_CODE (arg0) == COMPONENT_REF
9702 && lang_hooks.can_use_bit_fields_p ())
9703 || TREE_CODE (arg0) == BIT_FIELD_REF)
9704 && (code == EQ_EXPR || code == NE_EXPR)
9705 /* Handle the constant case even without -O
9706 to make sure the warnings are given. */
9707 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9709 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9710 if (t1)
9711 return t1;
9714 /* Fold a comparison of the address of COMPONENT_REFs with the same
9715 type and component to a comparison of the address of the base
9716 object. In short, &x->a OP &y->a to x OP y and
9717 &x->a OP &y.a to x OP &y */
9718 if (TREE_CODE (arg0) == ADDR_EXPR
9719 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9720 && TREE_CODE (arg1) == ADDR_EXPR
9721 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9723 tree cref0 = TREE_OPERAND (arg0, 0);
9724 tree cref1 = TREE_OPERAND (arg1, 0);
9725 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9727 tree op0 = TREE_OPERAND (cref0, 0);
9728 tree op1 = TREE_OPERAND (cref1, 0);
9729 return fold_build2 (code, type,
9730 build_fold_addr_expr (op0),
9731 build_fold_addr_expr (op1));
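/* Illustrative sketch (hypothetical example): comparing addresses of
   the same component reduces to comparing the base pointers.  */
#if 0
struct s { int a; };
int f (struct s *x, struct s *y) { return &x->a == &y->a; }
/* folds to: return x == y; */
#endif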
9735 /* Optimize comparisons of strlen vs zero to a compare of the
9736 first character of the string vs zero. To wit,
9737 strlen(ptr) == 0 => *ptr == 0
9738 strlen(ptr) != 0 => *ptr != 0
9739 Other cases should reduce to one of these two (or a constant)
9740 due to the return value of strlen being unsigned. */
9741 if ((code == EQ_EXPR || code == NE_EXPR)
9742 && integer_zerop (arg1)
9743 && TREE_CODE (arg0) == CALL_EXPR)
9745 tree fndecl = get_callee_fndecl (arg0);
9746 tree arglist;
9748 if (fndecl
9749 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9750 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9751 && (arglist = TREE_OPERAND (arg0, 1))
9752 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9753 && ! TREE_CHAIN (arglist))
9755 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9756 return fold_build2 (code, type, iref,
9757 build_int_cst (TREE_TYPE (iref), 0));
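/* Illustrative sketch (hypothetical example): the strlen call is
   replaced by a load of the first character.  */
#if 0
#include <string.h>
int f (const char *p) { return strlen (p) == 0; }
/* folds to: return *p == 0; */
#endif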
9761 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9762 into a single range test. */
9763 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9764 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9765 && TREE_CODE (arg1) == INTEGER_CST
9766 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9767 && !integer_zerop (TREE_OPERAND (arg0, 1))
9768 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9769 && !TREE_OVERFLOW (arg1))
9771 t1 = fold_div_compare (code, type, arg0, arg1);
9772 if (t1 != NULL_TREE)
9773 return t1;
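/* Illustrative sketch (hypothetical example): a division compared
   against a constant becomes a single range test.  */
#if 0
int f (int x) { return x / 4 == 3; }
/* fold_div_compare yields: x >= 12 && x <= 15 */
#endif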
9776 if ((code == EQ_EXPR || code == NE_EXPR)
9777 && integer_zerop (arg1)
9778 && tree_expr_nonzero_p (arg0))
9780 tree res = constant_boolean_node (code==NE_EXPR, type);
9781 return omit_one_operand (type, res, arg0);
9784 t1 = fold_relational_const (code, type, arg0, arg1);
9785 return t1 == NULL_TREE ? NULL_TREE : t1;
9787 case UNORDERED_EXPR:
9788 case ORDERED_EXPR:
9789 case UNLT_EXPR:
9790 case UNLE_EXPR:
9791 case UNGT_EXPR:
9792 case UNGE_EXPR:
9793 case UNEQ_EXPR:
9794 case LTGT_EXPR:
9795 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9797 t1 = fold_relational_const (code, type, arg0, arg1);
9798 if (t1 != NULL_TREE)
9799 return t1;
9802 /* If the first operand is NaN, the result is constant. */
9803 if (TREE_CODE (arg0) == REAL_CST
9804 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9805 && (code != LTGT_EXPR || ! flag_trapping_math))
9807 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9808 ? integer_zero_node
9809 : integer_one_node;
9810 return omit_one_operand (type, t1, arg1);
9813 /* If the second operand is NaN, the result is constant. */
9814 if (TREE_CODE (arg1) == REAL_CST
9815 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9816 && (code != LTGT_EXPR || ! flag_trapping_math))
9818 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9819 ? integer_zero_node
9820 : integer_one_node;
9821 return omit_one_operand (type, t1, arg0);
9824 /* Simplify unordered comparison of something with itself. */
9825 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9826 && operand_equal_p (arg0, arg1, 0))
9827 return constant_boolean_node (1, type);
9829 if (code == LTGT_EXPR
9830 && !flag_trapping_math
9831 && operand_equal_p (arg0, arg1, 0))
9832 return constant_boolean_node (0, type);
9834 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9836 tree targ0 = strip_float_extensions (arg0);
9837 tree targ1 = strip_float_extensions (arg1);
9838 tree newtype = TREE_TYPE (targ0);
9840 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9841 newtype = TREE_TYPE (targ1);
9843 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9844 return fold_build2 (code, type, fold_convert (newtype, targ0),
9845 fold_convert (newtype, targ1));
9848 return NULL_TREE;
9850 case COMPOUND_EXPR:
9851 /* When pedantic, a compound expression can be neither an lvalue
9852 nor an integer constant expression. */
9853 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9854 return NULL_TREE;
9855 /* Don't let (0, 0) be a null pointer constant. */
9856 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9857 : fold_convert (type, arg1);
9858 return pedantic_non_lvalue (tem);
9860 case COMPLEX_EXPR:
9861 if (wins)
9862 return build_complex (type, arg0, arg1);
9863 return NULL_TREE;
9865 case ASSERT_EXPR:
9866 /* An ASSERT_EXPR should never be passed to fold_binary. */
9867 gcc_unreachable ();
9869 default:
9870 return NULL_TREE;
9871 } /* switch (code) */
9874 /* Callback for walk_tree, looking for LABEL_EXPR.
9875 Returns *TP if it is a LABEL_EXPR. Otherwise it returns NULL_TREE.
9876 Do not check the sub-tree of GOTO_EXPR. */
9878 static tree
9879 contains_label_1 (tree *tp,
9880 int *walk_subtrees,
9881 void *data ATTRIBUTE_UNUSED)
9883 switch (TREE_CODE (*tp))
9885 case LABEL_EXPR:
9886 return *tp;
9887 case GOTO_EXPR:
9888 *walk_subtrees = 0;
9889 /* no break */
9890 default:
9891 return NULL_TREE;
9895 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9896 accessible from outside the sub-tree. Returns true if such a
9897 label is found, false otherwise. */
9899 static bool
9900 contains_label_p (tree st)
9902 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9905 /* Fold a ternary expression of code CODE and type TYPE with operands
9906 OP0, OP1, and OP2. Return the folded expression if folding is
9907 successful. Otherwise, return NULL_TREE. */
9909 tree
9910 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9912 tree tem;
9913 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9914 enum tree_code_class kind = TREE_CODE_CLASS (code);
9916 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9917 && TREE_CODE_LENGTH (code) == 3);
9919 /* Strip any conversions that don't change the mode. This is safe
9920 for every expression, except for a comparison expression because
9921 its signedness is derived from its operands. So, in the latter
9922 case, only strip conversions that don't change the signedness.
9924 Note that this is done as an internal manipulation within the
9925 constant folder, in order to find the simplest representation of
9926 the arguments so that their form can be studied. In any case,
9927 the appropriate type conversions should be put back in the tree
9928 that will get out of the constant folder. */
9929 if (op0)
9931 arg0 = op0;
9932 STRIP_NOPS (arg0);
9935 if (op1)
9937 arg1 = op1;
9938 STRIP_NOPS (arg1);
9941 switch (code)
9943 case COMPONENT_REF:
9944 if (TREE_CODE (arg0) == CONSTRUCTOR
9945 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9947 unsigned HOST_WIDE_INT idx;
9948 tree field, value;
9949 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9950 if (field == arg1)
9951 return value;
9953 return NULL_TREE;
9955 case COND_EXPR:
9956 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9957 so all simple results must be passed through pedantic_non_lvalue. */
9958 if (TREE_CODE (arg0) == INTEGER_CST)
9960 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9961 tem = integer_zerop (arg0) ? op2 : op1;
9962 /* Only optimize constant conditions when the selected branch
9963 has the same type as the COND_EXPR. This avoids optimizing
9964 away "c ? x : throw", where the throw has a void type.
9965 Likewise, avoid discarding an unused operand that contains a label. */
9966 if ((!TREE_SIDE_EFFECTS (unused_op)
9967 || !contains_label_p (unused_op))
9968 && (! VOID_TYPE_P (TREE_TYPE (tem))
9969 || VOID_TYPE_P (type)))
9970 return pedantic_non_lvalue (tem);
9971 return NULL_TREE;
9973 if (operand_equal_p (arg1, op2, 0))
9974 return pedantic_omit_one_operand (type, arg1, arg0);
9976 /* If we have A op B ? A : C, we may be able to convert this to a
9977 simpler expression, depending on the operation and the values
9978 of B and C. Signed zeros prevent all of these transformations,
9979 for reasons given above each one.
9981 Also try swapping the arguments and inverting the conditional. */
9982 if (COMPARISON_CLASS_P (arg0)
9983 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9984 arg1, TREE_OPERAND (arg0, 1))
9985 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9987 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9988 if (tem)
9989 return tem;
9992 if (COMPARISON_CLASS_P (arg0)
9993 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9994 op2,
9995 TREE_OPERAND (arg0, 1))
9996 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9998 tem = invert_truthvalue (arg0);
9999 if (COMPARISON_CLASS_P (tem))
10001 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10002 if (tem)
10003 return tem;
10007 /* If the second operand is simpler than the third, swap them
10008 since that produces better jump optimization results. */
10009 if (truth_value_p (TREE_CODE (arg0))
10010 && tree_swap_operands_p (op1, op2, false))
10012 /* See if this can be inverted. If it can't, possibly because
10013 it was a floating-point inequality comparison, don't do
10014 anything. */
10015 tem = invert_truthvalue (arg0);
10017 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10018 return fold_build3 (code, type, tem, op2, op1);
10021 /* Convert A ? 1 : 0 to simply A. */
10022 if (integer_onep (op1)
10023 && integer_zerop (op2)
10024 /* If we try to convert OP0 to our type, the
10025 call to fold will try to move the conversion inside
10026 a COND, which will recurse. In that case, the COND_EXPR
10027 is probably the best choice, so leave it alone. */
10028 && type == TREE_TYPE (arg0))
10029 return pedantic_non_lvalue (arg0);
10031 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10032 over COND_EXPR in cases such as floating point comparisons. */
10033 if (integer_zerop (op1)
10034 && integer_onep (op2)
10035 && truth_value_p (TREE_CODE (arg0)))
10036 return pedantic_non_lvalue (fold_convert (type,
10037 invert_truthvalue (arg0)));
10039 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10040 if (TREE_CODE (arg0) == LT_EXPR
10041 && integer_zerop (TREE_OPERAND (arg0, 1))
10042 && integer_zerop (op2)
10043 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10044 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10045 TREE_TYPE (tem), tem, arg1));
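/* Illustrative sketch (hypothetical example, assuming a 32-bit int
   whose sign bit is INT_MIN).  */
#if 0
#include <limits.h>
int f (int a) { return a < 0 ? INT_MIN : 0; }
/* folds to: return a & INT_MIN; */
#endif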
10047 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10048 already handled above. */
10049 if (TREE_CODE (arg0) == BIT_AND_EXPR
10050 && integer_onep (TREE_OPERAND (arg0, 1))
10051 && integer_zerop (op2)
10052 && integer_pow2p (arg1))
10054 tree tem = TREE_OPERAND (arg0, 0);
10055 STRIP_NOPS (tem);
10056 if (TREE_CODE (tem) == RSHIFT_EXPR
10057 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10058 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10059 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10060 return fold_build2 (BIT_AND_EXPR, type,
10061 TREE_OPERAND (tem, 0), arg1);
10064 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10065 is probably obsolete because the first operand should be a
10066 truth value (that's why we have the two cases above), but let's
10067 leave it in until we can confirm this for all front-ends. */
10068 if (integer_zerop (op2)
10069 && TREE_CODE (arg0) == NE_EXPR
10070 && integer_zerop (TREE_OPERAND (arg0, 1))
10071 && integer_pow2p (arg1)
10072 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10073 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10074 arg1, OEP_ONLY_CONST))
10075 return pedantic_non_lvalue (fold_convert (type,
10076 TREE_OPERAND (arg0, 0)));
10078 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10079 if (integer_zerop (op2)
10080 && truth_value_p (TREE_CODE (arg0))
10081 && truth_value_p (TREE_CODE (arg1)))
10082 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10084 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10085 if (integer_onep (op2)
10086 && truth_value_p (TREE_CODE (arg0))
10087 && truth_value_p (TREE_CODE (arg1)))
10089 /* Only perform transformation if ARG0 is easily inverted. */
10090 tem = invert_truthvalue (arg0);
10091 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10092 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10095 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10096 if (integer_zerop (arg1)
10097 && truth_value_p (TREE_CODE (arg0))
10098 && truth_value_p (TREE_CODE (op2)))
10100 /* Only perform transformation if ARG0 is easily inverted. */
10101 tem = invert_truthvalue (arg0);
10102 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10103 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10106 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10107 if (integer_onep (arg1)
10108 && truth_value_p (TREE_CODE (arg0))
10109 && truth_value_p (TREE_CODE (op2)))
10110 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
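/* Illustrative sketch (hypothetical examples, with comparisons as
   the truth values): the COND_EXPR rewrites above.  */
#if 0
int f (int a, int b) { return a > 0 ? b > 0 : 0; }
/* folds to: a > 0 && b > 0 */
int g (int a) { return a > 0 ? 0 : 1; }
/* folds to: !(a > 0) */
#endif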
10112 return NULL_TREE;
10114 case CALL_EXPR:
10115 /* Check for a built-in function. */
10116 if (TREE_CODE (op0) == ADDR_EXPR
10117 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10118 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10119 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10120 return NULL_TREE;
10122 case BIT_FIELD_REF:
10123 if (TREE_CODE (arg0) == VECTOR_CST
10124 && type == TREE_TYPE (TREE_TYPE (arg0))
10125 && host_integerp (arg1, 1)
10126 && host_integerp (op2, 1))
10128 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10129 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10131 if (width != 0
10132 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10133 && (idx % width) == 0
10134 && (idx = idx / width)
10135 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10137 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10138 while (idx-- > 0 && elements)
10139 elements = TREE_CHAIN (elements);
10140 if (elements)
10141 return TREE_VALUE (elements);
10142 else
10143 return fold_convert (type, integer_zero_node);
10146 return NULL_TREE;
10148 default:
10149 return NULL_TREE;
10150 } /* switch (code) */
10153 /* Perform constant folding and related simplification of EXPR.
10154 The related simplifications include x*1 => x, x*0 => 0, etc.,
10155 and application of the associative law.
10156 NOP_EXPR conversions may be removed freely (as long as we
10157 are careful not to change the type of the overall expression).
10158 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10159 but we can constant-fold them if they have constant operands. */
10161 #ifdef ENABLE_FOLD_CHECKING
10162 # define fold(x) fold_1 (x)
10163 static tree fold_1 (tree);
10164 static
10165 #endif
10166 tree
10167 fold (tree expr)
10169 const tree t = expr;
10170 enum tree_code code = TREE_CODE (t);
10171 enum tree_code_class kind = TREE_CODE_CLASS (code);
10172 tree tem;
10174 /* Return right away if a constant. */
10175 if (kind == tcc_constant)
10176 return t;
10178 if (IS_EXPR_CODE_CLASS (kind))
10180 tree type = TREE_TYPE (t);
10181 tree op0, op1, op2;
10183 switch (TREE_CODE_LENGTH (code))
10185 case 1:
10186 op0 = TREE_OPERAND (t, 0);
10187 tem = fold_unary (code, type, op0);
10188 return tem ? tem : expr;
10189 case 2:
10190 op0 = TREE_OPERAND (t, 0);
10191 op1 = TREE_OPERAND (t, 1);
10192 tem = fold_binary (code, type, op0, op1);
10193 return tem ? tem : expr;
10194 case 3:
10195 op0 = TREE_OPERAND (t, 0);
10196 op1 = TREE_OPERAND (t, 1);
10197 op2 = TREE_OPERAND (t, 2);
10198 tem = fold_ternary (code, type, op0, op1, op2);
10199 return tem ? tem : expr;
10200 default:
10201 break;
10205 switch (code)
10207 case CONST_DECL:
10208 return fold (DECL_INITIAL (t));
10210 default:
10211 return t;
10212 } /* switch (code) */
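/* Illustrative sketch (hypothetical caller): a typical client builds
   a tree and lets fold collapse it.  */
#if 0
tree sum = build2 (PLUS_EXPR, integer_type_node,
                   build_int_cst (integer_type_node, 2),
                   build_int_cst (integer_type_node, 3));
tree five = fold (sum);   /* an INTEGER_CST with value 5 */
#endif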
10215 #ifdef ENABLE_FOLD_CHECKING
10216 #undef fold
10218 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10219 static void fold_check_failed (tree, tree);
10220 void print_fold_checksum (tree);
10222 /* When --enable-checking=fold, compute a digest of expr before
10223 and after the actual fold call to verify that fold did not
10224 accidentally change the original expr. */
10226 tree
10227 fold (tree expr)
10229 tree ret;
10230 struct md5_ctx ctx;
10231 unsigned char checksum_before[16], checksum_after[16];
10232 htab_t ht;
10234 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10235 md5_init_ctx (&ctx);
10236 fold_checksum_tree (expr, &ctx, ht);
10237 md5_finish_ctx (&ctx, checksum_before);
10238 htab_empty (ht);
10240 ret = fold_1 (expr);
10242 md5_init_ctx (&ctx);
10243 fold_checksum_tree (expr, &ctx, ht);
10244 md5_finish_ctx (&ctx, checksum_after);
10245 htab_delete (ht);
10247 if (memcmp (checksum_before, checksum_after, 16))
10248 fold_check_failed (expr, ret);
10250 return ret;
10253 void
10254 print_fold_checksum (tree expr)
10256 struct md5_ctx ctx;
10257 unsigned char checksum[16], cnt;
10258 htab_t ht;
10260 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10261 md5_init_ctx (&ctx);
10262 fold_checksum_tree (expr, &ctx, ht);
10263 md5_finish_ctx (&ctx, checksum);
10264 htab_delete (ht);
10265 for (cnt = 0; cnt < 16; ++cnt)
10266 fprintf (stderr, "%02x", checksum[cnt]);
10267 putc ('\n', stderr);
10270 static void
10271 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10273 internal_error ("fold check: original tree changed by fold");
10276 static void
10277 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10279 void **slot;
10280 enum tree_code code;
10281 char buf[sizeof (struct tree_function_decl)];
10282 int i, len;
10284 recursive_label:
10286 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10287 <= sizeof (struct tree_function_decl))
10288 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10289 if (expr == NULL)
10290 return;
10291 slot = htab_find_slot (ht, expr, INSERT);
10292 if (*slot != NULL)
10293 return;
10294 *slot = expr;
10295 code = TREE_CODE (expr);
10296 if (TREE_CODE_CLASS (code) == tcc_declaration
10297 && DECL_ASSEMBLER_NAME_SET_P (expr))
10299 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10300 memcpy (buf, expr, tree_size (expr));
10301 expr = (tree) buf;
10302 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10304 else if (TREE_CODE_CLASS (code) == tcc_type
10305 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10306 || TYPE_CACHED_VALUES_P (expr)
10307 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10309 /* Allow these fields to be modified. */
10310 memcpy (buf, expr, tree_size (expr));
10311 expr = (tree) buf;
10312 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10313 TYPE_POINTER_TO (expr) = NULL;
10314 TYPE_REFERENCE_TO (expr) = NULL;
10315 if (TYPE_CACHED_VALUES_P (expr))
10317 TYPE_CACHED_VALUES_P (expr) = 0;
10318 TYPE_CACHED_VALUES (expr) = NULL;
10321 md5_process_bytes (expr, tree_size (expr), ctx);
10322 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10323 if (TREE_CODE_CLASS (code) != tcc_type
10324 && TREE_CODE_CLASS (code) != tcc_declaration
10325 && code != TREE_LIST)
10326 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10327 switch (TREE_CODE_CLASS (code))
10329 case tcc_constant:
10330 switch (code)
10332 case STRING_CST:
10333 md5_process_bytes (TREE_STRING_POINTER (expr),
10334 TREE_STRING_LENGTH (expr), ctx);
10335 break;
10336 case COMPLEX_CST:
10337 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10338 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10339 break;
10340 case VECTOR_CST:
10341 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10342 break;
10343 default:
10344 break;
10346 break;
10347 case tcc_exceptional:
10348 switch (code)
10350 case TREE_LIST:
10351 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10352 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10353 expr = TREE_CHAIN (expr);
10354 goto recursive_label;
10355 break;
10356 case TREE_VEC:
10357 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10358 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10359 break;
10360 default:
10361 break;
10363 break;
10364 case tcc_expression:
10365 case tcc_reference:
10366 case tcc_comparison:
10367 case tcc_unary:
10368 case tcc_binary:
10369 case tcc_statement:
10370 len = TREE_CODE_LENGTH (code);
10371 for (i = 0; i < len; ++i)
10372 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10373 break;
10374 case tcc_declaration:
10375 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10376 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10377 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10378 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10379 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10380 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10381 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10382 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10383 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10385 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10387 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10388 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10389 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10391 break;
10392 case tcc_type:
10393 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10394 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10395 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10396 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10397 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10398 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10399 if (INTEGRAL_TYPE_P (expr)
10400 || SCALAR_FLOAT_TYPE_P (expr))
10402 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10403 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10405 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10406 if (TREE_CODE (expr) == RECORD_TYPE
10407 || TREE_CODE (expr) == UNION_TYPE
10408 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10409 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10410 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10411 break;
10412 default:
10413 break;
10417 #endif
10419 /* Fold a unary tree expression with code CODE of type TYPE with an
10420 operand OP0. Return a folded expression if successful. Otherwise,
10421 return a tree expression with code CODE of type TYPE with an
10422 operand OP0. */
10424 tree
10425 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10427 tree tem;
10428 #ifdef ENABLE_FOLD_CHECKING
10429 unsigned char checksum_before[16], checksum_after[16];
10430 struct md5_ctx ctx;
10431 htab_t ht;
10433 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10434 md5_init_ctx (&ctx);
10435 fold_checksum_tree (op0, &ctx, ht);
10436 md5_finish_ctx (&ctx, checksum_before);
10437 htab_empty (ht);
10438 #endif
10440 tem = fold_unary (code, type, op0);
10441 if (!tem)
10442 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10444 #ifdef ENABLE_FOLD_CHECKING
10445 md5_init_ctx (&ctx);
10446 fold_checksum_tree (op0, &ctx, ht);
10447 md5_finish_ctx (&ctx, checksum_after);
10448 htab_delete (ht);
10450 if (memcmp (checksum_before, checksum_after, 16))
10451 fold_check_failed (op0, tem);
10452 #endif
10453 return tem;
10456 /* Fold a binary tree expression with code CODE of type TYPE with
10457 operands OP0 and OP1. Return a folded expression if successful.
10458 Otherwise, return a tree expression with code CODE of type TYPE
10459 with operands OP0 and OP1. */
10461 tree
10462 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10463 MEM_STAT_DECL)
10465 tree tem;
10466 #ifdef ENABLE_FOLD_CHECKING
10467 unsigned char checksum_before_op0[16],
10468 checksum_before_op1[16],
10469 checksum_after_op0[16],
10470 checksum_after_op1[16];
10471 struct md5_ctx ctx;
10472 htab_t ht;
10474 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10475 md5_init_ctx (&ctx);
10476 fold_checksum_tree (op0, &ctx, ht);
10477 md5_finish_ctx (&ctx, checksum_before_op0);
10478 htab_empty (ht);
10480 md5_init_ctx (&ctx);
10481 fold_checksum_tree (op1, &ctx, ht);
10482 md5_finish_ctx (&ctx, checksum_before_op1);
10483 htab_empty (ht);
10484 #endif
10486 tem = fold_binary (code, type, op0, op1);
10487 if (!tem)
10488 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10490 #ifdef ENABLE_FOLD_CHECKING
10491 md5_init_ctx (&ctx);
10492 fold_checksum_tree (op0, &ctx, ht);
10493 md5_finish_ctx (&ctx, checksum_after_op0);
10494 htab_empty (ht);
10496 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10497 fold_check_failed (op0, tem);
10499 md5_init_ctx (&ctx);
10500 fold_checksum_tree (op1, &ctx, ht);
10501 md5_finish_ctx (&ctx, checksum_after_op1);
10502 htab_delete (ht);
10504 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10505 fold_check_failed (op1, tem);
10506 #endif
10507 return tem;
10510 /* Fold a ternary tree expression with code CODE of type TYPE with
10511 operands OP0, OP1, and OP2. Return a folded expression if
10512 successful. Otherwise, return a tree expression with code CODE of
10513 type TYPE with operands OP0, OP1, and OP2. */
10515 tree
10516 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10517 MEM_STAT_DECL)
10519 tree tem;
10520 #ifdef ENABLE_FOLD_CHECKING
10521 unsigned char checksum_before_op0[16],
10522 checksum_before_op1[16],
10523 checksum_before_op2[16],
10524 checksum_after_op0[16],
10525 checksum_after_op1[16],
10526 checksum_after_op2[16];
10527 struct md5_ctx ctx;
10528 htab_t ht;
10530 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10531 md5_init_ctx (&ctx);
10532 fold_checksum_tree (op0, &ctx, ht);
10533 md5_finish_ctx (&ctx, checksum_before_op0);
10534 htab_empty (ht);
10536 md5_init_ctx (&ctx);
10537 fold_checksum_tree (op1, &ctx, ht);
10538 md5_finish_ctx (&ctx, checksum_before_op1);
10539 htab_empty (ht);
10541 md5_init_ctx (&ctx);
10542 fold_checksum_tree (op2, &ctx, ht);
10543 md5_finish_ctx (&ctx, checksum_before_op2);
10544 htab_empty (ht);
10545 #endif
10547 tem = fold_ternary (code, type, op0, op1, op2);
10548 if (!tem)
10549 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10551 #ifdef ENABLE_FOLD_CHECKING
10552 md5_init_ctx (&ctx);
10553 fold_checksum_tree (op0, &ctx, ht);
10554 md5_finish_ctx (&ctx, checksum_after_op0);
10555 htab_empty (ht);
10557 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10558 fold_check_failed (op0, tem);
10560 md5_init_ctx (&ctx);
10561 fold_checksum_tree (op1, &ctx, ht);
10562 md5_finish_ctx (&ctx, checksum_after_op1);
10563 htab_empty (ht);
10565 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10566 fold_check_failed (op1, tem);
10568 md5_init_ctx (&ctx);
10569 fold_checksum_tree (op2, &ctx, ht);
10570 md5_finish_ctx (&ctx, checksum_after_op2);
10571 htab_delete (ht);
10573 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10574 fold_check_failed (op2, tem);
10575 #endif
10576 return tem;
10579 /* Perform constant folding and related simplification of initializer
10580 expression EXPR. These behave identically to "fold_buildN" but ignore
10581 potential run-time traps and exceptions that fold must preserve. */
10583 #define START_FOLD_INIT \
10584 int saved_signaling_nans = flag_signaling_nans;\
10585 int saved_trapping_math = flag_trapping_math;\
10586 int saved_rounding_math = flag_rounding_math;\
10587 int saved_trapv = flag_trapv;\
10588 flag_signaling_nans = 0;\
10589 flag_trapping_math = 0;\
10590 flag_rounding_math = 0;\
10591 flag_trapv = 0
10593 #define END_FOLD_INIT \
10594 flag_signaling_nans = saved_signaling_nans;\
10595 flag_trapping_math = saved_trapping_math;\
10596 flag_rounding_math = saved_rounding_math;\
10597 flag_trapv = saved_trapv
10599 tree
10600 fold_build1_initializer (enum tree_code code, tree type, tree op)
10602 tree result;
10603 START_FOLD_INIT;
10605 result = fold_build1 (code, type, op);
10607 END_FOLD_INIT;
10608 return result;
10611 tree
10612 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10614 tree result;
10615 START_FOLD_INIT;
10617 result = fold_build2 (code, type, op0, op1);
10619 END_FOLD_INIT;
10620 return result;
10623 tree
10624 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10625 tree op2)
10627 tree result;
10628 START_FOLD_INIT;
10630 result = fold_build3 (code, type, op0, op1, op2);
10632 END_FOLD_INIT;
10633 return result;
10636 #undef START_FOLD_INIT
10637 #undef END_FOLD_INIT
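/* Sketch of a typical call (hypothetical helper, for illustration only).
   Because START_FOLD_INIT clears the trap and rounding flags, a constant
   division in a static initializer folds even under -ftrapv or
   -frounding-math:  */
#if 0
static tree
example_fold_init_div (tree a, tree b)
{
  /* Unlike fold_build2, this may fold away operations whose run-time
     traps fold would otherwise have to preserve.  */
  return fold_build2_initializer (TRUNC_DIV_EXPR, TREE_TYPE (a), a, b);
}
#endif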
10639 /* Determine if the first argument is a multiple of the second argument.
10640 Return 0 if it is not, or if we cannot easily determine it to be.
10642 An example of the sort of thing we care about (at this point; this routine
10643 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10644 fold cases do now) is discovering that
10646 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10648 is a multiple of
10650 SAVE_EXPR (J * 8)
10652 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10654 This code also handles discovering that
10656 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10658 is a multiple of 8 so we don't have to worry about dealing with a
10659 possible remainder.
10661 Note that we *look* inside a SAVE_EXPR only to determine how it was
10662 calculated; it is not safe for fold to do much of anything else with the
10663 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10664 at run time. For example, the latter example above *cannot* be implemented
10665 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10666 evaluation time of the original SAVE_EXPR is not necessarily the same at
10667 the time the new expression is evaluated. The only optimization of this
10668 sort that would be valid is changing
10670 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10672 divided by 8 to
10674 SAVE_EXPR (I) * SAVE_EXPR (J)
10676 (where the same SAVE_EXPR (J) is used in the original and the
10677 transformed version). */
10679 static int
10680 multiple_of_p (tree type, tree top, tree bottom)
10682 if (operand_equal_p (top, bottom, 0))
10683 return 1;
10685 if (TREE_CODE (type) != INTEGER_TYPE)
10686 return 0;
10688 switch (TREE_CODE (top))
10690 case BIT_AND_EXPR:
10691 /* Bitwise and provides a power of two multiple. If the mask is
10692 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10693 if (!integer_pow2p (bottom))
10694 return 0;
10695 /* FALLTHRU */
10697 case MULT_EXPR:
10698 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10699 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10701 case PLUS_EXPR:
10702 case MINUS_EXPR:
10703 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10704 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10706 case LSHIFT_EXPR:
10707 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10709 tree op1, t1;
10711 op1 = TREE_OPERAND (top, 1);
10712 /* const_binop may not detect overflow correctly,
10713 so check for it explicitly here. */
10714 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10715 > TREE_INT_CST_LOW (op1)
10716 && TREE_INT_CST_HIGH (op1) == 0
10717 && 0 != (t1 = fold_convert (type,
10718 const_binop (LSHIFT_EXPR,
10719 size_one_node,
10720 op1, 0)))
10721 && ! TREE_OVERFLOW (t1))
10722 return multiple_of_p (type, t1, bottom);
10724 return 0;
10726 case NOP_EXPR:
10727 /* Can't handle conversions from non-integral or wider integral type. */
10728 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10729 || (TYPE_PRECISION (type)
10730 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10731 return 0;
10733 /* ... fall through ... */
10735 case SAVE_EXPR:
10736 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10738 case INTEGER_CST:
10739 if (TREE_CODE (bottom) != INTEGER_CST
10740 || (TYPE_UNSIGNED (type)
10741 && (tree_int_cst_sgn (top) < 0
10742 || tree_int_cst_sgn (bottom) < 0)))
10743 return 0;
10744 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10745 top, bottom, 0));
10747 default:
10748 return 0;
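/* Worked example (hypothetical helper, illustration only): is I * 8
   provably a multiple of 4?  The MULT_EXPR case recurses into either
   factor, and 8 % 4 == 0 via the INTEGER_CST case, so this returns 1;
   for I * 6 it would return 0.  */
#if 0
static int
example_multiple_of_4 (tree i)
{
  tree type = TREE_TYPE (i);
  tree top = build2 (MULT_EXPR, type, i, build_int_cst (type, 8));
  return multiple_of_p (type, top, build_int_cst (type, 4));
}
#endif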
10752 /* Return true if `t' is known to be non-negative. */
10755 tree_expr_nonnegative_p (tree t)
10757 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10758 return 1;
10760 switch (TREE_CODE (t))
10762 case ABS_EXPR:
10763 /* We can't return 1 if flag_wrapv is set because
10764 ABS_EXPR<INT_MIN> = INT_MIN. */
10765 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10766 return 1;
10767 break;
10769 case INTEGER_CST:
10770 return tree_int_cst_sgn (t) >= 0;
10772 case REAL_CST:
10773 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10775 case PLUS_EXPR:
10776 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10777 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10778 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10780 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10781 both unsigned and at least 2 bits shorter than the result. */
10782 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10783 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10784 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10786 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10787 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10788 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10789 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10791 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10792 TYPE_PRECISION (inner2)) + 1;
10793 return prec < TYPE_PRECISION (TREE_TYPE (t));
10796 break;
10798 case MULT_EXPR:
10799 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10801 /* x * x for floating point x is always non-negative. */
10802 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10803 return 1;
10804 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10805 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10808 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10809 both unsigned and their combined precision is less than that of the result. */
10810 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10811 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10812 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10814 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10815 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10816 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10817 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10818 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10819 < TYPE_PRECISION (TREE_TYPE (t));
10821 return 0;
10823 case BIT_AND_EXPR:
10824 case MAX_EXPR:
10825 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10826 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10828 case BIT_IOR_EXPR:
10829 case BIT_XOR_EXPR:
10830 case MIN_EXPR:
10831 case RDIV_EXPR:
10832 case TRUNC_DIV_EXPR:
10833 case CEIL_DIV_EXPR:
10834 case FLOOR_DIV_EXPR:
10835 case ROUND_DIV_EXPR:
10836 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10837 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10839 case TRUNC_MOD_EXPR:
10840 case CEIL_MOD_EXPR:
10841 case FLOOR_MOD_EXPR:
10842 case ROUND_MOD_EXPR:
10843 case SAVE_EXPR:
10844 case NON_LVALUE_EXPR:
10845 case FLOAT_EXPR:
10846 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10848 case COMPOUND_EXPR:
10849 case MODIFY_EXPR:
10850 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10852 case BIND_EXPR:
10853 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10855 case COND_EXPR:
10856 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10857 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10859 case NOP_EXPR:
10861 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10862 tree outer_type = TREE_TYPE (t);
10864 if (TREE_CODE (outer_type) == REAL_TYPE)
10866 if (TREE_CODE (inner_type) == REAL_TYPE)
10867 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10868 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10870 if (TYPE_UNSIGNED (inner_type))
10871 return 1;
10872 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10875 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10877 if (TREE_CODE (inner_type) == REAL_TYPE)
10878 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10879 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10880 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10881 && TYPE_UNSIGNED (inner_type);
10884 break;
10886 case TARGET_EXPR:
10888 tree temp = TARGET_EXPR_SLOT (t);
10889 t = TARGET_EXPR_INITIAL (t);
10891 /* If the initializer is non-void, then it's a normal expression
10892 that will be assigned to the slot. */
10893 if (!VOID_TYPE_P (t))
10894 return tree_expr_nonnegative_p (t);
10896 /* Otherwise, the initializer sets the slot in some way. One common
10897 way is an assignment statement at the end of the initializer. */
10898 while (1)
10900 if (TREE_CODE (t) == BIND_EXPR)
10901 t = expr_last (BIND_EXPR_BODY (t));
10902 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10903 || TREE_CODE (t) == TRY_CATCH_EXPR)
10904 t = expr_last (TREE_OPERAND (t, 0));
10905 else if (TREE_CODE (t) == STATEMENT_LIST)
10906 t = expr_last (t);
10907 else
10908 break;
10910 if (TREE_CODE (t) == MODIFY_EXPR
10911 && TREE_OPERAND (t, 0) == temp)
10912 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10914 return 0;
10917 case CALL_EXPR:
10919 tree fndecl = get_callee_fndecl (t);
10920 tree arglist = TREE_OPERAND (t, 1);
10921 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10922 switch (DECL_FUNCTION_CODE (fndecl))
10924 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10925 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10926 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10927 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10929 CASE_BUILTIN_F (BUILT_IN_ACOS)
10930 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10931 CASE_BUILTIN_F (BUILT_IN_CABS)
10932 CASE_BUILTIN_F (BUILT_IN_COSH)
10933 CASE_BUILTIN_F (BUILT_IN_ERFC)
10934 CASE_BUILTIN_F (BUILT_IN_EXP)
10935 CASE_BUILTIN_F (BUILT_IN_EXP10)
10936 CASE_BUILTIN_F (BUILT_IN_EXP2)
10937 CASE_BUILTIN_F (BUILT_IN_FABS)
10938 CASE_BUILTIN_F (BUILT_IN_FDIM)
10939 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10940 CASE_BUILTIN_F (BUILT_IN_POW10)
10941 CASE_BUILTIN_I (BUILT_IN_FFS)
10942 CASE_BUILTIN_I (BUILT_IN_PARITY)
10943 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10944 /* Always true. */
10945 return 1;
10947 CASE_BUILTIN_F (BUILT_IN_SQRT)
10948 /* sqrt(-0.0) is -0.0. */
10949 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10950 return 1;
10951 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10953 CASE_BUILTIN_F (BUILT_IN_ASINH)
10954 CASE_BUILTIN_F (BUILT_IN_ATAN)
10955 CASE_BUILTIN_F (BUILT_IN_ATANH)
10956 CASE_BUILTIN_F (BUILT_IN_CBRT)
10957 CASE_BUILTIN_F (BUILT_IN_CEIL)
10958 CASE_BUILTIN_F (BUILT_IN_ERF)
10959 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10960 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10961 CASE_BUILTIN_F (BUILT_IN_FMOD)
10962 CASE_BUILTIN_F (BUILT_IN_FREXP)
10963 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10964 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10965 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10966 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10967 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10968 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10969 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10970 CASE_BUILTIN_F (BUILT_IN_LRINT)
10971 CASE_BUILTIN_F (BUILT_IN_LROUND)
10972 CASE_BUILTIN_F (BUILT_IN_MODF)
10973 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10974 CASE_BUILTIN_F (BUILT_IN_POW)
10975 CASE_BUILTIN_F (BUILT_IN_RINT)
10976 CASE_BUILTIN_F (BUILT_IN_ROUND)
10977 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10978 CASE_BUILTIN_F (BUILT_IN_SINH)
10979 CASE_BUILTIN_F (BUILT_IN_TANH)
10980 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10981 /* True if the 1st argument is nonnegative. */
10982 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10984 CASE_BUILTIN_F (BUILT_IN_FMAX)
10985 /* True if the 1st OR 2nd arguments are nonnegative. */
10986 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10987 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10989 CASE_BUILTIN_F (BUILT_IN_FMIN)
10990 /* True if the 1st AND 2nd arguments are nonnegative. */
10991 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10992 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10994 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10995 /* True if the 2nd argument is nonnegative. */
10996 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10998 default:
10999 break;
11000 #undef CASE_BUILTIN_F
11001 #undef CASE_BUILTIN_I
11005 /* ... fall through ... */
11007 default:
11008 if (truth_value_p (TREE_CODE (t)))
11009 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11010 return 1;
11013 /* We don't know the sign of `t', so be conservative and return false. */
11014 return 0;
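/* Example (illustrative only): if X and Y are NOP_EXPRs widening
   unsigned short values to int, the PLUS_EXPR case above proves the
   sum non-negative, because both inner precisions are at least two
   bits short of int and the sum cannot reach the sign bit.  */
#if 0
static int
example_sum_nonnegative (tree x, tree y)
{
  return tree_expr_nonnegative_p (build2 (PLUS_EXPR, integer_type_node,
					  x, y));
}
#endif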
11017 /* Return true when T is an address and is known to be nonzero.
11018 For floating point we further ensure that T is not denormal.
11019 Similar logic is present in nonzero_address_p in rtlanal.c. */
11021 bool
11022 tree_expr_nonzero_p (tree t)
11024 tree type = TREE_TYPE (t);
11026 /* Doing something useful for floating point would need more work. */
11027 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11028 return false;
11030 switch (TREE_CODE (t))
11032 case ABS_EXPR:
11033 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11035 case INTEGER_CST:
11036 /* We used to test for !integer_zerop here. This does not work correctly
11037 if TREE_CONSTANT_OVERFLOW (t). */
11038 return (TREE_INT_CST_LOW (t) != 0
11039 || TREE_INT_CST_HIGH (t) != 0);
11041 case PLUS_EXPR:
11042 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11044 /* In the presence of negative values it is hard
11045 to say anything. */
11046 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11047 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11048 return false;
11049 /* One of the operands must be positive and the other non-negative. */
11050 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11051 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11053 break;
11055 case MULT_EXPR:
11056 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11058 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11059 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11061 break;
11063 case NOP_EXPR:
11065 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11066 tree outer_type = TREE_TYPE (t);
11068 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11069 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11071 break;
11073 case ADDR_EXPR:
11075 tree base = get_base_address (TREE_OPERAND (t, 0));
11077 if (!base)
11078 return false;
11080 /* Weak declarations may link to NULL. */
11081 if (VAR_OR_FUNCTION_DECL_P (base))
11082 return !DECL_WEAK (base);
11084 /* Constants are never weak. */
11085 if (CONSTANT_CLASS_P (base))
11086 return true;
11088 return false;
11091 case COND_EXPR:
11092 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11093 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11095 case MIN_EXPR:
11096 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11097 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11099 case MAX_EXPR:
11100 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11102 /* When both operands are nonzero, then MAX must be too. */
11103 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11104 return true;
11106 /* MAX where operand 0 is positive is positive. */
11107 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11109 /* MAX where operand 1 is positive is positive. */
11110 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11111 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11112 return true;
11113 break;
11115 case COMPOUND_EXPR:
11116 case MODIFY_EXPR:
11117 case BIND_EXPR:
11118 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11120 case SAVE_EXPR:
11121 case NON_LVALUE_EXPR:
11122 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11124 case BIT_IOR_EXPR:
11125 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11126 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11128 case CALL_EXPR:
11129 return alloca_call_p (t);
11131 default:
11132 break;
11134 return false;
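/* Example (illustrative only): the address of a non-weak declaration
   is provably nonzero via the ADDR_EXPR case, whereas a weak symbol
   may legitimately resolve to null and so yields false.  */
#if 0
static bool
example_addr_nonzero (tree var_decl)
{
  /* True unless VAR_DECL is declared weak.  */
  return tree_expr_nonzero_p (build_fold_addr_expr (var_decl));
}
#endif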
11137 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11138 attempt to fold the expression to a constant without modifying TYPE,
11139 OP0 or OP1.
11141 If the expression could be simplified to a constant, then return
11142 the constant. If the expression would not be simplified to a
11143 constant, then return NULL_TREE. */
11145 tree
11146 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11148 tree tem = fold_binary (code, type, op0, op1);
11149 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11152 /* Given the components of a unary expression CODE, TYPE and OP0,
11153 attempt to fold the expression to a constant without modifying
11154 TYPE or OP0.
11156 If the expression could be simplified to a constant, then return
11157 the constant. If the expression would not be simplified to a
11158 constant, then return NULL_TREE. */
11160 tree
11161 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11163 tree tem = fold_unary (code, type, op0);
11164 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
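/* Example (illustrative only): these entry points answer "does this
   expression fold to a constant?" without modifying the operands:  */
#if 0
static tree
example_const_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Returns the INTEGER_CST 5; with a non-constant operand the result
     would be NULL_TREE rather than a PLUS_EXPR node.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				  two, three);
}
#endif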
11167 /* If EXP represents referencing an element in a constant string
11168 (either via pointer arithmetic or array indexing), return the
11169 tree representing the value accessed, otherwise return NULL. */
11171 tree
11172 fold_read_from_constant_string (tree exp)
11174 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11176 tree exp1 = TREE_OPERAND (exp, 0);
11177 tree index;
11178 tree string;
11180 if (TREE_CODE (exp) == INDIRECT_REF)
11181 string = string_constant (exp1, &index);
11182 else
11184 tree low_bound = array_ref_low_bound (exp);
11185 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11187 /* Optimize the special case of a zero lower bound.
11189 We convert the low_bound to sizetype to avoid some problems
11190 with constant folding. (E.g. suppose the lower bound is 1,
11191 and its mode is QI. Without the conversion, (ARRAY
11192 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11193 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11194 if (! integer_zerop (low_bound))
11195 index = size_diffop (index, fold_convert (sizetype, low_bound));
11197 string = exp1;
11200 if (string
11201 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11202 && TREE_CODE (string) == STRING_CST
11203 && TREE_CODE (index) == INTEGER_CST
11204 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11205 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11206 == MODE_INT)
11207 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11208 return fold_convert (TREE_TYPE (exp),
11209 build_int_cst (NULL_TREE,
11210 (TREE_STRING_POINTER (string)
11211 [TREE_INT_CST_LOW (index)])));
11213 return NULL;
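/* Example (illustrative only): assuming EXP is the ARRAY_REF for
   "abc"[1], the index is a constant within the string length and the
   element mode is a one-byte integer mode, so the character constant
   'b' is returned; anything unresolvable yields NULL.  */
#if 0
static tree
example_read_string (tree exp)
{
  return fold_read_from_constant_string (exp);
}
#endif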
11216 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11217 an integer constant or real constant.
11219 TYPE is the type of the result. */
11221 static tree
11222 fold_negate_const (tree arg0, tree type)
11224 tree t = NULL_TREE;
11226 switch (TREE_CODE (arg0))
11228 case INTEGER_CST:
11230 unsigned HOST_WIDE_INT low;
11231 HOST_WIDE_INT high;
11232 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11233 TREE_INT_CST_HIGH (arg0),
11234 &low, &high);
11235 t = build_int_cst_wide (type, low, high);
11236 t = force_fit_type (t, 1,
11237 (overflow | TREE_OVERFLOW (arg0))
11238 && !TYPE_UNSIGNED (type),
11239 TREE_CONSTANT_OVERFLOW (arg0));
11240 break;
11243 case REAL_CST:
11244 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11245 break;
11247 default:
11248 gcc_unreachable ();
11251 return t;
11254 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11255 an integer constant or real constant.
11257 TYPE is the type of the result. */
11259 tree
11260 fold_abs_const (tree arg0, tree type)
11262 tree t = NULL_TREE;
11264 switch (TREE_CODE (arg0))
11266 case INTEGER_CST:
11267 /* If the value is unsigned, then the absolute value is
11268 the same as the ordinary value. */
11269 if (TYPE_UNSIGNED (type))
11270 t = arg0;
11271 /* Similarly, if the value is non-negative. */
11272 else if (INT_CST_LT (integer_minus_one_node, arg0))
11273 t = arg0;
11274 /* If the value is negative, then the absolute value is
11275 its negation. */
11276 else
11278 unsigned HOST_WIDE_INT low;
11279 HOST_WIDE_INT high;
11280 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11281 TREE_INT_CST_HIGH (arg0),
11282 &low, &high);
11283 t = build_int_cst_wide (type, low, high);
11284 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11285 TREE_CONSTANT_OVERFLOW (arg0));
11287 break;
11289 case REAL_CST:
11290 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11291 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11292 else
11293 t = arg0;
11294 break;
11296 default:
11297 gcc_unreachable ();
11300 return t;
11303 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11304 constant. TYPE is the type of the result. */
11306 static tree
11307 fold_not_const (tree arg0, tree type)
11309 tree t = NULL_TREE;
11311 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11313 t = build_int_cst_wide (type,
11314 ~ TREE_INT_CST_LOW (arg0),
11315 ~ TREE_INT_CST_HIGH (arg0));
11316 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11317 TREE_CONSTANT_OVERFLOW (arg0));
11319 return t;
11322 /* Given CODE, a relational operator, the target type, TYPE and two
11323 constant operands OP0 and OP1, return the result of the
11324 relational operation. If the result is not a compile time
11325 constant, then return NULL_TREE. */
11327 static tree
11328 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11330 int result, invert;
11332 /* From here on, the only cases we handle are when the result is
11333 known to be a constant. */
11335 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11337 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11338 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11340 /* Handle the cases where either operand is a NaN. */
11341 if (real_isnan (c0) || real_isnan (c1))
11343 switch (code)
11345 case EQ_EXPR:
11346 case ORDERED_EXPR:
11347 result = 0;
11348 break;
11350 case NE_EXPR:
11351 case UNORDERED_EXPR:
11352 case UNLT_EXPR:
11353 case UNLE_EXPR:
11354 case UNGT_EXPR:
11355 case UNGE_EXPR:
11356 case UNEQ_EXPR:
11357 result = 1;
11358 break;
11360 case LT_EXPR:
11361 case LE_EXPR:
11362 case GT_EXPR:
11363 case GE_EXPR:
11364 case LTGT_EXPR:
11365 if (flag_trapping_math)
11366 return NULL_TREE;
11367 result = 0;
11368 break;
11370 default:
11371 gcc_unreachable ();
11374 return constant_boolean_node (result, type);
11377 return constant_boolean_node (real_compare (code, c0, c1), type);
11380 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11382 To compute GT, swap the arguments and do LT.
11383 To compute GE, do LT and invert the result.
11384 To compute LE, swap the arguments, do LT and invert the result.
11385 To compute NE, do EQ and invert the result.
11387 Therefore, the code below must handle only EQ and LT. */
11389 if (code == LE_EXPR || code == GT_EXPR)
11391 tree tem = op0;
11392 op0 = op1;
11393 op1 = tem;
11394 code = swap_tree_comparison (code);
11397 /* Note that it is safe to invert for real values here because we
11398 have already handled the one case that it matters. */
11400 invert = 0;
11401 if (code == NE_EXPR || code == GE_EXPR)
11403 invert = 1;
11404 code = invert_tree_comparison (code, false);
11407 /* Compute a result for LT or EQ if args permit;
11408 Otherwise return NULL_TREE. */
11409 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11411 if (code == EQ_EXPR)
11412 result = tree_int_cst_equal (op0, op1);
11413 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11414 result = INT_CST_LT_UNSIGNED (op0, op1);
11415 else
11416 result = INT_CST_LT (op0, op1);
11418 else
11419 return NULL_TREE;
11421 if (invert)
11422 result ^= 1;
11423 return constant_boolean_node (result, type);
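/* Worked example of the canonicalization above: folding 3 >= 5.
   GE_EXPR is rewritten as LT_EXPR plus inversion; INT_CST_LT (3, 5)
   yields 1, and inverting gives the final constant 0 (false).  */
#if 0
static tree
example_relational (void)
{
  tree three = build_int_cst (integer_type_node, 3);
  tree five = build_int_cst (integer_type_node, 5);
  /* Returns constant_boolean_node (0, integer_type_node).  */
  return fold_relational_const (GE_EXPR, integer_type_node, three, five);
}
#endif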
11426 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11427 Don't build a cleanup point expression for EXPR if it doesn't have side
11428 effects. */
11430 tree
11431 fold_build_cleanup_point_expr (tree type, tree expr)
11433 /* If the expression does not have side effects then we don't have to wrap
11434 it with a cleanup point expression. */
11435 if (!TREE_SIDE_EFFECTS (expr))
11436 return expr;
11438 /* If the expression is a return, check whether the expression inside the
11439 return, or the right-hand side of the modify expression inside the return,
11440 has no side effects. If either has none, we don't need to wrap the
11441 expression in a cleanup point expression. Note we don't check the left-hand
11442 side of the modify because it should always be the return decl. */
11443 if (TREE_CODE (expr) == RETURN_EXPR)
11445 tree op = TREE_OPERAND (expr, 0);
11446 if (!op || !TREE_SIDE_EFFECTS (op))
11447 return expr;
11448 op = TREE_OPERAND (op, 1);
11449 if (!TREE_SIDE_EFFECTS (op))
11450 return expr;
11453 return build1 (CLEANUP_POINT_EXPR, type, expr);
11456 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11457 avoid confusing the gimplify process. */
11459 tree
11460 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11462 /* The size of the object is not relevant when talking about its address. */
11463 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11464 t = TREE_OPERAND (t, 0);
11466 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11467 if (TREE_CODE (t) == INDIRECT_REF
11468 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11470 t = TREE_OPERAND (t, 0);
11471 if (TREE_TYPE (t) != ptrtype)
11472 t = build1 (NOP_EXPR, ptrtype, t);
11474 else
11476 tree base = t;
11478 while (handled_component_p (base))
11479 base = TREE_OPERAND (base, 0);
11480 if (DECL_P (base))
11481 TREE_ADDRESSABLE (base) = 1;
11483 t = build1 (ADDR_EXPR, ptrtype, t);
11486 return t;
11489 tree
11490 build_fold_addr_expr (tree t)
11492 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11495 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11496 of an indirection through OP0, or NULL_TREE if no simplification is
11497 possible. */
11499 tree
11500 fold_indirect_ref_1 (tree type, tree op0)
11502 tree sub = op0;
11503 tree subtype;
11505 STRIP_NOPS (sub);
11506 subtype = TREE_TYPE (sub);
11507 if (!POINTER_TYPE_P (subtype))
11508 return NULL_TREE;
11510 if (TREE_CODE (sub) == ADDR_EXPR)
11512 tree op = TREE_OPERAND (sub, 0);
11513 tree optype = TREE_TYPE (op);
11514 /* *&p => p */
11515 if (type == optype)
11516 return op;
11517 /* *(foo *)&fooarray => fooarray[0] */
11518 else if (TREE_CODE (optype) == ARRAY_TYPE
11519 && type == TREE_TYPE (optype))
11521 tree type_domain = TYPE_DOMAIN (optype);
11522 tree min_val = size_zero_node;
11523 if (type_domain && TYPE_MIN_VALUE (type_domain))
11524 min_val = TYPE_MIN_VALUE (type_domain);
11525 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11529 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11530 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11531 && type == TREE_TYPE (TREE_TYPE (subtype)))
11533 tree type_domain;
11534 tree min_val = size_zero_node;
11535 sub = build_fold_indirect_ref (sub);
11536 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11537 if (type_domain && TYPE_MIN_VALUE (type_domain))
11538 min_val = TYPE_MIN_VALUE (type_domain);
11539 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11542 return NULL_TREE;
11545 /* Builds an expression for an indirection through T, simplifying some
11546 cases. */
11548 tree
11549 build_fold_indirect_ref (tree t)
11551 tree type = TREE_TYPE (TREE_TYPE (t));
11552 tree sub = fold_indirect_ref_1 (type, t);
11554 if (sub)
11555 return sub;
11556 else
11557 return build1 (INDIRECT_REF, type, t);
11560 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11562 tree
11563 fold_indirect_ref (tree t)
11565 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11567 if (sub)
11568 return sub;
11569 else
11570 return t;
11573 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11574 whose result is ignored. The type of the returned tree need not be
11575 the same as the original expression. */
11577 tree
11578 fold_ignored_result (tree t)
11580 if (!TREE_SIDE_EFFECTS (t))
11581 return integer_zero_node;
11583 for (;;)
11584 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11586 case tcc_unary:
11587 t = TREE_OPERAND (t, 0);
11588 break;
11590 case tcc_binary:
11591 case tcc_comparison:
11592 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11593 t = TREE_OPERAND (t, 0);
11594 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11595 t = TREE_OPERAND (t, 1);
11596 else
11597 return t;
11598 break;
11600 case tcc_expression:
11601 switch (TREE_CODE (t))
11603 case COMPOUND_EXPR:
11604 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11605 return t;
11606 t = TREE_OPERAND (t, 0);
11607 break;
11609 case COND_EXPR:
11610 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11611 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11612 return t;
11613 t = TREE_OPERAND (t, 0);
11614 break;
11616 default:
11617 return t;
11619 break;
11621 default:
11622 return t;
11626 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11627 This can only be applied to objects of a sizetype. */
11629 tree
11630 round_up (tree value, int divisor)
11632 tree div = NULL_TREE;
11634 gcc_assert (divisor > 0);
11635 if (divisor == 1)
11636 return value;
11638 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11639 have to do anything. Only do this when we are not given a const,
11640 because in that case, this check is more expensive than just
11641 doing it. */
11642 if (TREE_CODE (value) != INTEGER_CST)
11644 div = build_int_cst (TREE_TYPE (value), divisor);
11646 if (multiple_of_p (TREE_TYPE (value), value, div))
11647 return value;
11650 /* If divisor is a power of two, simplify this to bit manipulation. */
11651 if (divisor == (divisor & -divisor))
11653 tree t;
11655 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11656 value = size_binop (PLUS_EXPR, value, t);
11657 t = build_int_cst (TREE_TYPE (value), -divisor);
11658 value = size_binop (BIT_AND_EXPR, value, t);
11660 else
11662 if (!div)
11663 div = build_int_cst (TREE_TYPE (value), divisor);
11664 value = size_binop (CEIL_DIV_EXPR, value, div);
11665 value = size_binop (MULT_EXPR, value, div);
11668 return value;
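/* Worked example of the power-of-two path above: rounding up to a
   multiple of 8 computes (VALUE + 7) & -8 with no division.  For
   VALUE == 13 this is 20 & ~7 == 16.  */
#if 0
static tree
example_round_up (void)
{
  /* Yields size_int (16).  */
  return round_up (size_int (13), 8);
}
#endif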
11671 /* Likewise, but round down. */
11673 tree
11674 round_down (tree value, int divisor)
11676 tree div = NULL_TREE;
11678 gcc_assert (divisor > 0);
11679 if (divisor == 1)
11680 return value;
11682 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11683 have to do anything. Only do this when we are not given a const,
11684 because in that case, this check is more expensive than just
11685 doing it. */
11686 if (TREE_CODE (value) != INTEGER_CST)
11688 div = build_int_cst (TREE_TYPE (value), divisor);
11690 if (multiple_of_p (TREE_TYPE (value), value, div))
11691 return value;
11694 /* If divisor is a power of two, simplify this to bit manipulation. */
11695 if (divisor == (divisor & -divisor))
11697 tree t;
11699 t = build_int_cst (TREE_TYPE (value), -divisor);
11700 value = size_binop (BIT_AND_EXPR, value, t);
11702 else
11704 if (!div)
11705 div = build_int_cst (TREE_TYPE (value), divisor);
11706 value = size_binop (FLOOR_DIV_EXPR, value, div);
11707 value = size_binop (MULT_EXPR, value, div);
11710 return value;
11713 /* Returns the pointer to the base of the object addressed by EXP and
11714 extracts the information about the offset of the access, storing it
11715 in PBITPOS and POFFSET. */
11717 static tree
11718 split_address_to_core_and_offset (tree exp,
11719 HOST_WIDE_INT *pbitpos, tree *poffset)
11721 tree core;
11722 enum machine_mode mode;
11723 int unsignedp, volatilep;
11724 HOST_WIDE_INT bitsize;
11726 if (TREE_CODE (exp) == ADDR_EXPR)
11728 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11729 poffset, &mode, &unsignedp, &volatilep,
11730 false);
11731 core = build_fold_addr_expr (core);
11733 else
11735 core = exp;
11736 *pbitpos = 0;
11737 *poffset = NULL_TREE;
11740 return core;
11743 /* Returns true if addresses of E1 and E2 differ by a constant, false
11744 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11746 bool
11747 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11749 tree core1, core2;
11750 HOST_WIDE_INT bitpos1, bitpos2;
11751 tree toffset1, toffset2, tdiff, type;
11753 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11754 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11756 if (bitpos1 % BITS_PER_UNIT != 0
11757 || bitpos2 % BITS_PER_UNIT != 0
11758 || !operand_equal_p (core1, core2, 0))
11759 return false;
11761 if (toffset1 && toffset2)
11763 type = TREE_TYPE (toffset1);
11764 if (type != TREE_TYPE (toffset2))
11765 toffset2 = fold_convert (type, toffset2);
11767 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11768 if (!cst_and_fits_in_hwi (tdiff))
11769 return false;
11771 *diff = int_cst_value (tdiff);
11773 else if (toffset1 || toffset2)
11775 /* If only one of the offsets is non-constant, the difference cannot
11776 be a constant. */
11777 return false;
11779 else
11780 *diff = 0;
11782 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11783 return true;
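/* Example (illustrative only): for E1 == &a[3] and E2 == &a[1] both
   addresses share the core &a and the offsets fold to constants, so
   *DIFF is set to 2 * sizeof (element) and true is returned.  With
   different bases, or a variable offset on only one side, the result
   is false.  */
#if 0
static bool
example_ptr_diff (tree addr1, tree addr2, HOST_WIDE_INT *diff)
{
  /* ADDR1 and ADDR2 are assumed to be ADDR_EXPRs.  */
  return ptr_difference_const (addr1, addr2, diff);
}
#endif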
11786 /* Simplify the floating point expression EXP when the sign of the
11787 result is not significant. Return NULL_TREE if no simplification
11788 is possible. */
11790 tree
11791 fold_strip_sign_ops (tree exp)
11793 tree arg0, arg1;
11795 switch (TREE_CODE (exp))
11797 case ABS_EXPR:
11798 case NEGATE_EXPR:
11799 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11800 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11802 case MULT_EXPR:
11803 case RDIV_EXPR:
11804 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11805 return NULL_TREE;
11806 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11807 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11808 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11809 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11810 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11811 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11812 break;
11814 default:
11815 break;
11817 return NULL_TREE;
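/* Example (illustrative only): in a context such as fabs (-x * y),
   where the sign of the result is irrelevant, the NEGATE_EXPR is
   stripped and x * y is returned; when nothing can be stripped the
   function returns NULL_TREE and the caller keeps EXP unchanged.  */
#if 0
static tree
example_strip_signs (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  return stripped ? stripped : exp;
}
#endif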