/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

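/* The encoding above uses one bit per primitive outcome: bit 0 for LT,
   bit 1 for EQ, bit 2 for GT and bit 3 for UNORD, so composite codes
   are simply bitwise ORs of the primitives.  For example, COMPCODE_LE
   is COMPCODE_LT | COMPCODE_EQ (1 | 2 == 3), and folding
   "a < b || a == b" reduces to ORing the two codes and mapping the
   result 3 back to LE_EXPR.  */
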
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

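/* Worked example of OVERFLOW_SUM_SIGN, scaled down to 8 bits for
   illustration: a = b = 0x40 (64) and sum = 0x80 (-128).  Then a ^ b is 0,
   so ~(a ^ b) has the sign bit set, and a ^ sum == 0xC0 also has the sign
   bit set; their AND is negative, correctly flagging the overflow of
   64 + 64 in signed 8-bit arithmetic.  */
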
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of each original word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

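/* For instance, assuming a 64-bit HOST_WIDE_INT, BASE is 1 << 32 and
   LOWPART/HIGHPART split x = 0x123456789abcdef0 into 0x9abcdef0 and
   0x12345678, so that x == LOWPART (x) + HIGHPART (x) * BASE.  */
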
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

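/* encode and decode are exact inverses.  A sketch of the round trip,
   assuming a 64-bit HOST_WIDE_INT:

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, 0x123456789abcdef0, 0x1);
     ==> w[0] = 0x9abcdef0, w[1] = 0x12345678, w[2] = 0x1, w[3] = 0x0
     decode (w, &lo, &hi);
     ==> lo == 0x123456789abcdef0 and hi == 0x1 again.  */
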
/* T is an INT_CST node.  OVERFLOWABLE indicates how we are interested
   in overflow of the value: when >0 we are only interested in signed
   overflow; when <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
     CONST_OVERFLOWED is nonzero,
     or we set TREE_OVERFLOW.
   We return either the original T or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

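/* Example: forcing the value 0x1ff to fit an 8-bit type first masks off
   the bits above the precision, leaving 0xff.  For an unsigned char type
   the result is 255; for a signed char type bit 7 is set, so the value
   is sign extended to low == ~0, high == -1, i.e. -1.  Since the value
   changed, a new constant is built, and TREE_OVERFLOW is set or not
   according to OVERFLOWABLE and the prior overflow flags as described
   in the comment above.  */
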
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

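/* The (l < l1) term detects a carry out of the low word: unsigned
   addition wraps, so the low result is smaller than an addend exactly
   when a carry occurred.  E.g. in 4-bit words, 0xC + 0x8 == 0x4 with
   0x4 < 0xC, so 1 is carried into the high word.  */
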
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

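/* This is two's complement negation -x == ~x + 1 done in two words:
   when L1 is nonzero, the "+ 1" cannot carry out of the low word, so
   the high word is just inverted; when L1 is zero the carry ripples all
   the way up, giving -H1.  The only signed overflow case is negating
   the most negative value, where -H1 and H1 share a set sign bit, which
   is what (*hv & h1) < 0 tests.  */
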
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);  /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

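/* The 4-word product above is exact for the unsigned interpretation of
   the operands.  For the signed overflow check the top doubleword has
   to be adjusted: a negative operand reads, unsigned, as its value plus
   2**(2 * HOST_BITS_PER_WIDE_INT), which contributes the *other*
   operand into the product's high half.  Subtracting it back out (via
   neg_double and add_double above) yields the signed product's high
   half, and the multiply fits iff that half is the sign extension of
   the low doubleword, i.e. all-zero or all-one bits.  */
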
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

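/* Note the double shift in the else branch above: writing
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 instead of
   l1 >> (HOST_BITS_PER_WIDE_INT - count) keeps each shift amount
   strictly smaller than the word size even when COUNT is 0, since
   shifting by the full word width is undefined behavior in C.  */
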
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

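/* Both rotates use the identity rotl (x, n) == (x << n) | (x >> (prec - n))
   restricted to PREC bits.  For example, in 8 bits, rotating 0xB1
   (10110001) left by 3 gives 0x8D (10001101): the top three bits wrap
   around to the bottom.  */
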
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

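/* Example of the rounding conventions for num = -7, den = 2:
   TRUNC_DIV_EXPR gives quo = -3, rem = -1 (round toward zero);
   FLOOR_DIV_EXPR gives quo = -4, rem = 1 (toward negative infinity);
   CEIL_DIV_EXPR gives quo = -3, rem = -1 (toward positive infinity;
   for a negative quotient truncation already rounds up);
   ROUND_DIV_EXPR gives quo = -4, rem = 1, since 2 * |rem| >= |den|
   makes the half-way case round away from zero.  */
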
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

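/* In two's complement the lone value whose negation overflows is the
   most negative one, e.g. -128 in a signed 8-bit type, since +128 is
   not representable.  The comparison above simply checks T against
   that bit pattern, 1 << (prec - 1).  */
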
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

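/* The RSHIFT_EXPR case relies on (int) x >> 31 being either 0 or -1
   (for a 32-bit int), whose negation is 0 or 1, which is exactly
   (unsigned) x >> 31; switching the signedness of the shift therefore
   absorbs the negation at no cost.  This assumes the usual
   sign-propagating semantics of signed right shifts in trees.  */
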
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

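/* A sketch of how split_tree decomposes its argument: for IN = a - 4
   and CODE = PLUS_EXPR, the literal 4 appears under a MINUS_EXPR, so it
   is stored in *MINUS_LITP, *LITP and *CONP stay null, and the variable
   part returned is "a".  The caller can then reassemble the pieces with
   associate_trees below.  */
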
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          return NULL_TREE;
        }
      return t;
    }
  return NULL_TREE;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
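
/* For example, when NaNs are honored the inverse of GT_EXPR is
   UNLE_EXPR, not LE_EXPR, since !(x > y) must also hold when x or y
   is NaN.  Under -ftrapping-math the rewrite is unsafe, because the
   unordered form may fail to trap where the ordered one would, so
   ERROR_MARK is returned instead.  */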

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */
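
/* For example, swapping turns x < y into y > x and x UNGE y into
   y UNLE x, while symmetric codes such as EQ_EXPR, NE_EXPR,
   ORDERED_EXPR, UNORDERED_EXPR, LTGT_EXPR and UNEQ_EXPR are returned
   unchanged.  */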

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */
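
/* For example, with the bit encoding of comparison_code (LT = 1,
   EQ = 2, GT = 4, UNORD = 8), combining x < y || x == y computes
   COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE and yields x <= y, while
   x <= y && x >= y computes COMPCODE_LE & COMPCODE_GE = COMPCODE_EQ
   and yields x == y.  */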

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
}

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
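
/* For example, two uses of the same VAR_DECL compare equal, and a + b
   matches b + a for commutative codes, but a call f () matches another
   f () only if the function is const (or pure, when OEP_PURE_SAME is
   set), and REAL_CSTs are compared bit-identically, so 0.0 does not
   match -0.0.  */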

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.  */
          return (OP_SAME (0)
                  && OP_SAME (1)
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (!OP_SAME (0))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */
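
/* For example, (a == b) | (a < b) succeeds with *CVAL1 = a and
   *CVAL2 = b, whereas (a < b) | (a < c) fails because three distinct
   operands appear in the comparisons.  */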

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
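
/* For example, applied to (a < b) | (b == a) with OLD0 = a, NEW0 = x,
   OLD1 = b and NEW1 = y, this rebuilds the tree as (x < y) | (y == x).  */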

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */
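
/* For example, when x * 0 is folded to 0 but x is a call with side
   effects, the result becomes the COMPOUND_EXPR (x, 0) so the call is
   still evaluated; when x has no side effects, the 0 is simply
   converted to TYPE.  */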

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */
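
/* For example, !(a && b) becomes !a || !b by De Morgan's law,
   a comparison such as x == y becomes x != y, and for a COND_EXPR
   the negation is pushed into both of its value operands.  */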

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);
        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                       VOID_TYPE_P (TREE_TYPE (arg1))
                       ? arg1 : invert_truthvalue (arg1),
                       VOID_TYPE_P (TREE_TYPE (arg2))
                       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
}

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
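
/* For example, x/3.0 + y/3.0 becomes (x + y)/3.0 and x/3.0 + x/5.0
   becomes x * (1/3.0 + 1/5.0).  The rewritten expression can round
   differently from the original, which is what makes this unsafe
   under strict IEEE semantics.  */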

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2 (code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  return NULL_TREE;
}

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
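
/* For example, given

     struct s { unsigned a : 3, b : 5; } x;

   and assuming a layout in which x.b occupies bits 3 through 7 of a
   byte, the test x.b == 7 can be done by loading that byte, masking
   it with 0xf8 and comparing the result against 7 << 3, avoiding the
   shift a plain bit-field extraction would need.  */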

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert (unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
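
/* The example above works because subtracting the low bound biases
   the range to start at zero: X >= 2 && X <= 5 is + [2, 5], i.e.
   0 <= X - 2 && X - 2 <= 3, and in unsigned arithmetic X - 2 wraps
   to a huge value when X < 2, so the single unsigned test
   (unsigned) (X - 2) <= 3 checks both bounds at once.  */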

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */
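
/* For example, for the expression (unsigned) x > 10 this returns x
   with *PIN_P = 0, *PLOW = 0 and *PHIGH = 10, i.e. x lies outside
   the range [0, 10].  A TRUTH_NOT_EXPR wrapped around the comparison
   would simply flip *PIN_P.  */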
3731 static tree
3732 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3734 enum tree_code code;
3735 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3736 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3737 int in_p, n_in_p;
3738 tree low, high, n_low, n_high;
3740 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3741 and see if we can refine the range. Some of the cases below may not
3742 happen, but it doesn't seem worth worrying about this. We "continue"
3743 the outer loop when we've changed something; otherwise we "break"
3744 the switch, which will "break" the while. */
3746 in_p = 0;
3747 low = high = build_int_cst (TREE_TYPE (exp), 0);
3749 while (1)
3751 code = TREE_CODE (exp);
3752 exp_type = TREE_TYPE (exp);
3754 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3756 if (TREE_CODE_LENGTH (code) > 0)
3757 arg0 = TREE_OPERAND (exp, 0);
3758 if (TREE_CODE_CLASS (code) == tcc_comparison
3759 || TREE_CODE_CLASS (code) == tcc_unary
3760 || TREE_CODE_CLASS (code) == tcc_binary)
3761 arg0_type = TREE_TYPE (arg0);
3762 if (TREE_CODE_CLASS (code) == tcc_binary
3763 || TREE_CODE_CLASS (code) == tcc_comparison
3764 || (TREE_CODE_CLASS (code) == tcc_expression
3765 && TREE_CODE_LENGTH (code) > 1))
3766 arg1 = TREE_OPERAND (exp, 1);
3769 switch (code)
3771 case TRUTH_NOT_EXPR:
3772 in_p = ! in_p, exp = arg0;
3773 continue;
3775 case EQ_EXPR: case NE_EXPR:
3776 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3777 /* We can only do something if the range is testing for zero
3778 and if the second operand is an integer constant. Note that
3779 saying something is "in" the range we make is done by
3780 complementing IN_P since it will set in the initial case of
3781 being not equal to zero; "out" is leaving it alone. */
3782 if (low == 0 || high == 0
3783 || ! integer_zerop (low) || ! integer_zerop (high)
3784 || TREE_CODE (arg1) != INTEGER_CST)
3785 break;
3787 switch (code)
3789 case NE_EXPR: /* - [c, c] */
3790 low = high = arg1;
3791 break;
3792 case EQ_EXPR: /* + [c, c] */
3793 in_p = ! in_p, low = high = arg1;
3794 break;
3795 case GT_EXPR: /* - [-, c] */
3796 low = 0, high = arg1;
3797 break;
3798 case GE_EXPR: /* + [c, -] */
3799 in_p = ! in_p, low = arg1, high = 0;
3800 break;
3801 case LT_EXPR: /* - [c, -] */
3802 low = arg1, high = 0;
3803 break;
3804 case LE_EXPR: /* + [-, c] */
3805 in_p = ! in_p, low = 0, high = arg1;
3806 break;
3807 default:
3808 gcc_unreachable ();
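/* For instance, starting from the initial "EXP != 0" state (IN_P of 0),
   x <= 9 becomes + [-, 9], i.e. x in [min, 9]; x > 4 becomes - [-, 4],
   i.e. x outside [min, 4]; and x == 5 becomes + [5, 5]. */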
3811 /* If this is an unsigned comparison, we also know that EXP is
3812 greater than or equal to zero. We base the range tests we make
3813 on that fact, so we record it here so we can parse existing
3814 range tests. We test arg0_type since often the return type
3815 of, e.g. EQ_EXPR, is boolean. */
3816 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3818 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3819 in_p, low, high, 1,
3820 build_int_cst (arg0_type, 0),
3821 NULL_TREE))
3822 break;
3824 in_p = n_in_p, low = n_low, high = n_high;
3826 /* If the high bound is missing, but we have a nonzero low
3827 bound, reverse the range so it goes from zero to the low bound
3828 minus 1. */
3829 if (high == 0 && low && ! integer_zerop (low))
3831 in_p = ! in_p;
3832 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3833 integer_one_node, 0);
3834 low = build_int_cst (arg0_type, 0);
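/* For instance, if ARG0 is unsigned and the test was x >= 5, the range
   built above is + [5, -], which has no upper bound; knowing x >= 0 we
   reverse it here to the equivalent - [0, 4]. */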
3838 exp = arg0;
3839 continue;
3841 case NEGATE_EXPR:
3842 /* (-x) IN [a,b] -> x in [-b, -a] */
3843 n_low = range_binop (MINUS_EXPR, exp_type,
3844 build_int_cst (exp_type, 0),
3845 0, high, 1);
3846 n_high = range_binop (MINUS_EXPR, exp_type,
3847 build_int_cst (exp_type, 0),
3848 0, low, 0);
3849 low = n_low, high = n_high;
3850 exp = arg0;
3851 continue;
3853 case BIT_NOT_EXPR:
3854 /* ~ X -> -X - 1 */
3855 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3856 build_int_cst (exp_type, 1));
3857 continue;
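/* For instance, ~x > 5 first becomes -x - 1 > 5 here; the MINUS_EXPR and
   NEGATE_EXPR cases then reduce its range - [-, 5] for ~x to - [-6, -]
   for x itself, i.e. x < -6 (assuming a signed x). */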
3859 case PLUS_EXPR: case MINUS_EXPR:
3860 if (TREE_CODE (arg1) != INTEGER_CST)
3861 break;
3863 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3864 move a constant to the other side. */
3865 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3866 break;
3868 /* If EXP is signed, any overflow in the computation is undefined,
3869 so we don't worry about it so long as our computations on
3870 the bounds don't overflow. For unsigned, overflow is defined
3871 and this is exactly the right thing. */
3872 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3873 arg0_type, low, 0, arg1, 0);
3874 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3875 arg0_type, high, 1, arg1, 0);
3876 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3877 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3878 break;
3880 /* Check for an unsigned range which has wrapped around the maximum
3881 value thus making n_high < n_low, and normalize it. */
3882 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3884 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3885 integer_one_node, 0);
3886 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3887 integer_one_node, 0);
3889 /* If the range is of the form +/- [ x+1, x ], we won't
3890 be able to normalize it. But then, it represents the
3891 whole range or the empty set, so make it
3892 +/- [ -, - ]. */
3893 if (tree_int_cst_equal (n_low, low)
3894 && tree_int_cst_equal (n_high, high))
3895 low = high = 0;
3896 else
3897 in_p = ! in_p;
3899 else
3900 low = n_low, high = n_high;
3902 exp = arg0;
3903 continue;
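/* Worked example of the wrap-around handling, for unsigned char x: if
   x + 10 is in + [5, 250], then n_low = 5 - 10 = 251 and n_high =
   250 - 10 = 240, so n_high < n_low; renormalizing gives the
   complemented range - [241, 250], i.e. x outside [241, 250]. */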
3905 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3906 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3907 break;
3909 if (! INTEGRAL_TYPE_P (arg0_type)
3910 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3911 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3912 break;
3914 n_low = low, n_high = high;
3916 if (n_low != 0)
3917 n_low = fold_convert (arg0_type, n_low);
3919 if (n_high != 0)
3920 n_high = fold_convert (arg0_type, n_high);
3923 /* If we're converting arg0 from an unsigned type to exp's
3924 signed type, we will be doing the comparison as unsigned.
3925 The tests above have already verified that LOW and HIGH
3926 are both positive.
3928 So we have to ensure that we will handle large unsigned
3929 values the same way that the current signed bounds treat
3930 negative values. */
3932 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3934 tree high_positive;
3935 tree equiv_type = lang_hooks.types.type_for_mode
3936 (TYPE_MODE (arg0_type), 1);
3938 /* A range without an upper bound is, naturally, unbounded.
3939 Since convert would have cropped a very large value, use
3940 the max value for the destination type. */
3941 high_positive
3942 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3943 : TYPE_MAX_VALUE (arg0_type);
3945 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3946 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3947 fold_convert (arg0_type,
3948 high_positive),
3949 fold_convert (arg0_type,
3950 integer_one_node));
3952 /* If the low bound is specified, "and" the range with the
3953 range for which the original unsigned value will be
3954 positive. */
3955 if (low != 0)
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 1, n_low, n_high, 1,
3959 fold_convert (arg0_type,
3960 integer_zero_node),
3961 high_positive))
3962 break;
3964 in_p = (n_in_p == in_p);
3966 else
3968 /* Otherwise, "or" the range with the range of the input
3969 that will be interpreted as negative. */
3970 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3971 0, n_low, n_high, 1,
3972 fold_convert (arg0_type,
3973 integer_zero_node),
3974 high_positive))
3975 break;
3977 in_p = (in_p != n_in_p);
3981 exp = arg0;
3982 low = n_low, high = n_high;
3983 continue;
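/* The key fact used above, e.g. for ARG0_TYPE unsigned char and EXP_TYPE
   signed char: (signed char) x is negative exactly for x in [128, 255],
   and high_positive is 127. A range with a low bound is therefore
   intersected with [0, 127], while one without a low bound must also
   take in the values that will be reinterpreted as negative. */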
3985 default:
3986 break;
3989 break;
3992 /* If EXP is a constant, we can evaluate whether this is true or false. */
3993 if (TREE_CODE (exp) == INTEGER_CST)
3995 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3996 exp, 0, low, 0))
3997 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3998 exp, 1, high, 1)));
3999 low = high = 0;
4000 exp = 0;
4003 *pin_p = in_p, *plow = low, *phigh = high;
4004 return exp;
4007 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4008 type, TYPE, return an expression to test if EXP is in (or out of, depending
4009 on IN_P) the range. Return 0 if the test couldn't be created. */
4011 static tree
4012 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4014 tree etype = TREE_TYPE (exp);
4015 tree value;
4017 #ifdef HAVE_canonicalize_funcptr_for_compare
4018 /* Disable this optimization for function pointer expressions
4019 on targets that require function pointer canonicalization. */
4020 if (HAVE_canonicalize_funcptr_for_compare
4021 && TREE_CODE (etype) == POINTER_TYPE
4022 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4023 return NULL_TREE;
4024 #endif
4026 if (! in_p)
4028 value = build_range_check (type, exp, 1, low, high);
4029 if (value != 0)
4030 return invert_truthvalue (value);
4032 return 0;
4035 if (low == 0 && high == 0)
4036 return build_int_cst (type, 1);
4038 if (low == 0)
4039 return fold_build2 (LE_EXPR, type, exp,
4040 fold_convert (etype, high));
4042 if (high == 0)
4043 return fold_build2 (GE_EXPR, type, exp,
4044 fold_convert (etype, low));
4046 if (operand_equal_p (low, high, 0))
4047 return fold_build2 (EQ_EXPR, type, exp,
4048 fold_convert (etype, low));
4050 if (integer_zerop (low))
4052 if (! TYPE_UNSIGNED (etype))
4054 etype = lang_hooks.types.unsigned_type (etype);
4055 high = fold_convert (etype, high);
4056 exp = fold_convert (etype, exp);
4058 return build_range_check (type, exp, 1, 0, high);
4061 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4062 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4064 unsigned HOST_WIDE_INT lo;
4065 HOST_WIDE_INT hi;
4066 int prec;
4068 prec = TYPE_PRECISION (etype);
4069 if (prec <= HOST_BITS_PER_WIDE_INT)
4071 hi = 0;
4072 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4074 else
4076 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4077 lo = (unsigned HOST_WIDE_INT) -1;
4080 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4082 if (TYPE_UNSIGNED (etype))
4084 etype = lang_hooks.types.signed_type (etype);
4085 exp = fold_convert (etype, exp);
4087 return fold_build2 (GT_EXPR, type, exp,
4088 build_int_cst (etype, 0));
4092 value = const_binop (MINUS_EXPR, high, low, 0);
4093 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4094 && ! TYPE_UNSIGNED (etype))
4096 tree utype, minv, maxv;
4098 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4099 for the type in question, as we rely on this here. */
4100 switch (TREE_CODE (etype))
4102 case INTEGER_TYPE:
4103 case ENUMERAL_TYPE:
4104 /* There is no requirement that LOW be within the range of ETYPE
4105 if the latter is a subtype. It must, however, be within the base
4106 type of ETYPE. So be sure we do the subtraction in that type. */
4107 if (TREE_TYPE (etype))
4108 etype = TREE_TYPE (etype);
4109 utype = lang_hooks.types.unsigned_type (etype);
4110 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4111 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4112 integer_one_node, 1);
4113 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4114 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4115 minv, 1, maxv, 1)))
4117 etype = utype;
4118 high = fold_convert (etype, high);
4119 low = fold_convert (etype, low);
4120 exp = fold_convert (etype, exp);
4121 value = const_binop (MINUS_EXPR, high, low, 0);
4123 break;
4124 default:
4125 break;
4129 if (value != 0 && ! TREE_OVERFLOW (value))
4131 /* There is no requirement that LOW be within the range of ETYPE
4132 if the latter is a subtype. It must, however, be within the base
4133 type of ETYPE. So be sure we do the subtraction in that type. */
4134 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4136 etype = TREE_TYPE (etype);
4137 exp = fold_convert (etype, exp);
4138 low = fold_convert (etype, low);
4139 value = fold_convert (etype, value);
4142 return build_range_check (type,
4143 fold_build2 (MINUS_EXPR, etype, exp, low),
4144 1, build_int_cst (etype, 0), value);
4147 return 0;
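/* For instance, the classic digit test: with in_p = 1, low = '0' and
   high = '9' on a plain signed char EXP, the code above switches to the
   unsigned type and builds (unsigned char) (exp - 48) <= 9 (assuming
   ASCII), one comparison instead of two. */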
4150 /* Given two ranges, see if we can merge them into one. Return 1 if we
4151 can, 0 if we can't. Set the output range into the specified parameters. */
4153 static int
4154 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4155 tree high0, int in1_p, tree low1, tree high1)
4157 int no_overlap;
4158 int subset;
4159 int temp;
4160 tree tem;
4161 int in_p;
4162 tree low, high;
4163 int lowequal = ((low0 == 0 && low1 == 0)
4164 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4165 low0, 0, low1, 0)));
4166 int highequal = ((high0 == 0 && high1 == 0)
4167 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4168 high0, 1, high1, 1)));
4170 /* Make range 0 be the range that starts first, or ends last if they
4171 start at the same value. Swap them if that is not the case. */
4172 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4173 low0, 0, low1, 0))
4174 || (lowequal
4175 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4176 high1, 1, high0, 1))))
4178 temp = in0_p, in0_p = in1_p, in1_p = temp;
4179 tem = low0, low0 = low1, low1 = tem;
4180 tem = high0, high0 = high1, high1 = tem;
4183 /* Now flag two cases, whether the ranges are disjoint or whether the
4184 second range is totally subsumed in the first. Note that the tests
4185 below are simplified by the ones above. */
4186 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4187 high0, 1, low1, 0));
4188 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4189 high1, 1, high0, 1));
4191 /* We now have four cases, depending on whether we are including or
4192 excluding the two ranges. */
4193 if (in0_p && in1_p)
4195 /* If they don't overlap, the result is false. If the second range
4196 is a subset, it is the result. Otherwise, the range is from the start
4197 of the second to the end of the first. */
4198 if (no_overlap)
4199 in_p = 0, low = high = 0;
4200 else if (subset)
4201 in_p = 1, low = low1, high = high1;
4202 else
4203 in_p = 1, low = low1, high = high0;
4206 else if (in0_p && ! in1_p)
4208 /* If they don't overlap, the result is the first range. If they are
4209 equal, the result is false. If the second range is a subset of the
4210 first, and the ranges begin at the same place, we go from just after
4211 the end of the first range to the end of the second. If the second
4212 range is not a subset of the first, or if it is a subset and both
4213 ranges end at the same place, the range starts at the start of the
4214 first range and ends just before the second range.
4215 Otherwise, we can't describe this as a single range. */
4216 if (no_overlap)
4217 in_p = 1, low = low0, high = high0;
4218 else if (lowequal && highequal)
4219 in_p = 0, low = high = 0;
4220 else if (subset && lowequal)
4222 in_p = 1, high = high0;
4223 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4224 integer_one_node, 0);
4226 else if (! subset || highequal)
4228 in_p = 1, low = low0;
4229 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4230 integer_one_node, 0);
4232 else
4233 return 0;
4236 else if (! in0_p && in1_p)
4238 /* If they don't overlap, the result is the second range. If the second
4239 is a subset of the first, the result is false. Otherwise,
4240 the range starts just after the first range and ends at the
4241 end of the second. */
4242 if (no_overlap)
4243 in_p = 1, low = low1, high = high1;
4244 else if (subset || highequal)
4245 in_p = 0, low = high = 0;
4246 else
4248 in_p = 1, high = high1;
4249 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4250 integer_one_node, 0);
4254 else
4256 /* The case where we are excluding both ranges. Here the complex case
4257 is if they don't overlap. In that case, the only time we have a
4258 range is if they are adjacent. If the second is a subset of the
4259 first, the result is the first. Otherwise, the range to exclude
4260 starts at the beginning of the first range and ends at the end of the
4261 second. */
4262 if (no_overlap)
4264 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4265 range_binop (PLUS_EXPR, NULL_TREE,
4266 high0, 1,
4267 integer_one_node, 1),
4268 1, low1, 0)))
4269 in_p = 0, low = low0, high = high1;
4270 else
4272 /* Canonicalize - [min, x] into - [-, x]. */
4273 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4274 switch (TREE_CODE (TREE_TYPE (low0)))
4276 case ENUMERAL_TYPE:
4277 if (TYPE_PRECISION (TREE_TYPE (low0))
4278 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4279 break;
4280 /* FALLTHROUGH */
4281 case INTEGER_TYPE:
4282 if (tree_int_cst_equal (low0,
4283 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4284 low0 = 0;
4285 break;
4286 case POINTER_TYPE:
4287 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4288 && integer_zerop (low0))
4289 low0 = 0;
4290 break;
4291 default:
4292 break;
4295 /* Canonicalize - [x, max] into - [x, -]. */
4296 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4297 switch (TREE_CODE (TREE_TYPE (high1)))
4299 case ENUMERAL_TYPE:
4300 if (TYPE_PRECISION (TREE_TYPE (high1))
4301 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4302 break;
4303 /* FALLTHROUGH */
4304 case INTEGER_TYPE:
4305 if (tree_int_cst_equal (high1,
4306 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4307 high1 = 0;
4308 break;
4309 case POINTER_TYPE:
4310 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4311 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4312 high1, 1,
4313 integer_one_node, 1)))
4314 high1 = 0;
4315 break;
4316 default:
4317 break;
4320 /* The ranges might also be adjacent between the maximum and
4321 minimum values of the given type. For
4322 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4323 return + [x + 1, y - 1]. */
4324 if (low0 == 0 && high1 == 0)
4326 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4327 integer_one_node, 1);
4328 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4329 integer_one_node, 0);
4330 if (low == 0 || high == 0)
4331 return 0;
4333 in_p = 1;
4335 else
4336 return 0;
4339 else if (subset)
4340 in_p = 0, low = low0, high = high0;
4341 else
4342 in_p = 0, low = low0, high = high1;
4345 *pin_p = in_p, *plow = low, *phigh = high;
4346 return 1;
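/* For instance, merging + [0, 9] with + [5, 20] (both "in", overlapping,
   neither a subset of the other) yields + [5, 9], while merging the
   disjoint + [0, 4] with + [10, 20] yields - [-, -], i.e. always false. */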
4350 /* Subroutine of fold, looking inside expressions of the form
4351 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4352 of the COND_EXPR. This function is being used also to optimize
4353 A op B ? C : A, by reversing the comparison first.
4355 Return a folded expression whose code is not a COND_EXPR
4356 anymore, or NULL_TREE if no folding opportunity is found. */
4358 static tree
4359 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4361 enum tree_code comp_code = TREE_CODE (arg0);
4362 tree arg00 = TREE_OPERAND (arg0, 0);
4363 tree arg01 = TREE_OPERAND (arg0, 1);
4364 tree arg1_type = TREE_TYPE (arg1);
4365 tree tem;
4367 STRIP_NOPS (arg1);
4368 STRIP_NOPS (arg2);
4370 /* If we have A op 0 ? A : -A, consider applying the following
4371 transformations:
4373 A == 0? A : -A same as -A
4374 A != 0? A : -A same as A
4375 A >= 0? A : -A same as abs (A)
4376 A > 0? A : -A same as abs (A)
4377 A <= 0? A : -A same as -abs (A)
4378 A < 0? A : -A same as -abs (A)
4380 None of these transformations work for modes with signed
4381 zeros. If A is +/-0, the first two transformations will
4382 change the sign of the result (from +0 to -0, or vice
4383 versa). The last four will fix the sign of the result,
4384 even though the original expressions could be positive or
4385 negative, depending on the sign of A.
4387 Note that all these transformations are correct if A is
4388 NaN, since the two alternatives (A and -A) are also NaNs. */
4389 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4390 ? real_zerop (arg01)
4391 : integer_zerop (arg01))
4392 && ((TREE_CODE (arg2) == NEGATE_EXPR
4393 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4394 /* In the case that A is of the form X-Y, '-A' (arg2) may
4395 have already been folded to Y-X, so check for that. */
4396 || (TREE_CODE (arg1) == MINUS_EXPR
4397 && TREE_CODE (arg2) == MINUS_EXPR
4398 && operand_equal_p (TREE_OPERAND (arg1, 0),
4399 TREE_OPERAND (arg2, 1), 0)
4400 && operand_equal_p (TREE_OPERAND (arg1, 1),
4401 TREE_OPERAND (arg2, 0), 0))))
4402 switch (comp_code)
4404 case EQ_EXPR:
4405 case UNEQ_EXPR:
4406 tem = fold_convert (arg1_type, arg1);
4407 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4408 case NE_EXPR:
4409 case LTGT_EXPR:
4410 return pedantic_non_lvalue (fold_convert (type, arg1));
4411 case UNGE_EXPR:
4412 case UNGT_EXPR:
4413 if (flag_trapping_math)
4414 break;
4415 /* Fall through. */
4416 case GE_EXPR:
4417 case GT_EXPR:
4418 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4419 arg1 = fold_convert (lang_hooks.types.signed_type
4420 (TREE_TYPE (arg1)), arg1);
4421 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4422 return pedantic_non_lvalue (fold_convert (type, tem));
4423 case UNLE_EXPR:
4424 case UNLT_EXPR:
4425 if (flag_trapping_math)
4426 break;
4427 case LE_EXPR:
4428 case LT_EXPR:
4429 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4430 arg1 = fold_convert (lang_hooks.types.signed_type
4431 (TREE_TYPE (arg1)), arg1);
4432 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4433 return negate_expr (fold_convert (type, tem));
4434 default:
4435 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4436 break;
4439 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4440 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4441 both transformations are correct when A is NaN: A != 0
4442 is then true, and A == 0 is false. */
4444 if (integer_zerop (arg01) && integer_zerop (arg2))
4446 if (comp_code == NE_EXPR)
4447 return pedantic_non_lvalue (fold_convert (type, arg1));
4448 else if (comp_code == EQ_EXPR)
4449 return build_int_cst (type, 0);
4452 /* Try some transformations of A op B ? A : B.
4454 A == B? A : B same as B
4455 A != B? A : B same as A
4456 A >= B? A : B same as max (A, B)
4457 A > B? A : B same as max (B, A)
4458 A <= B? A : B same as min (A, B)
4459 A < B? A : B same as min (B, A)
4461 As above, these transformations don't work in the presence
4462 of signed zeros. For example, if A and B are zeros of
4463 opposite sign, the first two transformations will change
4464 the sign of the result. In the last four, the original
4465 expressions give different results for (A=+0, B=-0) and
4466 (A=-0, B=+0), but the transformed expressions do not.
4468 The first two transformations are correct if either A or B
4469 is a NaN. In the first transformation, the condition will
4470 be false, and B will indeed be chosen. In the case of the
4471 second transformation, the condition A != B will be true,
4472 and A will be chosen.
4474 The conversions to max() and min() are not correct if B is
4475 a number and A is not. The conditions in the original
4476 expressions will be false, so all four give B. The min()
4477 and max() versions would give a NaN instead. */
4478 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4479 /* Avoid these transformations if the COND_EXPR may be used
4480 as an lvalue in the C++ front-end. PR c++/19199. */
4481 && (in_gimple_form
4482 || strcmp (lang_hooks.name, "GNU C++") != 0
4483 || ! maybe_lvalue_p (arg1)
4484 || ! maybe_lvalue_p (arg2)))
4486 tree comp_op0 = arg00;
4487 tree comp_op1 = arg01;
4488 tree comp_type = TREE_TYPE (comp_op0);
4490 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4491 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4493 comp_type = type;
4494 comp_op0 = arg1;
4495 comp_op1 = arg2;
4498 switch (comp_code)
4500 case EQ_EXPR:
4501 return pedantic_non_lvalue (fold_convert (type, arg2));
4502 case NE_EXPR:
4503 return pedantic_non_lvalue (fold_convert (type, arg1));
4504 case LE_EXPR:
4505 case LT_EXPR:
4506 case UNLE_EXPR:
4507 case UNLT_EXPR:
4508 /* In C++ a ?: expression can be an lvalue, so put the
4509 operand which will be used if they are equal first
4510 so that we can convert this back to the
4511 corresponding COND_EXPR. */
4512 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4514 comp_op0 = fold_convert (comp_type, comp_op0);
4515 comp_op1 = fold_convert (comp_type, comp_op1);
4516 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4517 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4518 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4519 return pedantic_non_lvalue (fold_convert (type, tem));
4521 break;
4522 case GE_EXPR:
4523 case GT_EXPR:
4524 case UNGE_EXPR:
4525 case UNGT_EXPR:
4526 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4528 comp_op0 = fold_convert (comp_type, comp_op0);
4529 comp_op1 = fold_convert (comp_type, comp_op1);
4530 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4531 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4532 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4533 return pedantic_non_lvalue (fold_convert (type, tem));
4535 break;
4536 case UNEQ_EXPR:
4537 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4538 return pedantic_non_lvalue (fold_convert (type, arg2));
4539 break;
4540 case LTGT_EXPR:
4541 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4542 return pedantic_non_lvalue (fold_convert (type, arg1));
4543 break;
4544 default:
4545 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4546 break;
4550 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4551 we might still be able to simplify this. For example,
4552 if C1 is one less or one more than C2, this might have started
4553 out as a MIN or MAX and been transformed by this function.
4554 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
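/* For instance, x < 6 ? x : 5 must have started life as MIN (x, 5):
   here C1 == 6 equals C2 + 1 == 5 + 1, so the LT_EXPR case below
   rebuilds the MIN_EXPR. */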
4556 if (INTEGRAL_TYPE_P (type)
4557 && TREE_CODE (arg01) == INTEGER_CST
4558 && TREE_CODE (arg2) == INTEGER_CST)
4559 switch (comp_code)
4561 case EQ_EXPR:
4562 /* We can replace A with C1 in this case. */
4563 arg1 = fold_convert (type, arg01);
4564 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4566 case LT_EXPR:
4567 /* If C1 is C2 + 1, this is min(A, C2). */
4568 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4569 OEP_ONLY_CONST)
4570 && operand_equal_p (arg01,
4571 const_binop (PLUS_EXPR, arg2,
4572 integer_one_node, 0),
4573 OEP_ONLY_CONST))
4574 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4575 type, arg1, arg2));
4576 break;
4578 case LE_EXPR:
4579 /* If C1 is C2 - 1, this is min(A, C2). */
4580 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4581 OEP_ONLY_CONST)
4582 && operand_equal_p (arg01,
4583 const_binop (MINUS_EXPR, arg2,
4584 integer_one_node, 0),
4585 OEP_ONLY_CONST))
4586 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4587 type, arg1, arg2));
4588 break;
4590 case GT_EXPR:
4591 /* If C1 is C2 - 1, this is max(A, C2). */
4592 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4593 OEP_ONLY_CONST)
4594 && operand_equal_p (arg01,
4595 const_binop (MINUS_EXPR, arg2,
4596 integer_one_node, 0),
4597 OEP_ONLY_CONST))
4598 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4599 type, arg1, arg2));
4600 break;
4602 case GE_EXPR:
4603 /* If C1 is C2 + 1, this is max(A, C2). */
4604 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4605 OEP_ONLY_CONST)
4606 && operand_equal_p (arg01,
4607 const_binop (PLUS_EXPR, arg2,
4608 integer_one_node, 0),
4609 OEP_ONLY_CONST))
4610 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4611 type, arg1, arg2));
4612 break;
4613 case NE_EXPR:
4614 break;
4615 default:
4616 gcc_unreachable ();
4619 return NULL_TREE;
4624 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4625 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4626 #endif
4628 /* EXP is some logical combination of boolean tests. See if we can
4629 merge it into some range test. Return the new tree if so. */
4631 static tree
4632 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4634 int or_op = (code == TRUTH_ORIF_EXPR
4635 || code == TRUTH_OR_EXPR);
4636 int in0_p, in1_p, in_p;
4637 tree low0, low1, low, high0, high1, high;
4638 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4639 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4640 tree tem;
4642 /* If this is an OR operation, invert both sides; we will invert
4643 again at the end. */
4644 if (or_op)
4645 in0_p = ! in0_p, in1_p = ! in1_p;
4647 /* If both expressions are the same, if we can merge the ranges, and we
4648 can build the range test, return it or it inverted. If one of the
4649 ranges is always true or always false, consider it to be the same
4650 expression as the other. */
4651 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4652 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4653 in1_p, low1, high1)
4654 && 0 != (tem = (build_range_check (type,
4655 lhs != 0 ? lhs
4656 : rhs != 0 ? rhs : integer_zero_node,
4657 in_p, low, high))))
4658 return or_op ? invert_truthvalue (tem) : tem;
4660 /* On machines where the branch cost is expensive, if this is a
4661 short-circuited branch and the underlying object on both sides
4662 is the same, make a non-short-circuit operation. */
4663 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4664 && lhs != 0 && rhs != 0
4665 && (code == TRUTH_ANDIF_EXPR
4666 || code == TRUTH_ORIF_EXPR)
4667 && operand_equal_p (lhs, rhs, 0))
4669 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4670 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4671 which cases we can't do this. */
4672 if (simple_operand_p (lhs))
4673 return build2 (code == TRUTH_ANDIF_EXPR
4674 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4675 type, op0, op1);
4677 else if (lang_hooks.decls.global_bindings_p () == 0
4678 && ! CONTAINS_PLACEHOLDER_P (lhs))
4680 tree common = save_expr (lhs);
4682 if (0 != (lhs = build_range_check (type, common,
4683 or_op ? ! in0_p : in0_p,
4684 low0, high0))
4685 && (0 != (rhs = build_range_check (type, common,
4686 or_op ? ! in1_p : in1_p,
4687 low1, high1))))
4688 return build2 (code == TRUTH_ANDIF_EXPR
4689 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4690 type, lhs, rhs);
4694 return 0;
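/* For instance, x < 5 || x > 10 makes the ranges - [5, -] and - [-, 10];
   both are inverted for the "or", merged to + [5, 10], and the built
   check is inverted back, giving (unsigned) (x - 5) > 5. */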
4697 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4698 bit value. Arrange things so the extra bits will be set to zero if and
4699 only if C is sign-extended to its full width. If MASK is nonzero,
4700 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4702 static tree
4703 unextend (tree c, int p, int unsignedp, tree mask)
4705 tree type = TREE_TYPE (c);
4706 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4707 tree temp;
4709 if (p == modesize || unsignedp)
4710 return c;
4712 /* We work by getting just the sign bit into the low-order bit, then
4713 into the high-order bit, then sign-extend. We then XOR that value
4714 with C. */
4715 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4716 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4718 /* We must use a signed type in order to get an arithmetic right shift.
4719 However, we must also avoid introducing accidental overflows, so that
4720 a subsequent call to integer_zerop will work. Hence we must
4721 do the type conversion here. At this point, the constant is either
4722 zero or one, and the conversion to a signed type can never overflow.
4723 We could get an overflow if this conversion is done anywhere else. */
4724 if (TYPE_UNSIGNED (type))
4725 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4727 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4728 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4729 if (mask != 0)
4730 temp = const_binop (BIT_AND_EXPR, temp,
4731 fold_convert (TREE_TYPE (c), mask), 0);
4732 /* If necessary, convert the type back to match the type of C. */
4733 if (TYPE_UNSIGNED (type))
4734 temp = fold_convert (type, temp);
4736 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
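/* Worked example, with modesize 8 and p 4: for c = 0xfa (the 4-bit field
   value 1010 sign-extended), temp ends up 0xf0 and c ^ temp is 0x0a, so
   the extra bits are zero; for c = 0x0a (the same field value
   zero-extended), c ^ temp is 0xfa and the extra bits are nonzero. */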
4739 /* Find ways of folding logical expressions of LHS and RHS:
4740 Try to merge two comparisons to the same innermost item.
4741 Look for range tests like "ch >= '0' && ch <= '9'".
4742 Look for combinations of simple terms on machines with expensive branches
4743 and evaluate the RHS unconditionally.
4745 For example, if we have p->a == 2 && p->b == 4 and we can make an
4746 object large enough to span both A and B, we can do this with a comparison
4747 against the object ANDed with a mask.
4749 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4750 operations to do this with one comparison.
4752 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4753 function and the one above.
4755 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4756 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4758 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
4759 two operands.
4761 We return the simplified tree or 0 if no optimization is possible. */
4763 static tree
4764 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4766 /* If this is the "or" of two comparisons, we can do something if
4767 the comparisons are NE_EXPR. If this is the "and", we can do something
4768 if the comparisons are EQ_EXPR. I.e.,
4769 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4771 WANTED_CODE is this operation code. For single bit fields, we can
4772 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4773 comparison for one-bit fields. */
4775 enum tree_code wanted_code;
4776 enum tree_code lcode, rcode;
4777 tree ll_arg, lr_arg, rl_arg, rr_arg;
4778 tree ll_inner, lr_inner, rl_inner, rr_inner;
4779 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4780 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4781 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4782 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4783 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4784 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4785 enum machine_mode lnmode, rnmode;
4786 tree ll_mask, lr_mask, rl_mask, rr_mask;
4787 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4788 tree l_const, r_const;
4789 tree lntype, rntype, result;
4790 int first_bit, end_bit;
4791 int volatilep;
4793 /* Start by getting the comparison codes. Fail if anything is volatile.
4794 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4795 it were surrounded with a NE_EXPR. */
4797 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4798 return 0;
4800 lcode = TREE_CODE (lhs);
4801 rcode = TREE_CODE (rhs);
4803 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4805 lhs = build2 (NE_EXPR, truth_type, lhs,
4806 build_int_cst (TREE_TYPE (lhs), 0));
4807 lcode = NE_EXPR;
4810 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4812 rhs = build2 (NE_EXPR, truth_type, rhs,
4813 build_int_cst (TREE_TYPE (rhs), 0));
4814 rcode = NE_EXPR;
4817 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4818 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4819 return 0;
4821 ll_arg = TREE_OPERAND (lhs, 0);
4822 lr_arg = TREE_OPERAND (lhs, 1);
4823 rl_arg = TREE_OPERAND (rhs, 0);
4824 rr_arg = TREE_OPERAND (rhs, 1);
4826 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4827 if (simple_operand_p (ll_arg)
4828 && simple_operand_p (lr_arg))
4830 tree result;
4831 if (operand_equal_p (ll_arg, rl_arg, 0)
4832 && operand_equal_p (lr_arg, rr_arg, 0))
4834 result = combine_comparisons (code, lcode, rcode,
4835 truth_type, ll_arg, lr_arg);
4836 if (result)
4837 return result;
4839 else if (operand_equal_p (ll_arg, rr_arg, 0)
4840 && operand_equal_p (lr_arg, rl_arg, 0))
4842 result = combine_comparisons (code, lcode,
4843 swap_tree_comparison (rcode),
4844 truth_type, ll_arg, lr_arg);
4845 if (result)
4846 return result;
4850 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4851 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4853 /* If the RHS can be evaluated unconditionally and its operands are
4854 simple, it wins to evaluate the RHS unconditionally on machines
4855 with expensive branches. In this case, this isn't a comparison
4856 that can be merged. Avoid doing this if the RHS is a floating-point
4857 comparison since those can trap. */
4859 if (BRANCH_COST >= 2
4860 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4861 && simple_operand_p (rl_arg)
4862 && simple_operand_p (rr_arg))
4864 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4865 if (code == TRUTH_OR_EXPR
4866 && lcode == NE_EXPR && integer_zerop (lr_arg)
4867 && rcode == NE_EXPR && integer_zerop (rr_arg)
4868 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4869 return build2 (NE_EXPR, truth_type,
4870 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4871 ll_arg, rl_arg),
4872 build_int_cst (TREE_TYPE (ll_arg), 0));
4874 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4875 if (code == TRUTH_AND_EXPR
4876 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4877 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4878 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4879 return build2 (EQ_EXPR, truth_type,
4880 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4881 ll_arg, rl_arg),
4882 build_int_cst (TREE_TYPE (ll_arg), 0));
4884 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4885 return build2 (code, truth_type, lhs, rhs);
4888 /* See if the comparisons can be merged. Then get all the parameters for
4889 each side. */
4891 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4892 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4893 return 0;
4895 volatilep = 0;
4896 ll_inner = decode_field_reference (ll_arg,
4897 &ll_bitsize, &ll_bitpos, &ll_mode,
4898 &ll_unsignedp, &volatilep, &ll_mask,
4899 &ll_and_mask);
4900 lr_inner = decode_field_reference (lr_arg,
4901 &lr_bitsize, &lr_bitpos, &lr_mode,
4902 &lr_unsignedp, &volatilep, &lr_mask,
4903 &lr_and_mask);
4904 rl_inner = decode_field_reference (rl_arg,
4905 &rl_bitsize, &rl_bitpos, &rl_mode,
4906 &rl_unsignedp, &volatilep, &rl_mask,
4907 &rl_and_mask);
4908 rr_inner = decode_field_reference (rr_arg,
4909 &rr_bitsize, &rr_bitpos, &rr_mode,
4910 &rr_unsignedp, &volatilep, &rr_mask,
4911 &rr_and_mask);
4913 /* The inner operation on the lhs of each comparison must be the
4914 same if we are to be able to do anything.
4915 Then see if we have constants. If not, the same must be true for
4916 the rhs's. */
4917 if (volatilep || ll_inner == 0 || rl_inner == 0
4918 || ! operand_equal_p (ll_inner, rl_inner, 0))
4919 return 0;
4921 if (TREE_CODE (lr_arg) == INTEGER_CST
4922 && TREE_CODE (rr_arg) == INTEGER_CST)
4923 l_const = lr_arg, r_const = rr_arg;
4924 else if (lr_inner == 0 || rr_inner == 0
4925 || ! operand_equal_p (lr_inner, rr_inner, 0))
4926 return 0;
4927 else
4928 l_const = r_const = 0;
4930 /* If either comparison code is not correct for our logical operation,
4931 fail. However, we can convert a one-bit comparison against zero into
4932 the opposite comparison against that bit being set in the field. */
4934 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4935 if (lcode != wanted_code)
4937 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4939 /* Make the left operand unsigned, since we are only interested
4940 in the value of one bit. Otherwise we are doing the wrong
4941 thing below. */
4942 ll_unsignedp = 1;
4943 l_const = ll_mask;
4945 else
4946 return 0;
4949 /* This is analogous to the code for l_const above. */
4950 if (rcode != wanted_code)
4952 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4954 rl_unsignedp = 1;
4955 r_const = rl_mask;
4957 else
4958 return 0;
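/* For instance, in (x & 4) != 0 && (x & 8) != 0 the wanted code is
   EQ_EXPR; the two conversions above turn each side into a test of the
   bit being set, (x & 4) == 4 and (x & 8) == 8, which the code below
   merges into (x & 12) == 12. */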
4961 /* After this point all optimizations will generate bit-field
4962 references, which we might not want. */
4963 if (! lang_hooks.can_use_bit_fields_p ())
4964 return 0;
4966 /* See if we can find a mode that contains both fields being compared on
4967 the left. If we can't, fail. Otherwise, update all constants and masks
4968 to be relative to a field of that size. */
4969 first_bit = MIN (ll_bitpos, rl_bitpos);
4970 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4971 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4972 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4973 volatilep);
4974 if (lnmode == VOIDmode)
4975 return 0;
4977 lnbitsize = GET_MODE_BITSIZE (lnmode);
4978 lnbitpos = first_bit & ~ (lnbitsize - 1);
4979 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4980 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4982 if (BYTES_BIG_ENDIAN)
4984 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4985 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4988 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4989 size_int (xll_bitpos), 0);
4990 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4991 size_int (xrl_bitpos), 0);
4993 if (l_const)
4995 l_const = fold_convert (lntype, l_const);
4996 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4997 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4998 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4999 fold_build1 (BIT_NOT_EXPR,
5000 lntype, ll_mask),
5001 0)))
5003 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5005 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5008 if (r_const)
5010 r_const = fold_convert (lntype, r_const);
5011 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5012 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5013 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5014 fold_build1 (BIT_NOT_EXPR,
5015 lntype, rl_mask),
5016 0)))
5018 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5020 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5024 /* If the right sides are not constant, do the same for them. Also,
5025 disallow this optimization if a size or signedness mismatch occurs
5026 between the left and right sides. */
5027 if (l_const == 0)
5029 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5030 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5031 /* Make sure the two fields on the right
5032 correspond to the left without being swapped. */
5033 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5034 return 0;
5036 first_bit = MIN (lr_bitpos, rr_bitpos);
5037 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5038 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5039 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5040 volatilep);
5041 if (rnmode == VOIDmode)
5042 return 0;
5044 rnbitsize = GET_MODE_BITSIZE (rnmode);
5045 rnbitpos = first_bit & ~ (rnbitsize - 1);
5046 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5047 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5049 if (BYTES_BIG_ENDIAN)
5051 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5052 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5055 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5056 size_int (xlr_bitpos), 0);
5057 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5058 size_int (xrr_bitpos), 0);
5060 /* Make a mask that corresponds to both fields being compared.
5061 Do this for both items being compared. If the operands are the
5062 same size and the bits being compared are in the same position
5063 then we can do this by masking both and comparing the masked
5064 results. */
5065 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5066 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5067 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5069 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5070 ll_unsignedp || rl_unsignedp);
5071 if (! all_ones_mask_p (ll_mask, lnbitsize))
5072 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5074 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5075 lr_unsignedp || rr_unsignedp);
5076 if (! all_ones_mask_p (lr_mask, rnbitsize))
5077 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5079 return build2 (wanted_code, truth_type, lhs, rhs);
5082 /* There is still another way we can do something: If both pairs of
5083 fields being compared are adjacent, we may be able to make a wider
5084 field containing them both.
5086 Note that we still must mask the lhs/rhs expressions. Furthermore,
5087 the mask must be shifted to account for the shift done by
5088 make_bit_field_ref. */
5089 if ((ll_bitsize + ll_bitpos == rl_bitpos
5090 && lr_bitsize + lr_bitpos == rr_bitpos)
5091 || (ll_bitpos == rl_bitpos + rl_bitsize
5092 && lr_bitpos == rr_bitpos + rr_bitsize))
5094 tree type;
5096 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5097 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5098 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5099 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5101 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5102 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5103 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5104 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5106 /* Convert to the smaller type before masking out unwanted bits. */
5107 type = lntype;
5108 if (lntype != rntype)
5110 if (lnbitsize > rnbitsize)
5112 lhs = fold_convert (rntype, lhs);
5113 ll_mask = fold_convert (rntype, ll_mask);
5114 type = rntype;
5116 else if (lnbitsize < rnbitsize)
5118 rhs = fold_convert (lntype, rhs);
5119 lr_mask = fold_convert (lntype, lr_mask);
5120 type = lntype;
5124 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5125 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5127 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5128 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5130 return build2 (wanted_code, truth_type, lhs, rhs);
5133 return 0;
5136 /* Handle the case of comparisons with constants. If there is something in
5137 common between the masks, those bits of the constants must be the same.
5138 If not, the condition is always false for "and" and always true for
5139 "or"; test for this to avoid generating incorrect code below. */
5140 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5141 if (! integer_zerop (result)
5142 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5143 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5145 if (wanted_code == NE_EXPR)
5147 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5148 return constant_boolean_node (true, truth_type);
5150 else
5152 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5153 return constant_boolean_node (false, truth_type);
5157 /* Construct the expression we will return. First get the component
5158 reference we will make. Unless the mask is all ones the width of
5159 that field, perform the mask operation. Then compare with the
5160 merged constant. */
5161 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5162 ll_unsignedp || rl_unsignedp);
5164 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5165 if (! all_ones_mask_p (ll_mask, lnbitsize))
5166 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5168 return build2 (wanted_code, truth_type, result,
5169 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5172 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5173 constant. */
5175 static tree
5176 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5178 tree arg0 = op0;
5179 enum tree_code op_code;
5180 tree comp_const = op1;
5181 tree minmax_const;
5182 int consts_equal, consts_lt;
5183 tree inner;
5185 STRIP_SIGN_NOPS (arg0);
5187 op_code = TREE_CODE (arg0);
5188 minmax_const = TREE_OPERAND (arg0, 1);
5189 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5190 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5191 inner = TREE_OPERAND (arg0, 0);
5193 /* If something does not permit us to optimize, return NULL_TREE. */
5194 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5195 || TREE_CODE (comp_const) != INTEGER_CST
5196 || TREE_CONSTANT_OVERFLOW (comp_const)
5197 || TREE_CODE (minmax_const) != INTEGER_CST
5198 || TREE_CONSTANT_OVERFLOW (minmax_const))
5199 return NULL_TREE;
5201 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5202 and GT_EXPR, doing the rest with recursive calls using logical
5203 simplifications. */
5204 switch (code)
5206 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5208 /* FIXME: We should be able to invert code without building a
5209 scratch tree node, but doing so would require us to
5210 duplicate a part of invert_truthvalue here. */
5211 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5212 tem = optimize_minmax_comparison (TREE_CODE (tem),
5213 TREE_TYPE (tem),
5214 TREE_OPERAND (tem, 0),
5215 TREE_OPERAND (tem, 1));
5216 return invert_truthvalue (tem);
5219 case GE_EXPR:
5220 return
5221 fold_build2 (TRUTH_ORIF_EXPR, type,
5222 optimize_minmax_comparison
5223 (EQ_EXPR, type, arg0, comp_const),
5224 optimize_minmax_comparison
5225 (GT_EXPR, type, arg0, comp_const));
5227 case EQ_EXPR:
5228 if (op_code == MAX_EXPR && consts_equal)
5229 /* MAX (X, 0) == 0 -> X <= 0 */
5230 return fold_build2 (LE_EXPR, type, inner, comp_const);
5232 else if (op_code == MAX_EXPR && consts_lt)
5233 /* MAX (X, 0) == 5 -> X == 5 */
5234 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5236 else if (op_code == MAX_EXPR)
5237 /* MAX (X, 0) == -1 -> false */
5238 return omit_one_operand (type, integer_zero_node, inner);
5240 else if (consts_equal)
5241 /* MIN (X, 0) == 0 -> X >= 0 */
5242 return fold_build2 (GE_EXPR, type, inner, comp_const);
5244 else if (consts_lt)
5245 /* MIN (X, 0) == 5 -> false */
5246 return omit_one_operand (type, integer_zero_node, inner);
5248 else
5249 /* MIN (X, 0) == -1 -> X == -1 */
5250 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5252 case GT_EXPR:
5253 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5254 /* MAX (X, 0) > 0 -> X > 0
5255 MAX (X, 0) > 5 -> X > 5 */
5256 return fold_build2 (GT_EXPR, type, inner, comp_const);
5258 else if (op_code == MAX_EXPR)
5259 /* MAX (X, 0) > -1 -> true */
5260 return omit_one_operand (type, integer_one_node, inner);
5262 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5263 /* MIN (X, 0) > 0 -> false
5264 MIN (X, 0) > 5 -> false */
5265 return omit_one_operand (type, integer_zero_node, inner);
5267 else
5268 /* MIN (X, 0) > -1 -> X > -1 */
5269 return fold_build2 (GT_EXPR, type, inner, comp_const);
5271 default:
5272 return NULL_TREE;
5276 /* T is an integer expression that is being multiplied by, divided by, or
5277 reduced modulo a constant C (CODE says which operation and what kind of
5278 divide or modulus). See if we can eliminate that operation by folding it with
5279 other operations already in T. WIDE_TYPE, if non-null, is a type that
5280 should be used for the computation if wider than our type.
5282 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5283 (X * 2) + (Y * 4). We must, however, be assured that either the original
5284 expression would not overflow or that overflow is undefined for the type
5285 in the language in question.
5287 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5288 the machine has a multiply-accumulate insn or that this is part of an
5289 addressing calculation.
5291 If we return a non-null expression, it is an equivalent form of the
5292 original computation, but need not be in the original type. */
5294 static tree
5295 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5297 /* To avoid exponential search depth, refuse to allow recursion past
5298 three levels. Beyond that (1) it's highly unlikely that we'll find
5299 something interesting and (2) we've probably processed it before
5300 when we built the inner expression. */
5302 static int depth;
5303 tree ret;
5305 if (depth > 3)
5306 return NULL;
5308 depth++;
5309 ret = extract_muldiv_1 (t, c, code, wide_type);
5310 depth--;
5312 return ret;
5315 static tree
5316 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5318 tree type = TREE_TYPE (t);
5319 enum tree_code tcode = TREE_CODE (t);
5320 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5321 > GET_MODE_SIZE (TYPE_MODE (type)))
5322 ? wide_type : type);
5323 tree t1, t2;
5324 int same_p = tcode == code;
5325 tree op0 = NULL_TREE, op1 = NULL_TREE;
5327 /* Don't deal with constants of zero here; they confuse the code below. */
5328 if (integer_zerop (c))
5329 return NULL_TREE;
5331 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5332 op0 = TREE_OPERAND (t, 0);
5334 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5335 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5337 /* Note that we need not handle conditional operations here since fold
5338 already handles those cases. So just do arithmetic here. */
5339 switch (tcode)
5341 case INTEGER_CST:
5342 /* For a constant, we can always simplify if we are a multiply
5343 or (for divide and modulus) if it is a multiple of our constant. */
5344 if (code == MULT_EXPR
5345 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5346 return const_binop (code, fold_convert (ctype, t),
5347 fold_convert (ctype, c), 0);
5348 break;
5350 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5351 /* If op0 is an expression ... */
5352 if ((COMPARISON_CLASS_P (op0)
5353 || UNARY_CLASS_P (op0)
5354 || BINARY_CLASS_P (op0)
5355 || EXPRESSION_CLASS_P (op0))
5356 /* ... and is unsigned, and its type is smaller than ctype,
5357 then we cannot pass through as widening. */
5358 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5359 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5360 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5361 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5362 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5363 /* ... or this is a truncation (t is narrower than op0),
5364 then we cannot pass through this narrowing. */
5365 || (GET_MODE_SIZE (TYPE_MODE (type))
5366 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5367 /* ... or signedness changes for division or modulus,
5368 then we cannot pass through this conversion. */
5369 || (code != MULT_EXPR
5370 && (TYPE_UNSIGNED (ctype)
5371 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5372 break;
5374 /* Pass the constant down and see if we can make a simplification. If
5375 we can, replace this expression with the inner simplification for
5376 possible later conversion to our type or some other one. */
5377 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5378 && TREE_CODE (t2) == INTEGER_CST
5379 && ! TREE_CONSTANT_OVERFLOW (t2)
5380 && (0 != (t1 = extract_muldiv (op0, t2, code,
5381 code == MULT_EXPR
5382 ? ctype : NULL_TREE))))
5383 return t1;
5384 break;
5386 case ABS_EXPR:
5387 /* If widening the type changes it from signed to unsigned, then we
5388 must avoid building ABS_EXPR itself as unsigned. */
5389 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5391 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5392 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5394 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5395 return fold_convert (ctype, t1);
5397 break;
5399 /* FALLTHROUGH */
5400 case NEGATE_EXPR:
5401 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5402 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5403 break;
5405 case MIN_EXPR: case MAX_EXPR:
5406 /* If widening the type changes the signedness, then we can't perform
5407 this optimization as that changes the result. */
5408 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5409 break;
5411 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5412 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5413 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5415 if (tree_int_cst_sgn (c) < 0)
5416 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5418 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5419 fold_convert (ctype, t2));
5421 break;
5423 case LSHIFT_EXPR: case RSHIFT_EXPR:
5424 /* If the second operand is constant, this is a multiplication
5425 or floor division by a power of two, so we can treat it that
5426 way unless the multiplier or divisor overflows. Signed
5427 left-shift overflow is implementation-defined rather than
5428 undefined in C90, so do not convert signed left shift into
5429 multiplication. */
5430 if (TREE_CODE (op1) == INTEGER_CST
5431 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5432 /* const_binop may not detect overflow correctly,
5433 so check for it explicitly here. */
5434 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5435 && TREE_INT_CST_HIGH (op1) == 0
5436 && 0 != (t1 = fold_convert (ctype,
5437 const_binop (LSHIFT_EXPR,
5438 size_one_node,
5439 op1, 0)))
5440 && ! TREE_OVERFLOW (t1))
5441 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5442 ? MULT_EXPR : FLOOR_DIV_EXPR,
5443 ctype, fold_convert (ctype, op0), t1),
5444 c, code, wide_type);
5445 break;
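/* For instance, for unsigned x the tree (x << 2) * 4 is handled here as
   (x * 4) * 4 and folds to x * 16. */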
5447 case PLUS_EXPR: case MINUS_EXPR:
5448 /* See if we can eliminate the operation on both sides. If we can, we
5449 can return a new PLUS or MINUS. If we can't, the only remaining
5450 cases where we can do anything are if the second operand is a
5451 constant. */
5452 t1 = extract_muldiv (op0, c, code, wide_type);
5453 t2 = extract_muldiv (op1, c, code, wide_type);
5454 if (t1 != 0 && t2 != 0
5455 && (code == MULT_EXPR
5456 /* If not multiplication, we can only do this if both operands
5457 are divisible by c. */
5458 || (multiple_of_p (ctype, op0, c)
5459 && multiple_of_p (ctype, op1, c))))
5460 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5461 fold_convert (ctype, t2));
5463 /* If this was a subtraction, negate OP1 and set it to be an addition.
5464 This simplifies the logic below. */
5465 if (tcode == MINUS_EXPR)
5466 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5468 if (TREE_CODE (op1) != INTEGER_CST)
5469 break;
5471 /* If either OP1 or C is negative, this optimization is not safe for
5472 some of the division and remainder types, while for others we need
5473 to change the code. */
5474 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5476 if (code == CEIL_DIV_EXPR)
5477 code = FLOOR_DIV_EXPR;
5478 else if (code == FLOOR_DIV_EXPR)
5479 code = CEIL_DIV_EXPR;
5480 else if (code != MULT_EXPR
5481 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5482 break;
5485 /* If it's a multiply or a division/modulus operation of a multiple
5486 of our constant, do the operation and verify it doesn't overflow. */
5487 if (code == MULT_EXPR
5488 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5490 op1 = const_binop (code, fold_convert (ctype, op1),
5491 fold_convert (ctype, c), 0);
5492 /* We allow the constant to overflow with wrapping semantics. */
5493 if (op1 == 0
5494 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5495 break;
5497 else
5498 break;
5500 /* If we have an unsigned type that is not a sizetype, we cannot widen
5501 the operation since it will change the result if the original
5502 computation overflowed. */
5503 if (TYPE_UNSIGNED (ctype)
5504 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5505 && ctype != type)
5506 break;
5508 /* If we were able to eliminate our operation from the first side,
5509 apply our operation to the second side and reform the PLUS. */
5510 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5511 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5513 /* The last case is if we are a multiply. In that case, we can
5514 apply the distributive law to commute the multiply and addition
5515 if the multiplication of the constants doesn't overflow. */
5516 if (code == MULT_EXPR)
5517 return fold_build2 (tcode, ctype,
5518 fold_build2 (code, ctype,
5519 fold_convert (ctype, op0),
5520 fold_convert (ctype, c)),
5521 op1);
5523 break;
5525 case MULT_EXPR:
5526 /* We have a special case here if we are doing something like
5527 (C * 8) % 4 since we know that's zero. */
5528 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5529 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5530 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5531 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5532 return omit_one_operand (type, integer_zero_node, op0);
5534 /* ... fall through ... */
5536 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5537 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5538 /* If we can extract our operation from the LHS, do so and return a
5539 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5540 do something only if the second operand is a constant. */
5541 if (same_p
5542 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5543 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5544 fold_convert (ctype, op1));
5545 else if (tcode == MULT_EXPR && code == MULT_EXPR
5546 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5547 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5548 fold_convert (ctype, t1));
5549 else if (TREE_CODE (op1) != INTEGER_CST)
5550 return 0;
5552 /* If these are the same operation types, we can associate them
5553 assuming no overflow. */
5554 if (tcode == code
5555 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5556 fold_convert (ctype, c), 0))
5557 && ! TREE_OVERFLOW (t1))
5558 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5560 /* If these operations "cancel" each other, we have the main
5561 optimizations of this pass, which occur when either constant is a
5562 multiple of the other, in which case we replace this with an
5563 operation of either CODE or TCODE.
5565 If we have an unsigned type that is not a sizetype, we cannot do
5566 this since it will change the result if the original computation
5567 overflowed. */
5568 if ((! TYPE_UNSIGNED (ctype)
5569 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5570 && ! flag_wrapv
5571 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5572 || (tcode == MULT_EXPR
5573 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5574 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5576 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5577 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5578 fold_convert (ctype,
5579 const_binop (TRUNC_DIV_EXPR,
5580 op1, c, 0)));
5581 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5582 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5583 fold_convert (ctype,
5584 const_binop (TRUNC_DIV_EXPR,
5585 c, op1, 0)));
5587 break;
5589 default:
5590 break;
5593 return 0;
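/* [Editor's illustrative sketch, not part of the original file.]
   The LSHIFT/RSHIFT case above treats a constant shift of a
   hypothetical unsigned operand A as a multiply or floor division
   by a power of two, so extract_muldiv handles

     (A << 3)  as  A * 8
     (A >> 3)  as  A / 8   (floor division)

   provided 1 << 3 fits sizetype without overflow.  */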
5596 /* Return a node which has the indicated constant VALUE (either 0 or
5597 1), and is of the indicated TYPE. */
5599 tree
5600 constant_boolean_node (int value, tree type)
5602 if (type == integer_type_node)
5603 return value ? integer_one_node : integer_zero_node;
5604 else if (type == boolean_type_node)
5605 return value ? boolean_true_node : boolean_false_node;
5606 else
5607 return build_int_cst (type, value);
5611 /* Return true if expr looks like an ARRAY_REF and set base and
5612 offset to the appropriate trees. If there is no offset,
5613 offset is set to NULL_TREE. Base will be canonicalized to
5614 something you can get the element type from using
5615 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5616 in bytes from the base. */
5618 static bool
5619 extract_array_ref (tree expr, tree *base, tree *offset)
5621 /* One canonical form is a PLUS_EXPR with the first
5622 argument being an ADDR_EXPR with a possible NOP_EXPR
5623 attached. */
5624 if (TREE_CODE (expr) == PLUS_EXPR)
5626 tree op0 = TREE_OPERAND (expr, 0);
5627 tree inner_base, dummy1;
5628 /* Strip NOP_EXPRs here because the C frontends and/or
5629 folders may present us with (int *)&x.a + 4B. */
5630 STRIP_NOPS (op0);
5631 if (extract_array_ref (op0, &inner_base, &dummy1))
5633 *base = inner_base;
5634 if (dummy1 == NULL_TREE)
5635 *offset = TREE_OPERAND (expr, 1);
5636 else
5637 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5638 dummy1, TREE_OPERAND (expr, 1));
5639 return true;
5642 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5643 which we transform into an ADDR_EXPR with appropriate
5644 offset. For other arguments to the ADDR_EXPR we assume
5645 zero offset and as such do not care about the ADDR_EXPR
5646 type and strip possible nops from it. */
5647 else if (TREE_CODE (expr) == ADDR_EXPR)
5649 tree op0 = TREE_OPERAND (expr, 0);
5650 if (TREE_CODE (op0) == ARRAY_REF)
5652 tree idx = TREE_OPERAND (op0, 1);
5653 *base = TREE_OPERAND (op0, 0);
5654 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5655 array_ref_element_size (op0));
5657 else
5659 /* Handle array-to-pointer decay as &a. */
5660 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5661 *base = TREE_OPERAND (expr, 0);
5662 else
5663 *base = expr;
5664 *offset = NULL_TREE;
5666 return true;
5668 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5669 else if (SSA_VAR_P (expr)
5670 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5672 *base = expr;
5673 *offset = NULL_TREE;
5674 return true;
5677 return false;
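/* [Editor's illustrative sketch, not part of the original file.]
   Hypothetical inputs and the base/offset pairs extract_array_ref
   would produce for them:

     &a[4]       -> base a, offset 4 * sizeof (a[0])
     &a[4] + 16  -> base a, offset 4 * sizeof (a[0]) + 16
     p           -> base p, offset NULL_TREE  (p a pointer variable)  */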
5681 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5682 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5683 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5684 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5685 COND is the first argument to CODE; otherwise (as in the example
5686 given here), it is the second argument. TYPE is the type of the
5687 original expression. Return NULL_TREE if no simplification is
5688 possible. */
5690 static tree
5691 fold_binary_op_with_conditional_arg (enum tree_code code,
5692 tree type, tree op0, tree op1,
5693 tree cond, tree arg, int cond_first_p)
5695 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5696 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5697 tree test, true_value, false_value;
5698 tree lhs = NULL_TREE;
5699 tree rhs = NULL_TREE;
5701 /* This transformation is only worthwhile if we don't have to wrap
5702 arg in a SAVE_EXPR, and the operation can be simplified on at least
5703 one of the branches once it's pushed inside the COND_EXPR. */
5704 if (!TREE_CONSTANT (arg))
5705 return NULL_TREE;
5707 if (TREE_CODE (cond) == COND_EXPR)
5709 test = TREE_OPERAND (cond, 0);
5710 true_value = TREE_OPERAND (cond, 1);
5711 false_value = TREE_OPERAND (cond, 2);
5712 /* If this operand throws an exception, then it does not make
5713 sense to try to perform a logical or arithmetic operation
5714 involving it. */
5715 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5716 lhs = true_value;
5717 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5718 rhs = false_value;
5720 else
5722 tree testtype = TREE_TYPE (cond);
5723 test = cond;
5724 true_value = constant_boolean_node (true, testtype);
5725 false_value = constant_boolean_node (false, testtype);
5728 arg = fold_convert (arg_type, arg);
5729 if (lhs == 0)
5731 true_value = fold_convert (cond_type, true_value);
5732 if (cond_first_p)
5733 lhs = fold_build2 (code, type, true_value, arg);
5734 else
5735 lhs = fold_build2 (code, type, arg, true_value);
5737 if (rhs == 0)
5739 false_value = fold_convert (cond_type, false_value);
5740 if (cond_first_p)
5741 rhs = fold_build2 (code, type, false_value, arg);
5742 else
5743 rhs = fold_build2 (code, type, arg, false_value);
5746 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5747 return fold_convert (type, test);
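/* [Editor's illustrative sketch, not part of the original file.]
   With a constant operand, the transformation above gives, for
   hypothetical trees,

     5 + (b ? x : y)  ->  b ? (5 + x) : (5 + y)
     5 + (x < y)      ->  (x < y) ? (5 + 1) : (5 + 0)

   after which fold_build2 simplifies each arm further.  */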
5751 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5753 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5754 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5755 ADDEND is the same as X.
5757 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5758 and finite. The problematic cases are when X is zero, and its mode
5759 has signed zeros. In the case of rounding towards -infinity,
5760 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5761 modes, X + 0 is not the same as X because -0 + 0 is +0. */
5763 static bool
5764 fold_real_zero_addition_p (tree type, tree addend, int negate)
5766 if (!real_zerop (addend))
5767 return false;
5769 /* Don't allow the fold with -fsignaling-nans. */
5770 if (HONOR_SNANS (TYPE_MODE (type)))
5771 return false;
5773 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5774 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5775 return true;
5777 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5778 if (TREE_CODE (addend) == REAL_CST
5779 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5780 negate = !negate;
5782 /* The mode has signed zeros, and we have to honor their sign.
5783 In this situation, there is only one case we can return true for.
5784 X - 0 is the same as X unless rounding towards -infinity is
5785 supported. */
5786 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
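/* [Editor's worked example, not part of the original file.]
   For IEEE double with signed zeros honored, X + 0.0 must not be
   folded to X: when X is -0.0 the sum -0.0 + 0.0 is +0.0.  X - 0.0
   does fold to X, unless rounding towards -infinity may be in
   effect, where +0.0 - 0.0 yields -0.0.  An addend of -0.0 is
   handled by flipping NEGATE above.  */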
5789 /* Subroutine of fold() that checks comparisons of built-in math
5790 functions against real constants.
5792 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5793 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5794 is the type of the result and ARG0 and ARG1 are the operands of the
5795 comparison. ARG1 must be a TREE_REAL_CST.
5797 The function returns the constant folded tree if a simplification
5798 can be made, and NULL_TREE otherwise. */
5800 static tree
5801 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5802 tree type, tree arg0, tree arg1)
5804 REAL_VALUE_TYPE c;
5806 if (BUILTIN_SQRT_P (fcode))
5808 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5809 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5811 c = TREE_REAL_CST (arg1);
5812 if (REAL_VALUE_NEGATIVE (c))
5814 /* sqrt(x) < y is always false, if y is negative. */
5815 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5816 return omit_one_operand (type, integer_zero_node, arg);
5818 /* sqrt(x) > y is always true, if y is negative and we
5819 don't care about NaNs, i.e. negative values of x. */
5820 if (code == NE_EXPR || !HONOR_NANS (mode))
5821 return omit_one_operand (type, integer_one_node, arg);
5823 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5824 return fold_build2 (GE_EXPR, type, arg,
5825 build_real (TREE_TYPE (arg), dconst0));
5827 else if (code == GT_EXPR || code == GE_EXPR)
5829 REAL_VALUE_TYPE c2;
5831 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5832 real_convert (&c2, mode, &c2);
5834 if (REAL_VALUE_ISINF (c2))
5836 /* sqrt(x) > y is x == +Inf, when y is very large. */
5837 if (HONOR_INFINITIES (mode))
5838 return fold_build2 (EQ_EXPR, type, arg,
5839 build_real (TREE_TYPE (arg), c2));
5841 /* sqrt(x) > y is always false, when y is very large
5842 and we don't care about infinities. */
5843 return omit_one_operand (type, integer_zero_node, arg);
5846 /* sqrt(x) > c is the same as x > c*c. */
5847 return fold_build2 (code, type, arg,
5848 build_real (TREE_TYPE (arg), c2));
5850 else if (code == LT_EXPR || code == LE_EXPR)
5852 REAL_VALUE_TYPE c2;
5854 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5855 real_convert (&c2, mode, &c2);
5857 if (REAL_VALUE_ISINF (c2))
5859 /* sqrt(x) < y is always true, when y is a very large
5860 value and we don't care about NaNs or Infinities. */
5861 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5862 return omit_one_operand (type, integer_one_node, arg);
5864 /* sqrt(x) < y is x != +Inf when y is very large and we
5865 don't care about NaNs. */
5866 if (! HONOR_NANS (mode))
5867 return fold_build2 (NE_EXPR, type, arg,
5868 build_real (TREE_TYPE (arg), c2));
5870 /* sqrt(x) < y is x >= 0 when y is very large and we
5871 don't care about Infinities. */
5872 if (! HONOR_INFINITIES (mode))
5873 return fold_build2 (GE_EXPR, type, arg,
5874 build_real (TREE_TYPE (arg), dconst0));
5876 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5877 if (lang_hooks.decls.global_bindings_p () != 0
5878 || CONTAINS_PLACEHOLDER_P (arg))
5879 return NULL_TREE;
5881 arg = save_expr (arg);
5882 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5883 fold_build2 (GE_EXPR, type, arg,
5884 build_real (TREE_TYPE (arg),
5885 dconst0)),
5886 fold_build2 (NE_EXPR, type, arg,
5887 build_real (TREE_TYPE (arg),
5888 c2)));
5891 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5892 if (! HONOR_NANS (mode))
5893 return fold_build2 (code, type, arg,
5894 build_real (TREE_TYPE (arg), c2));
5896 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5897 if (lang_hooks.decls.global_bindings_p () == 0
5898 && ! CONTAINS_PLACEHOLDER_P (arg))
5900 arg = save_expr (arg);
5901 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5902 fold_build2 (GE_EXPR, type, arg,
5903 build_real (TREE_TYPE (arg),
5904 dconst0)),
5905 fold_build2 (code, type, arg,
5906 build_real (TREE_TYPE (arg),
5907 c2)));
5912 return NULL_TREE;
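/* [Editor's illustrative sketch, not part of the original file.]
   For a hypothetical double argument, the sqrt folds above give

     sqrt (x) > 2.0   ->  x > 4.0    (2.0 * 2.0 is finite)
     sqrt (x) < -1.0  ->  0          (always false)
     sqrt (x) > -1.0  ->  x >= 0.0   (when NaNs are honored)  */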
5915 /* Subroutine of fold() that optimizes comparisons against Infinities,
5916 either +Inf or -Inf.
5918 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5919 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5920 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5922 The function returns the constant folded tree if a simplification
5923 can be made, and NULL_TREE otherwise. */
5925 static tree
5926 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5928 enum machine_mode mode;
5929 REAL_VALUE_TYPE max;
5930 tree temp;
5931 bool neg;
5933 mode = TYPE_MODE (TREE_TYPE (arg0));
5935 /* For negative infinity swap the sense of the comparison. */
5936 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5937 if (neg)
5938 code = swap_tree_comparison (code);
5940 switch (code)
5942 case GT_EXPR:
5943 /* x > +Inf is always false, if we ignore sNaNs. */
5944 if (HONOR_SNANS (mode))
5945 return NULL_TREE;
5946 return omit_one_operand (type, integer_zero_node, arg0);
5948 case LE_EXPR:
5949 /* x <= +Inf is always true, if we don't care about NaNs. */
5950 if (! HONOR_NANS (mode))
5951 return omit_one_operand (type, integer_one_node, arg0);
5953 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5954 if (lang_hooks.decls.global_bindings_p () == 0
5955 && ! CONTAINS_PLACEHOLDER_P (arg0))
5957 arg0 = save_expr (arg0);
5958 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5960 break;
5962 case EQ_EXPR:
5963 case GE_EXPR:
5964 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5965 real_maxval (&max, neg, mode);
5966 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5967 arg0, build_real (TREE_TYPE (arg0), max));
5969 case LT_EXPR:
5970 /* x < +Inf is always equal to x <= DBL_MAX. */
5971 real_maxval (&max, neg, mode);
5972 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5973 arg0, build_real (TREE_TYPE (arg0), max));
5975 case NE_EXPR:
5976 /* x != +Inf is always equal to !(x > DBL_MAX). */
5977 real_maxval (&max, neg, mode);
5978 if (! HONOR_NANS (mode))
5979 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5980 arg0, build_real (TREE_TYPE (arg0), max));
5982 /* The transformation below creates non-gimple code and thus is
5983 not appropriate if we are in gimple form. */
5984 if (in_gimple_form)
5985 return NULL_TREE;
5987 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5988 arg0, build_real (TREE_TYPE (arg0), max));
5989 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5991 default:
5992 break;
5995 return NULL_TREE;
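/* [Editor's illustrative sketch, not part of the original file.]
   Assuming IEEE double, the infinity folds above give

     x <  +Inf  ->  x <= DBL_MAX
     x >= +Inf  ->  x >  DBL_MAX
     x >  +Inf  ->  0               (if sNaNs need not be honored)

   Comparisons against -Inf are first normalized through
   swap_tree_comparison.  */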
5998 /* Subroutine of fold() that optimizes comparisons of a division by
5999 a nonzero integer constant against an integer constant, i.e.
6000 X/C1 op C2.
6002 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6003 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6004 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6006 The function returns the constant folded tree if a simplification
6007 can be made, and NULL_TREE otherwise. */
6009 static tree
6010 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6012 tree prod, tmp, hi, lo;
6013 tree arg00 = TREE_OPERAND (arg0, 0);
6014 tree arg01 = TREE_OPERAND (arg0, 1);
6015 unsigned HOST_WIDE_INT lpart;
6016 HOST_WIDE_INT hpart;
6017 int overflow;
6019 /* We have to do this the hard way to detect unsigned overflow.
6020 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6021 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6022 TREE_INT_CST_HIGH (arg01),
6023 TREE_INT_CST_LOW (arg1),
6024 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6025 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6026 prod = force_fit_type (prod, -1, overflow, false);
6028 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6030 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6031 lo = prod;
6033 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6034 overflow = add_double (TREE_INT_CST_LOW (prod),
6035 TREE_INT_CST_HIGH (prod),
6036 TREE_INT_CST_LOW (tmp),
6037 TREE_INT_CST_HIGH (tmp),
6038 &lpart, &hpart);
6039 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6040 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6041 TREE_CONSTANT_OVERFLOW (prod));
6043 else if (tree_int_cst_sgn (arg01) >= 0)
6045 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6046 switch (tree_int_cst_sgn (arg1))
6048 case -1:
6049 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6050 hi = prod;
6051 break;
6053 case 0:
6054 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6055 hi = tmp;
6056 break;
6058 case 1:
6059 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6060 lo = prod;
6061 break;
6063 default:
6064 gcc_unreachable ();
6067 else
6069 /* A negative divisor reverses the relational operators. */
6070 code = swap_tree_comparison (code);
6072 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6073 switch (tree_int_cst_sgn (arg1))
6075 case -1:
6076 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6077 lo = prod;
6078 break;
6080 case 0:
6081 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6082 lo = tmp;
6083 break;
6085 case 1:
6086 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6087 hi = prod;
6088 break;
6090 default:
6091 gcc_unreachable ();
6095 switch (code)
6097 case EQ_EXPR:
6098 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6099 return omit_one_operand (type, integer_zero_node, arg00);
6100 if (TREE_OVERFLOW (hi))
6101 return fold_build2 (GE_EXPR, type, arg00, lo);
6102 if (TREE_OVERFLOW (lo))
6103 return fold_build2 (LE_EXPR, type, arg00, hi);
6104 return build_range_check (type, arg00, 1, lo, hi);
6106 case NE_EXPR:
6107 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6108 return omit_one_operand (type, integer_one_node, arg00);
6109 if (TREE_OVERFLOW (hi))
6110 return fold_build2 (LT_EXPR, type, arg00, lo);
6111 if (TREE_OVERFLOW (lo))
6112 return fold_build2 (GT_EXPR, type, arg00, hi);
6113 return build_range_check (type, arg00, 0, lo, hi);
6115 case LT_EXPR:
6116 if (TREE_OVERFLOW (lo))
6117 return omit_one_operand (type, integer_zero_node, arg00);
6118 return fold_build2 (LT_EXPR, type, arg00, lo);
6120 case LE_EXPR:
6121 if (TREE_OVERFLOW (hi))
6122 return omit_one_operand (type, integer_one_node, arg00);
6123 return fold_build2 (LE_EXPR, type, arg00, hi);
6125 case GT_EXPR:
6126 if (TREE_OVERFLOW (hi))
6127 return omit_one_operand (type, integer_zero_node, arg00);
6128 return fold_build2 (GT_EXPR, type, arg00, hi);
6130 case GE_EXPR:
6131 if (TREE_OVERFLOW (lo))
6132 return omit_one_operand (type, integer_one_node, arg00);
6133 return fold_build2 (GE_EXPR, type, arg00, lo);
6135 default:
6136 break;
6139 return NULL_TREE;
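/* [Editor's worked example, not part of the original file.]
   For a hypothetical unsigned X, the comparison X / 4 == 2 yields
   prod = 4 * 2 = 8, lo = 8 and hi = 8 + (4 - 1) = 11, so the whole
   test folds to the range check

     8 <= X && X <= 11  */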
6143 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6144 equality/inequality test, then return a simplified form of the test
6145 using a sign test. Otherwise return NULL. TYPE is the desired
6146 result type. */
6148 static tree
6149 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6150 tree result_type)
6152 /* If this is testing a single bit, we can optimize the test. */
6153 if ((code == NE_EXPR || code == EQ_EXPR)
6154 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6155 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6157 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6158 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6159 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6161 if (arg00 != NULL_TREE
6162 /* This is only a win if casting to a signed type is cheap,
6163 i.e. when arg00's type is not a partial mode. */
6164 && TYPE_PRECISION (TREE_TYPE (arg00))
6165 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6167 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6168 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6169 result_type, fold_convert (stype, arg00),
6170 build_int_cst (stype, 0));
6174 return NULL_TREE;
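/* [Editor's illustrative sketch, not part of the original file.]
   For a hypothetical 32-bit unsigned A, where 0x80000000 is exactly
   the sign bit,

     (A & 0x80000000) != 0  ->  (int) A <  0
     (A & 0x80000000) == 0  ->  (int) A >= 0

   with the cast going to the corresponding signed type.  */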
6177 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6178 equality/inequality test, then return a simplified form of
6179 the test using shifts and logical operations. Otherwise return
6180 NULL. TYPE is the desired result type. */
6182 tree
6183 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6184 tree result_type)
6186 /* If this is testing a single bit, we can optimize the test. */
6187 if ((code == NE_EXPR || code == EQ_EXPR)
6188 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6189 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6191 tree inner = TREE_OPERAND (arg0, 0);
6192 tree type = TREE_TYPE (arg0);
6193 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6194 enum machine_mode operand_mode = TYPE_MODE (type);
6195 int ops_unsigned;
6196 tree signed_type, unsigned_type, intermediate_type;
6197 tree tem;
6199 /* First, see if we can fold the single bit test into a sign-bit
6200 test. */
6201 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6202 result_type);
6203 if (tem)
6204 return tem;
6206 /* Otherwise we have (A & C) != 0 where C is a single bit,
6207 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6208 Similarly for (A & C) == 0. */
6210 /* If INNER is a right shift of a constant and it plus BITNUM does
6211 not overflow, adjust BITNUM and INNER. */
6212 if (TREE_CODE (inner) == RSHIFT_EXPR
6213 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6214 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6215 && bitnum < TYPE_PRECISION (type)
6216 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6217 TYPE_PRECISION (type) - bitnum))
6219 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6220 inner = TREE_OPERAND (inner, 0);
6223 /* If we are going to be able to omit the AND below, we must do our
6224 operations as unsigned. If we must use the AND, we have a choice.
6225 Normally unsigned is faster, but for some machines signed is. */
6226 #ifdef LOAD_EXTEND_OP
6227 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6228 && !flag_syntax_only) ? 0 : 1;
6229 #else
6230 ops_unsigned = 1;
6231 #endif
6233 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6234 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6235 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6236 inner = fold_convert (intermediate_type, inner);
6238 if (bitnum != 0)
6239 inner = build2 (RSHIFT_EXPR, intermediate_type,
6240 inner, size_int (bitnum));
6242 if (code == EQ_EXPR)
6243 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6244 inner, integer_one_node);
6246 /* Put the AND last so it can combine with more things. */
6247 inner = build2 (BIT_AND_EXPR, intermediate_type,
6248 inner, integer_one_node);
6250 /* Make sure to return the proper type. */
6251 inner = fold_convert (result_type, inner);
6253 return inner;
6255 return NULL_TREE;
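/* [Editor's illustrative sketch, not part of the original file.]
   When the mask is a single non-sign bit, e.g. for a hypothetical A,

     (A & 8) != 0  ->  ((A >> 3) & 1)
     (A & 8) == 0  ->  (((A >> 3) ^ 1) & 1)

   with the shift and AND performed in the signed or unsigned
   intermediate type chosen above.  */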
6258 /* Check whether we are allowed to reorder operands arg0 and arg1,
6259 such that the evaluation of arg1 occurs before arg0. */
6261 static bool
6262 reorder_operands_p (tree arg0, tree arg1)
6264 if (! flag_evaluation_order)
6265 return true;
6266 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6267 return true;
6268 return ! TREE_SIDE_EFFECTS (arg0)
6269 && ! TREE_SIDE_EFFECTS (arg1);
6272 /* Test whether it is preferable to swap two operands, ARG0 and
6273 ARG1, for example because ARG0 is an integer constant and ARG1
6274 isn't. If REORDER is true, only recommend swapping if we can
6275 evaluate the operands in reverse order. */
6277 bool
6278 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6280 STRIP_SIGN_NOPS (arg0);
6281 STRIP_SIGN_NOPS (arg1);
6283 if (TREE_CODE (arg1) == INTEGER_CST)
6284 return 0;
6285 if (TREE_CODE (arg0) == INTEGER_CST)
6286 return 1;
6288 if (TREE_CODE (arg1) == REAL_CST)
6289 return 0;
6290 if (TREE_CODE (arg0) == REAL_CST)
6291 return 1;
6293 if (TREE_CODE (arg1) == COMPLEX_CST)
6294 return 0;
6295 if (TREE_CODE (arg0) == COMPLEX_CST)
6296 return 1;
6298 if (TREE_CONSTANT (arg1))
6299 return 0;
6300 if (TREE_CONSTANT (arg0))
6301 return 1;
6303 if (optimize_size)
6304 return 0;
6306 if (reorder && flag_evaluation_order
6307 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6308 return 0;
6310 if (DECL_P (arg1))
6311 return 0;
6312 if (DECL_P (arg0))
6313 return 1;
6315 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6316 for commutative and comparison operators. Ensuring a canonical
6317 form allows the optimizers to find additional redundancies without
6318 having to explicitly check for both orderings. */
6319 if (TREE_CODE (arg0) == SSA_NAME
6320 && TREE_CODE (arg1) == SSA_NAME
6321 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6322 return 1;
6324 return 0;
6327 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6328 ARG0 is extended to a wider type. */
6330 static tree
6331 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6333 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6334 tree arg1_unw;
6335 tree shorter_type, outer_type;
6336 tree min, max;
6337 bool above, below;
6339 if (arg0_unw == arg0)
6340 return NULL_TREE;
6341 shorter_type = TREE_TYPE (arg0_unw);
6343 #ifdef HAVE_canonicalize_funcptr_for_compare
6344 /* Disable this optimization if we're casting a function pointer
6345 type on targets that require function pointer canonicalization. */
6346 if (HAVE_canonicalize_funcptr_for_compare
6347 && TREE_CODE (shorter_type) == POINTER_TYPE
6348 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6349 return NULL_TREE;
6350 #endif
6352 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6353 return NULL_TREE;
6355 arg1_unw = get_unwidened (arg1, shorter_type);
6357 /* If possible, express the comparison in the shorter mode. */
6358 if ((code == EQ_EXPR || code == NE_EXPR
6359 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6360 && (TREE_TYPE (arg1_unw) == shorter_type
6361 || (TREE_CODE (arg1_unw) == INTEGER_CST
6362 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6363 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6364 && int_fits_type_p (arg1_unw, shorter_type))))
6365 return fold_build2 (code, type, arg0_unw,
6366 fold_convert (shorter_type, arg1_unw));
6368 if (TREE_CODE (arg1_unw) != INTEGER_CST
6369 || TREE_CODE (shorter_type) != INTEGER_TYPE
6370 || !int_fits_type_p (arg1_unw, shorter_type))
6371 return NULL_TREE;
6373 /* If we are comparing with an integer that does not fit into the range
6374 of the shorter type, the result is known. */
6375 outer_type = TREE_TYPE (arg1_unw);
6376 min = lower_bound_in_type (outer_type, shorter_type);
6377 max = upper_bound_in_type (outer_type, shorter_type);
6379 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6380 max, arg1_unw));
6381 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6382 arg1_unw, min));
6384 switch (code)
6386 case EQ_EXPR:
6387 if (above || below)
6388 return omit_one_operand (type, integer_zero_node, arg0);
6389 break;
6391 case NE_EXPR:
6392 if (above || below)
6393 return omit_one_operand (type, integer_one_node, arg0);
6394 break;
6396 case LT_EXPR:
6397 case LE_EXPR:
6398 if (above)
6399 return omit_one_operand (type, integer_one_node, arg0);
6400 else if (below)
6401 return omit_one_operand (type, integer_zero_node, arg0);
6403 case GT_EXPR:
6404 case GE_EXPR:
6405 if (above)
6406 return omit_one_operand (type, integer_zero_node, arg0);
6407 else if (below)
6408 return omit_one_operand (type, integer_one_node, arg0);
6410 default:
6411 break;
6414 return NULL_TREE;
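/* [Editor's worked example, not part of the original file.]
   With a hypothetical unsigned short US promoted to int, the bounds
   of the shorter type are [0, 65535], so a constant outside that
   range decides the comparison:

     (int) US == 70000  ->  0
     (int) US != 70000  ->  1
     (int) US <  70000  ->  1  */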
6417 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6418 ARG0 just the signedness is changed. */
6420 static tree
6421 fold_sign_changed_comparison (enum tree_code code, tree type,
6422 tree arg0, tree arg1)
6424 tree arg0_inner, tmp;
6425 tree inner_type, outer_type;
6427 if (TREE_CODE (arg0) != NOP_EXPR
6428 && TREE_CODE (arg0) != CONVERT_EXPR)
6429 return NULL_TREE;
6431 outer_type = TREE_TYPE (arg0);
6432 arg0_inner = TREE_OPERAND (arg0, 0);
6433 inner_type = TREE_TYPE (arg0_inner);
6435 #ifdef HAVE_canonicalize_funcptr_for_compare
6436 /* Disable this optimization if we're casting a function pointer
6437 type on targets that require function pointer canonicalization. */
6438 if (HAVE_canonicalize_funcptr_for_compare
6439 && TREE_CODE (inner_type) == POINTER_TYPE
6440 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6441 return NULL_TREE;
6442 #endif
6444 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6445 return NULL_TREE;
6447 if (TREE_CODE (arg1) != INTEGER_CST
6448 && !((TREE_CODE (arg1) == NOP_EXPR
6449 || TREE_CODE (arg1) == CONVERT_EXPR)
6450 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6451 return NULL_TREE;
6453 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6454 && code != NE_EXPR
6455 && code != EQ_EXPR)
6456 return NULL_TREE;
6458 if (TREE_CODE (arg1) == INTEGER_CST)
6460 tmp = build_int_cst_wide (inner_type,
6461 TREE_INT_CST_LOW (arg1),
6462 TREE_INT_CST_HIGH (arg1));
6463 arg1 = force_fit_type (tmp, 0,
6464 TREE_OVERFLOW (arg1),
6465 TREE_CONSTANT_OVERFLOW (arg1));
6467 else
6468 arg1 = fold_convert (inner_type, arg1);
6470 return fold_build2 (code, type, arg0_inner, arg1);
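/* [Editor's illustrative sketch, not part of the original file.]
   For a hypothetical unsigned int U compared through a cast that
   only changes signedness,

     (int) U == 5  ->  U == 5U

   with the constant refitted into the inner type; when the
   signedness differs only EQ and NE are handled, per the check
   above.  */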
6473 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6474 the step of the array. Reconstructs s and delta in the case of s * delta
6475 being an integer constant (and thus already folded).
6476 ADDR is the address. OP1 is the multiplicative expression.
6477 If the function succeeds, the new address expression is returned. Otherwise
6478 NULL_TREE is returned. */
6480 static tree
6481 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6483 tree s, delta, step;
6484 tree ref = TREE_OPERAND (addr, 0), pref;
6485 tree ret, pos;
6486 tree itype;
6488 /* Canonicalize op1 into a possibly non-constant delta
6489 and an INTEGER_CST s. */
6490 if (TREE_CODE (op1) == MULT_EXPR)
6492 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6494 STRIP_NOPS (arg0);
6495 STRIP_NOPS (arg1);
6497 if (TREE_CODE (arg0) == INTEGER_CST)
6499 s = arg0;
6500 delta = arg1;
6502 else if (TREE_CODE (arg1) == INTEGER_CST)
6504 s = arg1;
6505 delta = arg0;
6507 else
6508 return NULL_TREE;
6510 else if (TREE_CODE (op1) == INTEGER_CST)
6512 delta = op1;
6513 s = NULL_TREE;
6515 else
6517 /* Pretend that we have delta * 1. */
6518 delta = op1;
6519 s = integer_one_node;
6522 for (;; ref = TREE_OPERAND (ref, 0))
6524 if (TREE_CODE (ref) == ARRAY_REF)
6526 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6527 if (! itype)
6528 continue;
6530 step = array_ref_element_size (ref);
6531 if (TREE_CODE (step) != INTEGER_CST)
6532 continue;
6534 if (s)
6536 if (! tree_int_cst_equal (step, s))
6537 continue;
6539 else
6541 /* Check whether delta is a multiple of step. */
6542 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6543 if (! tmp)
6544 continue;
6545 delta = tmp;
6548 break;
6551 if (!handled_component_p (ref))
6552 return NULL_TREE;
6555 /* We found a suitable array reference. So copy everything up to it,
6556 and replace the index. */
6558 pref = TREE_OPERAND (addr, 0);
6559 ret = copy_node (pref);
6560 pos = ret;
6562 while (pref != ref)
6564 pref = TREE_OPERAND (pref, 0);
6565 TREE_OPERAND (pos, 0) = copy_node (pref);
6566 pos = TREE_OPERAND (pos, 0);
6569 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6570 fold_convert (itype,
6571 TREE_OPERAND (pos, 1)),
6572 fold_convert (itype, delta));
6574 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
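/* [Editor's illustrative sketch, not part of the original file.]
   For a hypothetical array a of 4-byte elements,

     &a[i] + j * 4  ->  &a[i + j]
     &a[i] + 8      ->  &a[i + 2]

   since in both cases the (possibly pre-folded) multiplier matches
   or is a multiple of the array's step.  */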
6578 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6579 means A >= Y && A != MAX, but in this case we know that
6580 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6582 static tree
6583 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6585 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6587 if (TREE_CODE (bound) == LT_EXPR)
6588 a = TREE_OPERAND (bound, 0);
6589 else if (TREE_CODE (bound) == GT_EXPR)
6590 a = TREE_OPERAND (bound, 1);
6591 else
6592 return NULL_TREE;
6594 typea = TREE_TYPE (a);
6595 if (!INTEGRAL_TYPE_P (typea)
6596 && !POINTER_TYPE_P (typea))
6597 return NULL_TREE;
6599 if (TREE_CODE (ineq) == LT_EXPR)
6601 a1 = TREE_OPERAND (ineq, 1);
6602 y = TREE_OPERAND (ineq, 0);
6604 else if (TREE_CODE (ineq) == GT_EXPR)
6606 a1 = TREE_OPERAND (ineq, 0);
6607 y = TREE_OPERAND (ineq, 1);
6609 else
6610 return NULL_TREE;
6612 if (TREE_TYPE (a1) != typea)
6613 return NULL_TREE;
6615 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6616 if (!integer_onep (diff))
6617 return NULL_TREE;
6619 return fold_build2 (GE_EXPR, type, a, y);
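/* [Editor's worked example, not part of the original file.]
   For hypothetical operands,

     a < x && a + 1 > y  ->  a < x && a >= y

   is safe because a + 1 > y normally means a >= y && a != MAX, and
   the bound a < x already guarantees a != MAX.  */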
6622 /* Fold a sum or difference of at least one multiplication.
6623 Returns the folded tree or NULL if no simplification could be made. */
6625 static tree
6626 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6628 tree arg00, arg01, arg10, arg11;
6629 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6631 /* (A * C) +- (B * C) -> (A+-B) * C.
6632 (A * C) +- A -> A * (C+-1).
6633 We are most concerned about the case where C is a constant,
6634 but other combinations show up during loop reduction. Since
6635 it is not difficult, try all four possibilities. */
6637 if (TREE_CODE (arg0) == MULT_EXPR)
6639 arg00 = TREE_OPERAND (arg0, 0);
6640 arg01 = TREE_OPERAND (arg0, 1);
6642 else
6644 arg00 = arg0;
6645 if (!FLOAT_TYPE_P (type))
6646 arg01 = build_int_cst (type, 1);
6647 else
6648 arg01 = build_real (type, dconst1);
6650 if (TREE_CODE (arg1) == MULT_EXPR)
6652 arg10 = TREE_OPERAND (arg1, 0);
6653 arg11 = TREE_OPERAND (arg1, 1);
6655 else
6657 arg10 = arg1;
6658 if (!FLOAT_TYPE_P (type))
6659 arg11 = build_int_cst (type, 1);
6660 else
6661 arg11 = build_real (type, dconst1);
6663 same = NULL_TREE;
6665 if (operand_equal_p (arg01, arg11, 0))
6666 same = arg01, alt0 = arg00, alt1 = arg10;
6667 else if (operand_equal_p (arg00, arg10, 0))
6668 same = arg00, alt0 = arg01, alt1 = arg11;
6669 else if (operand_equal_p (arg00, arg11, 0))
6670 same = arg00, alt0 = arg01, alt1 = arg10;
6671 else if (operand_equal_p (arg01, arg10, 0))
6672 same = arg01, alt0 = arg00, alt1 = arg11;
6674 /* No identical multiplicands; see if we can find a common
6675 power-of-two factor in non-power-of-two multiplies. This
6676 can help in multi-dimensional array access. */
6677 else if (host_integerp (arg01, 0)
6678 && host_integerp (arg11, 0))
6680 HOST_WIDE_INT int01, int11, tmp;
6681 bool swap = false;
6682 tree maybe_same;
6683 int01 = TREE_INT_CST_LOW (arg01);
6684 int11 = TREE_INT_CST_LOW (arg11);
6686 /* Move min of absolute values to int11. */
6687 if ((int01 >= 0 ? int01 : -int01)
6688 < (int11 >= 0 ? int11 : -int11))
6690 tmp = int01, int01 = int11, int11 = tmp;
6691 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6692 maybe_same = arg01;
6693 swap = true;
6695 else
6696 maybe_same = arg11;
6698 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6700 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6701 build_int_cst (TREE_TYPE (arg00),
6702 int01 / int11));
6703 alt1 = arg10;
6704 same = maybe_same;
6705 if (swap)
6706 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6710 if (same)
6711 return fold_build2 (MULT_EXPR, type,
6712 fold_build2 (code, type,
6713 fold_convert (type, alt0),
6714 fold_convert (type, alt1)),
6715 fold_convert (type, same));
6717 return NULL_TREE;
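/* [Editor's illustrative sketch, not part of the original file.]
   Hypothetical inputs and the results of the distribution above:

     a*c + b*c   ->  (a + b) * c
     a*c + a     ->  a * (c + 1)
     x*12 + y*4  ->  (x*3 + y) * 4   (common power-of-two factor)  */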
6720 /* Fold a unary expression of code CODE and type TYPE with operand
6721 OP0. Return the folded expression if folding is successful.
6722 Otherwise, return NULL_TREE. */
6724 tree
6725 fold_unary (enum tree_code code, tree type, tree op0)
6727 tree tem;
6728 tree arg0;
6729 enum tree_code_class kind = TREE_CODE_CLASS (code);
6731 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6732 && TREE_CODE_LENGTH (code) == 1);
6734 arg0 = op0;
6735 if (arg0)
6737 if (code == NOP_EXPR || code == CONVERT_EXPR
6738 || code == FLOAT_EXPR || code == ABS_EXPR)
6740 /* Don't use STRIP_NOPS, because signedness of argument type
6741 matters. */
6742 STRIP_SIGN_NOPS (arg0);
6744 else
6746 /* Strip any conversions that don't change the mode. This
6747 is safe for every expression, except for a comparison
6748 expression because its signedness is derived from its
6749 operands.
6751 Note that this is done as an internal manipulation within
6752 the constant folder, in order to find the simplest
6753 representation of the arguments so that their form can be
6754 studied. In any case, the appropriate type conversions
6755 should be put back in the tree that will get out of the
6756 constant folder. */
6757 STRIP_NOPS (arg0);
6761 if (TREE_CODE_CLASS (code) == tcc_unary)
6763 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6764 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6765 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6766 else if (TREE_CODE (arg0) == COND_EXPR)
6768 tree arg01 = TREE_OPERAND (arg0, 1);
6769 tree arg02 = TREE_OPERAND (arg0, 2);
6770 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6771 arg01 = fold_build1 (code, type, arg01);
6772 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6773 arg02 = fold_build1 (code, type, arg02);
6774 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6775 arg01, arg02);
6777 /* If this was a conversion, and all we did was to move it
6778 inside the COND_EXPR, bring it back out. But leave it if
6779 it is a conversion from integer to integer and the
6780 result precision is no wider than a word since such a
6781 conversion is cheap and may be optimized away by combine,
6782 while it couldn't if it were outside the COND_EXPR. Then return
6783 so we don't get into an infinite recursion loop taking the
6784 conversion out and then back in. */
6786 if ((code == NOP_EXPR || code == CONVERT_EXPR
6787 || code == NON_LVALUE_EXPR)
6788 && TREE_CODE (tem) == COND_EXPR
6789 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6790 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6791 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6792 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6793 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6794 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6795 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6796 && (INTEGRAL_TYPE_P
6797 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6798 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6799 || flag_syntax_only))
6800 tem = build1 (code, type,
6801 build3 (COND_EXPR,
6802 TREE_TYPE (TREE_OPERAND
6803 (TREE_OPERAND (tem, 1), 0)),
6804 TREE_OPERAND (tem, 0),
6805 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6806 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6807 return tem;
6809 else if (COMPARISON_CLASS_P (arg0))
6811 if (TREE_CODE (type) == BOOLEAN_TYPE)
6813 arg0 = copy_node (arg0);
6814 TREE_TYPE (arg0) = type;
6815 return arg0;
6817 else if (TREE_CODE (type) != INTEGER_TYPE)
6818 return fold_build3 (COND_EXPR, type, arg0,
6819 fold_build1 (code, type,
6820 integer_one_node),
6821 fold_build1 (code, type,
6822 integer_zero_node));
6826 switch (code)
6828 case NOP_EXPR:
6829 case FLOAT_EXPR:
6830 case CONVERT_EXPR:
6831 case FIX_TRUNC_EXPR:
6832 case FIX_CEIL_EXPR:
6833 case FIX_FLOOR_EXPR:
6834 case FIX_ROUND_EXPR:
6835 if (TREE_TYPE (op0) == type)
6836 return op0;
6838 /* If we have (type) (a CMP b) and type is an integral type, return
6839 new expression involving the new type. */
6840 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6841 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6842 TREE_OPERAND (op0, 1));
6844 /* Handle cases of two conversions in a row. */
6845 if (TREE_CODE (op0) == NOP_EXPR
6846 || TREE_CODE (op0) == CONVERT_EXPR)
6848 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6849 tree inter_type = TREE_TYPE (op0);
6850 int inside_int = INTEGRAL_TYPE_P (inside_type);
6851 int inside_ptr = POINTER_TYPE_P (inside_type);
6852 int inside_float = FLOAT_TYPE_P (inside_type);
6853 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6854 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6855 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6856 int inter_int = INTEGRAL_TYPE_P (inter_type);
6857 int inter_ptr = POINTER_TYPE_P (inter_type);
6858 int inter_float = FLOAT_TYPE_P (inter_type);
6859 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6860 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6861 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6862 int final_int = INTEGRAL_TYPE_P (type);
6863 int final_ptr = POINTER_TYPE_P (type);
6864 int final_float = FLOAT_TYPE_P (type);
6865 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6866 unsigned int final_prec = TYPE_PRECISION (type);
6867 int final_unsignedp = TYPE_UNSIGNED (type);
6869 /* In addition to the cases of two conversions in a row
6870 handled below, if we are converting something to its own
6871 type via an object of identical or wider precision, neither
6872 conversion is needed. */
6873 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6874 && ((inter_int && final_int) || (inter_float && final_float))
6875 && inter_prec >= final_prec)
6876 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6878 /* Likewise, if the intermediate and final types are either both
6879 float or both integer, we don't need the middle conversion if
6880 it is wider than the final type and doesn't change the signedness
6881 (for integers). Avoid this if the final type is a pointer
6882 since then we sometimes need the inner conversion. Likewise if
6883 the outer has a precision not equal to the size of its mode. */
6884 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6885 || (inter_float && inside_float)
6886 || (inter_vec && inside_vec))
6887 && inter_prec >= inside_prec
6888 && (inter_float || inter_vec
6889 || inter_unsignedp == inside_unsignedp)
6890 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6891 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6892 && ! final_ptr
6893 && (! final_vec || inter_prec == inside_prec))
6894 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6896 /* If we have a sign-extension of a zero-extended value, we can
6897 replace that by a single zero-extension. */
6898 if (inside_int && inter_int && final_int
6899 && inside_prec < inter_prec && inter_prec < final_prec
6900 && inside_unsignedp && !inter_unsignedp)
6901 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6903 /* Two conversions in a row are not needed unless:
6904 - some conversion is floating-point (overstrict for now), or
6905 - some conversion is a vector (overstrict for now), or
6906 - the intermediate type is narrower than both initial and
6907 final, or
6908 - the intermediate type and innermost type differ in signedness,
6909 and the outermost type is wider than the intermediate, or
6910 - the initial type is a pointer type and the precisions of the
6911 intermediate and final types differ, or
6912 - the final type is a pointer type and the precisions of the
6913 initial and intermediate types differ. */
6914 if (! inside_float && ! inter_float && ! final_float
6915 && ! inside_vec && ! inter_vec && ! final_vec
6916 && (inter_prec > inside_prec || inter_prec > final_prec)
6917 && ! (inside_int && inter_int
6918 && inter_unsignedp != inside_unsignedp
6919 && inter_prec < final_prec)
6920 && ((inter_unsignedp && inter_prec > inside_prec)
6921 == (final_unsignedp && final_prec > inter_prec))
6922 && ! (inside_ptr && inter_prec != final_prec)
6923 && ! (final_ptr && inside_prec != inter_prec)
6924 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6925 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6926 && ! final_ptr)
6927 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
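/* [Editor's illustrative sketch, not part of the original file.]
   By the sign/zero-extension rule above, a hypothetical

     (int) (short) (unsigned char) c

   collapses to (int) (unsigned char) c: zero-extending to short and
   then sign-extending to int is the same as one zero-extension.  */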
6930 /* Handle (T *)&A.B.C for A being of type T and B and C
6931 living at offset zero. This occurs frequently in
6932 C++ upcasting and then accessing the base. */
6933 if (TREE_CODE (op0) == ADDR_EXPR
6934 && POINTER_TYPE_P (type)
6935 && handled_component_p (TREE_OPERAND (op0, 0)))
6937 HOST_WIDE_INT bitsize, bitpos;
6938 tree offset;
6939 enum machine_mode mode;
6940 int unsignedp, volatilep;
6941 tree base = TREE_OPERAND (op0, 0);
6942 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6943 &mode, &unsignedp, &volatilep, false);
6944 /* If the reference was to a (constant) zero offset, we can use
6945 the address of the base if it has the same base type
6946 as the result type. */
6947 if (! offset && bitpos == 0
6948 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6949 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6950 return fold_convert (type, build_fold_addr_expr (base));
6953 if (TREE_CODE (op0) == MODIFY_EXPR
6954 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6955 /* Detect assigning a bitfield. */
6956 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6957 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6959 /* Don't leave an assignment inside a conversion
6960 unless assigning a bitfield. */
6961 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6962 /* First do the assignment, then return converted constant. */
6963 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6964 TREE_NO_WARNING (tem) = 1;
6965 TREE_USED (tem) = 1;
6966 return tem;
6969 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6970 constant (if x has signed type, the sign bit cannot be set
6971 in c). This folds extension into the BIT_AND_EXPR. */
6972 if (INTEGRAL_TYPE_P (type)
6973 && TREE_CODE (type) != BOOLEAN_TYPE
6974 && TREE_CODE (op0) == BIT_AND_EXPR
6975 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6977 tree and = op0;
6978 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6979 int change = 0;
6981 if (TYPE_UNSIGNED (TREE_TYPE (and))
6982 || (TYPE_PRECISION (type)
6983 <= TYPE_PRECISION (TREE_TYPE (and))))
6984 change = 1;
6985 else if (TYPE_PRECISION (TREE_TYPE (and1))
6986 <= HOST_BITS_PER_WIDE_INT
6987 && host_integerp (and1, 1))
6989 unsigned HOST_WIDE_INT cst;
6991 cst = tree_low_cst (and1, 1);
6992 cst &= (HOST_WIDE_INT) -1
6993 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6994 change = (cst == 0);
6995 #ifdef LOAD_EXTEND_OP
6996 if (change
6997 && !flag_syntax_only
6998 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6999 == ZERO_EXTEND))
7001 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7002 and0 = fold_convert (uns, and0);
7003 and1 = fold_convert (uns, and1);
7005 #endif
7007 if (change)
7009 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7010 TREE_INT_CST_HIGH (and1));
7011 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7012 TREE_CONSTANT_OVERFLOW (and1));
7013 return fold_build2 (BIT_AND_EXPR, type,
7014 fold_convert (type, and0), tem);
7018 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7019 T2 being pointers to types of the same size. */
7020 if (POINTER_TYPE_P (type)
7021 && BINARY_CLASS_P (arg0)
7022 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7023 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7025 tree arg00 = TREE_OPERAND (arg0, 0);
7026 tree t0 = type;
7027 tree t1 = TREE_TYPE (arg00);
7028 tree tt0 = TREE_TYPE (t0);
7029 tree tt1 = TREE_TYPE (t1);
7030 tree s0 = TYPE_SIZE (tt0);
7031 tree s1 = TYPE_SIZE (tt1);
7033 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7034 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7035 TREE_OPERAND (arg0, 1));
7038 tem = fold_convert_const (code, type, arg0);
7039 return tem ? tem : NULL_TREE;
7041 case VIEW_CONVERT_EXPR:
7042 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7043 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7044 return NULL_TREE;
7046 case NEGATE_EXPR:
7047 if (negate_expr_p (arg0))
7048 return fold_convert (type, negate_expr (arg0));
7049 return NULL_TREE;
7051 case ABS_EXPR:
7052 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7053 return fold_abs_const (arg0, type);
7054 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7055 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7056 /* Convert fabs((double)float) into (double)fabsf(float). */
7057 else if (TREE_CODE (arg0) == NOP_EXPR
7058 && TREE_CODE (type) == REAL_TYPE)
7060 tree targ0 = strip_float_extensions (arg0);
7061 if (targ0 != arg0)
7062 return fold_convert (type, fold_build1 (ABS_EXPR,
7063 TREE_TYPE (targ0),
7064 targ0));
7066 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7067 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7068 return arg0;
7070 /* Strip sign ops from argument. */
7071 if (TREE_CODE (type) == REAL_TYPE)
7073 tem = fold_strip_sign_ops (arg0);
7074 if (tem)
7075 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7077 return NULL_TREE;
7079 case CONJ_EXPR:
7080 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7081 return fold_convert (type, arg0);
7082 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7083 return build2 (COMPLEX_EXPR, type,
7084 TREE_OPERAND (arg0, 0),
7085 negate_expr (TREE_OPERAND (arg0, 1)));
7086 else if (TREE_CODE (arg0) == COMPLEX_CST)
7087 return build_complex (type, TREE_REALPART (arg0),
7088 negate_expr (TREE_IMAGPART (arg0)));
7089 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7090 return fold_build2 (TREE_CODE (arg0), type,
7091 fold_build1 (CONJ_EXPR, type,
7092 TREE_OPERAND (arg0, 0)),
7093 fold_build1 (CONJ_EXPR, type,
7094 TREE_OPERAND (arg0, 1)));
7095 else if (TREE_CODE (arg0) == CONJ_EXPR)
7096 return TREE_OPERAND (arg0, 0);
7097 return NULL_TREE;
7099 case BIT_NOT_EXPR:
7100 if (TREE_CODE (arg0) == INTEGER_CST)
7101 return fold_not_const (arg0, type);
7102 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7103 return TREE_OPERAND (arg0, 0);
7104 /* Convert ~ (-A) to A - 1. */
7105 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7106 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7107 build_int_cst (type, 1));
7108 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7109 else if (INTEGRAL_TYPE_P (type)
7110 && ((TREE_CODE (arg0) == MINUS_EXPR
7111 && integer_onep (TREE_OPERAND (arg0, 1)))
7112 || (TREE_CODE (arg0) == PLUS_EXPR
7113 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7114 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7115 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7116 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7117 && (tem = fold_unary (BIT_NOT_EXPR, type,
7118 fold_convert (type,
7119 TREE_OPERAND (arg0, 0)))))
7120 return fold_build2 (BIT_XOR_EXPR, type, tem,
7121 fold_convert (type, TREE_OPERAND (arg0, 1)));
7122 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7123 && (tem = fold_unary (BIT_NOT_EXPR, type,
7124 fold_convert (type,
7125 TREE_OPERAND (arg0, 1)))))
7126 return fold_build2 (BIT_XOR_EXPR, type,
7127 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7129 return NULL_TREE;
7131 case TRUTH_NOT_EXPR:
7132 /* The argument to invert_truthvalue must have Boolean type. */
7133 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7134 arg0 = fold_convert (boolean_type_node, arg0);
7136 /* Note that the operand of this must be an int
7137 and its values must be 0 or 1.
7138 ("true" is a fixed value perhaps depending on the language,
7139 but we don't handle values other than 1 correctly yet.) */
7140 tem = invert_truthvalue (arg0);
7141 /* Avoid infinite recursion. */
7142 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7143 return NULL_TREE;
7144 return fold_convert (type, tem);
7146 case REALPART_EXPR:
7147 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7148 return NULL_TREE;
7149 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7150 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7151 TREE_OPERAND (arg0, 1));
7152 else if (TREE_CODE (arg0) == COMPLEX_CST)
7153 return TREE_REALPART (arg0);
7154 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7155 return fold_build2 (TREE_CODE (arg0), type,
7156 fold_build1 (REALPART_EXPR, type,
7157 TREE_OPERAND (arg0, 0)),
7158 fold_build1 (REALPART_EXPR, type,
7159 TREE_OPERAND (arg0, 1)));
7160 return NULL_TREE;
7162 case IMAGPART_EXPR:
7163 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7164 return fold_convert (type, integer_zero_node);
7165 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7166 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7167 TREE_OPERAND (arg0, 0));
7168 else if (TREE_CODE (arg0) == COMPLEX_CST)
7169 return TREE_IMAGPART (arg0);
7170 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7171 return fold_build2 (TREE_CODE (arg0), type,
7172 fold_build1 (IMAGPART_EXPR, type,
7173 TREE_OPERAND (arg0, 0)),
7174 fold_build1 (IMAGPART_EXPR, type,
7175 TREE_OPERAND (arg0, 1)));
7176 return NULL_TREE;
7178 default:
7179 return NULL_TREE;
7180 } /* switch (code) */
7183 /* Fold a binary expression of code CODE and type TYPE with operands
7184 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7185 Return the folded expression if folding is successful. Otherwise,
7186 return NULL_TREE. */
7188 static tree
7189 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7191 enum tree_code compl_code;
7193 if (code == MIN_EXPR)
7194 compl_code = MAX_EXPR;
7195 else if (code == MAX_EXPR)
7196 compl_code = MIN_EXPR;
7197 else
7198 gcc_unreachable ();
7200 /* MIN (MAX (a, b), b) == b.  */
7201 if (TREE_CODE (op0) == compl_code
7202 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7203 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7205 /* MIN (MAX (b, a), b) == b.  */
7206 if (TREE_CODE (op0) == compl_code
7207 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7208 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7209 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7211 /* MIN (a, MAX (a, b)) == a.  */
7212 if (TREE_CODE (op1) == compl_code
7213 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7214 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7215 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7217 /* MIN (a, MAX (b, a)) == a.  */
7218 if (TREE_CODE (op1) == compl_code
7219 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7220 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7221 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7223 return NULL_TREE;
7226 /* Fold a binary expression of code CODE and type TYPE with operands
7227 OP0 and OP1. Return the folded expression if folding is
7228 successful. Otherwise, return NULL_TREE. */
7230 tree
7231 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7233 tree t1 = NULL_TREE;
7234 tree tem;
7235 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7236 enum tree_code_class kind = TREE_CODE_CLASS (code);
7238 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7239 && TREE_CODE_LENGTH (code) == 2
7240 && op0 != NULL_TREE
7241 && op1 != NULL_TREE);
7243 arg0 = op0;
7244 arg1 = op1;
7246 /* Strip any conversions that don't change the mode. This is
7247 safe for every expression, except for a comparison expression
7248 because its signedness is derived from its operands. So, in
7249 the latter case, only strip conversions that don't change the
7250 signedness.
7252 Note that this is done as an internal manipulation within the
7253 constant folder, in order to find the simplest representation
7254 of the arguments so that their form can be studied. In any
7255 case, the appropriate type conversions should be put back in
7256 the tree that will get out of the constant folder. */
7258 if (kind == tcc_comparison)
7260 STRIP_SIGN_NOPS (arg0);
7261 STRIP_SIGN_NOPS (arg1);
7263 else
7265 STRIP_NOPS (arg0);
7266 STRIP_NOPS (arg1);
7269 /* Note that TREE_CONSTANT isn't enough: static var addresses are
7270 constant but we can't do arithmetic on them. */
7271 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7272 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7273 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
7274 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
7276 if (kind == tcc_binary)
7277 tem = const_binop (code, arg0, arg1, 0);
7278 else if (kind == tcc_comparison)
7279 tem = fold_relational_const (code, type, arg0, arg1);
7280 else
7281 tem = NULL_TREE;
7283 if (tem != NULL_TREE)
7285 if (TREE_TYPE (tem) != type)
7286 tem = fold_convert (type, tem);
7287 return tem;
7291 /* If this is a commutative operation, and ARG0 is a constant, move it
7292 to ARG1 to reduce the number of tests below. */
7293 if (commutative_tree_code (code)
7294 && tree_swap_operands_p (arg0, arg1, true))
7295 return fold_build2 (code, type, op1, op0);
7297 /* ARG0 and ARG1 are the stripped forms of the two operands, OP0 and OP1.
7299 First check for cases where an arithmetic operation is applied to a
7300 compound, conditional, or comparison operation. Push the arithmetic
7301 operation inside the compound or conditional to see if any folding
7302 can then be done. Convert comparison to conditional for this purpose.
7303 This also optimizes non-constant cases that used to be done in
7304 expand_expr.
7306 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
7307 where one of the operands is a truth value and the other is a truth
7308 value or a BIT_AND_EXPR with the constant 1. In that case, the
7309 code below would make the expression more complex. Change it to a
7310 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7311 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7313 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7314 || code == EQ_EXPR || code == NE_EXPR)
7315 && ((truth_value_p (TREE_CODE (arg0))
7316 && (truth_value_p (TREE_CODE (arg1))
7317 || (TREE_CODE (arg1) == BIT_AND_EXPR
7318 && integer_onep (TREE_OPERAND (arg1, 1)))))
7319 || (truth_value_p (TREE_CODE (arg1))
7320 && (truth_value_p (TREE_CODE (arg0))
7321 || (TREE_CODE (arg0) == BIT_AND_EXPR
7322 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7324 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7325 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7326 : TRUTH_XOR_EXPR,
7327 boolean_type_node,
7328 fold_convert (boolean_type_node, arg0),
7329 fold_convert (boolean_type_node, arg1));
7331 if (code == EQ_EXPR)
7332 tem = invert_truthvalue (tem);
7334 return fold_convert (type, tem);
7337 if (TREE_CODE_CLASS (code) == tcc_binary
7338 || TREE_CODE_CLASS (code) == tcc_comparison)
7340 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7341 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7342 fold_build2 (code, type,
7343 TREE_OPERAND (arg0, 1), op1));
7344 if (TREE_CODE (arg1) == COMPOUND_EXPR
7345 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7346 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7347 fold_build2 (code, type,
7348 op0, TREE_OPERAND (arg1, 1)));
7350 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7352 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7353 arg0, arg1,
7354 /*cond_first_p=*/1);
7355 if (tem != NULL_TREE)
7356 return tem;
7359 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7361 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7362 arg1, arg0,
7363 /*cond_first_p=*/0);
7364 if (tem != NULL_TREE)
7365 return tem;
7369 switch (code)
7371 case PLUS_EXPR:
7372 /* A + (-B) -> A - B */
7373 if (TREE_CODE (arg1) == NEGATE_EXPR)
7374 return fold_build2 (MINUS_EXPR, type,
7375 fold_convert (type, arg0),
7376 fold_convert (type, TREE_OPERAND (arg1, 0)));
7377 /* (-A) + B -> B - A */
7378 if (TREE_CODE (arg0) == NEGATE_EXPR
7379 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7380 return fold_build2 (MINUS_EXPR, type,
7381 fold_convert (type, arg1),
7382 fold_convert (type, TREE_OPERAND (arg0, 0)));
7383 /* Convert ~A + 1 to -A. */
7384 if (INTEGRAL_TYPE_P (type)
7385 && TREE_CODE (arg0) == BIT_NOT_EXPR
7386 && integer_onep (arg1))
7387 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
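/* Example, assuming two's complement arithmetic: for A == 5, ~A is
   -6 and ~A + 1 is -5, i.e. -A.  */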
7389 /* Handle (A1 * C1) + (A2 * C2) with A1 == A2 or C1 == C2, where a
7390 factor missing from one side counts as 1. */
7391 if ((TREE_CODE (arg0) == MULT_EXPR
7392 || TREE_CODE (arg1) == MULT_EXPR)
7393 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7395 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7396 if (tem)
7397 return tem;
7400 if (! FLOAT_TYPE_P (type))
7402 if (integer_zerop (arg1))
7403 return non_lvalue (fold_convert (type, arg0));
7405 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7406 with a constant, and the two constants have no bits in common,
7407 we should treat this as a BIT_IOR_EXPR since this may produce more
7408 simplifications. */
7409 if (TREE_CODE (arg0) == BIT_AND_EXPR
7410 && TREE_CODE (arg1) == BIT_AND_EXPR
7411 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7412 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7413 && integer_zerop (const_binop (BIT_AND_EXPR,
7414 TREE_OPERAND (arg0, 1),
7415 TREE_OPERAND (arg1, 1), 0)))
7417 code = BIT_IOR_EXPR;
7418 goto bit_ior;
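/* Example: in (X & 0xF0) + (Y & 0x0F) the masks share no bits, so
   no carries can propagate between the two halves and the sum is
   equivalent to (X & 0xF0) | (Y & 0x0F).  */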
7421 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7422 (plus (plus (mult) (mult)) (foo)) so that we can
7423 take advantage of the factoring cases below. */
7424 if (((TREE_CODE (arg0) == PLUS_EXPR
7425 || TREE_CODE (arg0) == MINUS_EXPR)
7426 && TREE_CODE (arg1) == MULT_EXPR)
7427 || ((TREE_CODE (arg1) == PLUS_EXPR
7428 || TREE_CODE (arg1) == MINUS_EXPR)
7429 && TREE_CODE (arg0) == MULT_EXPR))
7431 tree parg0, parg1, parg, marg;
7432 enum tree_code pcode;
7434 if (TREE_CODE (arg1) == MULT_EXPR)
7435 parg = arg0, marg = arg1;
7436 else
7437 parg = arg1, marg = arg0;
7438 pcode = TREE_CODE (parg);
7439 parg0 = TREE_OPERAND (parg, 0);
7440 parg1 = TREE_OPERAND (parg, 1);
7441 STRIP_NOPS (parg0);
7442 STRIP_NOPS (parg1);
7444 if (TREE_CODE (parg0) == MULT_EXPR
7445 && TREE_CODE (parg1) != MULT_EXPR)
7446 return fold_build2 (pcode, type,
7447 fold_build2 (PLUS_EXPR, type,
7448 fold_convert (type, parg0),
7449 fold_convert (type, marg)),
7450 fold_convert (type, parg1));
7451 if (TREE_CODE (parg0) != MULT_EXPR
7452 && TREE_CODE (parg1) == MULT_EXPR)
7453 return fold_build2 (PLUS_EXPR, type,
7454 fold_convert (type, parg0),
7455 fold_build2 (pcode, type,
7456 fold_convert (type, marg),
7457 fold_convert (type,
7458 parg1)));
7461 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7462 of the array. The loop optimizer sometimes produces this type of
7463 expression. */
7464 if (TREE_CODE (arg0) == ADDR_EXPR)
7466 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7467 if (tem)
7468 return fold_convert (type, tem);
7470 else if (TREE_CODE (arg1) == ADDR_EXPR)
7472 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7473 if (tem)
7474 return fold_convert (type, tem);
7477 else
7479 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7480 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7481 return non_lvalue (fold_convert (type, arg0));
7483 /* Likewise if the operands are reversed. */
7484 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7485 return non_lvalue (fold_convert (type, arg1));
7487 /* Convert X + -C into X - C. */
7488 if (TREE_CODE (arg1) == REAL_CST
7489 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7491 tem = fold_negate_const (arg1, type);
7492 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7493 return fold_build2 (MINUS_EXPR, type,
7494 fold_convert (type, arg0),
7495 fold_convert (type, tem));
7498 if (flag_unsafe_math_optimizations
7499 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7500 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7501 && (tem = distribute_real_division (code, type, arg0, arg1)))
7502 return tem;
7504 /* Convert x+x into x*2.0. */
7505 if (operand_equal_p (arg0, arg1, 0)
7506 && SCALAR_FLOAT_TYPE_P (type))
7507 return fold_build2 (MULT_EXPR, type, arg0,
7508 build_real (type, dconst2));
7510 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7511 if (flag_unsafe_math_optimizations
7512 && TREE_CODE (arg1) == PLUS_EXPR
7513 && TREE_CODE (arg0) != MULT_EXPR)
7515 tree tree10 = TREE_OPERAND (arg1, 0);
7516 tree tree11 = TREE_OPERAND (arg1, 1);
7517 if (TREE_CODE (tree11) == MULT_EXPR
7518 && TREE_CODE (tree10) == MULT_EXPR)
7520 tree tree0;
7521 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7522 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7525 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7526 if (flag_unsafe_math_optimizations
7527 && TREE_CODE (arg0) == PLUS_EXPR
7528 && TREE_CODE (arg1) != MULT_EXPR)
7530 tree tree00 = TREE_OPERAND (arg0, 0);
7531 tree tree01 = TREE_OPERAND (arg0, 1);
7532 if (TREE_CODE (tree01) == MULT_EXPR
7533 && TREE_CODE (tree00) == MULT_EXPR)
7535 tree tree0;
7536 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7537 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7542 bit_rotate:
7543 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7544 is a rotate of A by C1 bits. */
7545 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7546 is a rotate of A by B bits. */
7548 enum tree_code code0, code1;
7549 code0 = TREE_CODE (arg0);
7550 code1 = TREE_CODE (arg1);
7551 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7552 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7553 && operand_equal_p (TREE_OPERAND (arg0, 0),
7554 TREE_OPERAND (arg1, 0), 0)
7555 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7557 tree tree01, tree11;
7558 enum tree_code code01, code11;
7560 tree01 = TREE_OPERAND (arg0, 1);
7561 tree11 = TREE_OPERAND (arg1, 1);
7562 STRIP_NOPS (tree01);
7563 STRIP_NOPS (tree11);
7564 code01 = TREE_CODE (tree01);
7565 code11 = TREE_CODE (tree11);
7566 if (code01 == INTEGER_CST
7567 && code11 == INTEGER_CST
7568 && TREE_INT_CST_HIGH (tree01) == 0
7569 && TREE_INT_CST_HIGH (tree11) == 0
7570 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7571 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7572 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7573 code0 == LSHIFT_EXPR ? tree01 : tree11);
7574 else if (code11 == MINUS_EXPR)
7576 tree tree110, tree111;
7577 tree110 = TREE_OPERAND (tree11, 0);
7578 tree111 = TREE_OPERAND (tree11, 1);
7579 STRIP_NOPS (tree110);
7580 STRIP_NOPS (tree111);
7581 if (TREE_CODE (tree110) == INTEGER_CST
7582 && 0 == compare_tree_int (tree110,
7583 TYPE_PRECISION
7584 (TREE_TYPE (TREE_OPERAND
7585 (arg0, 0))))
7586 && operand_equal_p (tree01, tree111, 0))
7587 return build2 ((code0 == LSHIFT_EXPR
7588 ? LROTATE_EXPR
7589 : RROTATE_EXPR),
7590 type, TREE_OPERAND (arg0, 0), tree01);
7592 else if (code01 == MINUS_EXPR)
7594 tree tree010, tree011;
7595 tree010 = TREE_OPERAND (tree01, 0);
7596 tree011 = TREE_OPERAND (tree01, 1);
7597 STRIP_NOPS (tree010);
7598 STRIP_NOPS (tree011);
7599 if (TREE_CODE (tree010) == INTEGER_CST
7600 && 0 == compare_tree_int (tree010,
7601 TYPE_PRECISION
7602 (TREE_TYPE (TREE_OPERAND
7603 (arg0, 0))))
7604 && operand_equal_p (tree11, tree011, 0))
7605 return build2 ((code0 != LSHIFT_EXPR
7606 ? LROTATE_EXPR
7607 : RROTATE_EXPR),
7608 type, TREE_OPERAND (arg0, 0), tree11);
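/* Example: with a 32-bit unsigned A, (A << 3) + (A >> 29) and
   (A << B) + (A >> (32 - B)) are both rewritten here as rotates of
   A; requiring an unsigned type guarantees the right shift is
   logical, which is what makes the rotate equivalence hold.  */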
7613 associate:
7614 /* In most languages, we can't associate operations on floats through
7615 parentheses. Rather than remember where the parentheses were, we
7616 don't associate floats at all, unless the user has specified
7617 -funsafe-math-optimizations. */
7619 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7621 tree var0, con0, lit0, minus_lit0;
7622 tree var1, con1, lit1, minus_lit1;
7624 /* Split both trees into variables, constants, and literals. Then
7625 associate each group together, the constants with literals,
7626 then the result with variables. This increases the chances of
7627 literals being recombined later and of generating relocatable
7628 expressions for the sum of a constant and literal. */
7629 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7630 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7631 code == MINUS_EXPR);
7633 /* Only do something if we found more than two objects. Otherwise,
7634 nothing has changed and we risk infinite recursion. */
7635 if (2 < ((var0 != 0) + (var1 != 0)
7636 + (con0 != 0) + (con1 != 0)
7637 + (lit0 != 0) + (lit1 != 0)
7638 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7640 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7641 if (code == MINUS_EXPR)
7642 code = PLUS_EXPR;
7644 var0 = associate_trees (var0, var1, code, type);
7645 con0 = associate_trees (con0, con1, code, type);
7646 lit0 = associate_trees (lit0, lit1, code, type);
7647 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7649 /* Preserve the MINUS_EXPR if the negative part of the literal is
7650 greater than the positive part. Otherwise, the multiplicative
7651 folding code (i.e. extract_muldiv) may be fooled when
7652 unsigned constants are subtracted, as in the following
7653 example: ((X*2 + 4) - 8U)/2. */
7654 if (minus_lit0 && lit0)
7656 if (TREE_CODE (lit0) == INTEGER_CST
7657 && TREE_CODE (minus_lit0) == INTEGER_CST
7658 && tree_int_cst_lt (lit0, minus_lit0))
7660 minus_lit0 = associate_trees (minus_lit0, lit0,
7661 MINUS_EXPR, type);
7662 lit0 = 0;
7664 else
7666 lit0 = associate_trees (lit0, minus_lit0,
7667 MINUS_EXPR, type);
7668 minus_lit0 = 0;
7671 if (minus_lit0)
7673 if (con0 == 0)
7674 return fold_convert (type,
7675 associate_trees (var0, minus_lit0,
7676 MINUS_EXPR, type));
7677 else
7679 con0 = associate_trees (con0, minus_lit0,
7680 MINUS_EXPR, type);
7681 return fold_convert (type,
7682 associate_trees (var0, con0,
7683 PLUS_EXPR, type));
7687 con0 = associate_trees (con0, lit0, code, type);
7688 return fold_convert (type, associate_trees (var0, con0,
7689 code, type));
7693 return NULL_TREE;
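/* Example of the association above: (x + 3) - (y - 4) splits into
   the variables x and y and the literals 3 and 4, and is recombined
   as (x - y) + 7, exposing a single constant to later folding.  */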
7695 case MINUS_EXPR:
7696 /* A - (-B) -> A + B */
7697 if (TREE_CODE (arg1) == NEGATE_EXPR)
7698 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7699 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7700 if (TREE_CODE (arg0) == NEGATE_EXPR
7701 && (FLOAT_TYPE_P (type)
7702 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7703 && negate_expr_p (arg1)
7704 && reorder_operands_p (arg0, arg1))
7705 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7706 TREE_OPERAND (arg0, 0));
7707 /* Convert -A - 1 to ~A. */
7708 if (INTEGRAL_TYPE_P (type)
7709 && TREE_CODE (arg0) == NEGATE_EXPR
7710 && integer_onep (arg1))
7711 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7713 /* Convert -1 - A to ~A. */
7714 if (INTEGRAL_TYPE_P (type)
7715 && integer_all_onesp (arg0))
7716 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7718 if (! FLOAT_TYPE_P (type))
7720 if (integer_zerop (arg0))
7721 return negate_expr (fold_convert (type, arg1));
7722 if (integer_zerop (arg1))
7723 return non_lvalue (fold_convert (type, arg0));
7725 /* Fold A - (A & B) into ~B & A. */
7726 if (!TREE_SIDE_EFFECTS (arg0)
7727 && TREE_CODE (arg1) == BIT_AND_EXPR)
7729 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7730 return fold_build2 (BIT_AND_EXPR, type,
7731 fold_build1 (BIT_NOT_EXPR, type,
7732 TREE_OPERAND (arg1, 0)),
7733 arg0);
7734 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7735 return fold_build2 (BIT_AND_EXPR, type,
7736 fold_build1 (BIT_NOT_EXPR, type,
7737 TREE_OPERAND (arg1, 1)),
7738 arg0);
7741 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7742 any power of 2 minus 1. */
7743 if (TREE_CODE (arg0) == BIT_AND_EXPR
7744 && TREE_CODE (arg1) == BIT_AND_EXPR
7745 && operand_equal_p (TREE_OPERAND (arg0, 0),
7746 TREE_OPERAND (arg1, 0), 0))
7748 tree mask0 = TREE_OPERAND (arg0, 1);
7749 tree mask1 = TREE_OPERAND (arg1, 1);
7750 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7752 if (operand_equal_p (tem, mask1, 0))
7754 tem = fold_build2 (BIT_XOR_EXPR, type,
7755 TREE_OPERAND (arg0, 0), mask1);
7756 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7761 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7762 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7763 return non_lvalue (fold_convert (type, arg0));
7765 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7766 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7767 (-ARG1 + ARG0) reduces to -ARG1. */
7768 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7769 return negate_expr (fold_convert (type, arg1));
7771 /* Fold &x - &x. This can happen from &x.foo - &x.
7772 This is unsafe for certain floats even in non-IEEE formats.
7773 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7774 Also note that operand_equal_p is always false if an operand
7775 is volatile. */
7777 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7778 && operand_equal_p (arg0, arg1, 0))
7779 return fold_convert (type, integer_zero_node);
7781 /* A - B -> A + (-B) if B is easily negatable. */
7782 if (negate_expr_p (arg1)
7783 && ((FLOAT_TYPE_P (type)
7784 /* Avoid this transformation if B is a positive REAL_CST. */
7785 && (TREE_CODE (arg1) != REAL_CST
7786 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7787 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7788 return fold_build2 (PLUS_EXPR, type,
7789 fold_convert (type, arg0),
7790 fold_convert (type, negate_expr (arg1)));
7792 /* Try folding difference of addresses. */
7794 HOST_WIDE_INT diff;
7796 if ((TREE_CODE (arg0) == ADDR_EXPR
7797 || TREE_CODE (arg1) == ADDR_EXPR)
7798 && ptr_difference_const (arg0, arg1, &diff))
7799 return build_int_cst_type (type, diff);
7802 /* Fold &a[i] - &a[j] to i-j. */
7803 if (TREE_CODE (arg0) == ADDR_EXPR
7804 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7805 && TREE_CODE (arg1) == ADDR_EXPR
7806 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7808 tree aref0 = TREE_OPERAND (arg0, 0);
7809 tree aref1 = TREE_OPERAND (arg1, 0);
7810 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7811 TREE_OPERAND (aref1, 0), 0))
7813 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7814 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7815 tree esz = array_ref_element_size (aref0);
7816 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7817 return fold_build2 (MULT_EXPR, type, diff,
7818 fold_convert (type, esz));
7823 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7824 of the array. The loop optimizer sometimes produces this type of
7825 expression. */
7826 if (TREE_CODE (arg0) == ADDR_EXPR)
7828 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7829 if (tem)
7830 return fold_convert (type, tem);
7833 if (flag_unsafe_math_optimizations
7834 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7835 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7836 && (tem = distribute_real_division (code, type, arg0, arg1)))
7837 return tem;
7839 /* Handle (A1 * C1) - (A2 * C2) with A1 == A2 or C1 == C2, where a
7840 factor missing from one side counts as 1. */
7841 if ((TREE_CODE (arg0) == MULT_EXPR
7842 || TREE_CODE (arg1) == MULT_EXPR)
7843 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7845 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7846 if (tem)
7847 return tem;
7850 goto associate;
7852 case MULT_EXPR:
7853 /* (-A) * (-B) -> A * B */
7854 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7855 return fold_build2 (MULT_EXPR, type,
7856 TREE_OPERAND (arg0, 0),
7857 negate_expr (arg1));
7858 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7859 return fold_build2 (MULT_EXPR, type,
7860 negate_expr (arg0),
7861 TREE_OPERAND (arg1, 0));
7863 if (! FLOAT_TYPE_P (type))
7865 if (integer_zerop (arg1))
7866 return omit_one_operand (type, arg1, arg0);
7867 if (integer_onep (arg1))
7868 return non_lvalue (fold_convert (type, arg0));
7869 /* Transform x * -1 into -x. */
7870 if (integer_all_onesp (arg1))
7871 return fold_convert (type, negate_expr (arg0));
7873 /* (a * (1 << b)) is (a << b) */
7874 if (TREE_CODE (arg1) == LSHIFT_EXPR
7875 && integer_onep (TREE_OPERAND (arg1, 0)))
7876 return fold_build2 (LSHIFT_EXPR, type, arg0,
7877 TREE_OPERAND (arg1, 1));
7878 if (TREE_CODE (arg0) == LSHIFT_EXPR
7879 && integer_onep (TREE_OPERAND (arg0, 0)))
7880 return fold_build2 (LSHIFT_EXPR, type, arg1,
7881 TREE_OPERAND (arg0, 1));
7883 if (TREE_CODE (arg1) == INTEGER_CST
7884 && 0 != (tem = extract_muldiv (op0,
7885 fold_convert (type, arg1),
7886 code, NULL_TREE)))
7887 return fold_convert (type, tem);
7890 else
7892 /* Maybe fold x * 0 to 0. The expressions aren't the same
7893 when x is NaN, since x * 0 is also NaN. Nor are they the
7894 same in modes with signed zeros, since multiplying a
7895 negative value by 0 gives -0, not +0. */
7896 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7897 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7898 && real_zerop (arg1))
7899 return omit_one_operand (type, arg1, arg0);
7900 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7901 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7902 && real_onep (arg1))
7903 return non_lvalue (fold_convert (type, arg0));
7905 /* Transform x * -1.0 into -x. */
7906 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7907 && real_minus_onep (arg1))
7908 return fold_convert (type, negate_expr (arg0));
7910 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7911 if (flag_unsafe_math_optimizations
7912 && TREE_CODE (arg0) == RDIV_EXPR
7913 && TREE_CODE (arg1) == REAL_CST
7914 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7916 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7917 arg1, 0);
7918 if (tem)
7919 return fold_build2 (RDIV_EXPR, type, tem,
7920 TREE_OPERAND (arg0, 1));
7923 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7924 if (operand_equal_p (arg0, arg1, 0))
7926 tree tem = fold_strip_sign_ops (arg0);
7927 if (tem != NULL_TREE)
7929 tem = fold_convert (type, tem);
7930 return fold_build2 (MULT_EXPR, type, tem, tem);
7934 if (flag_unsafe_math_optimizations)
7936 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7937 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7939 /* Optimizations of root(...)*root(...). */
7940 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7942 tree rootfn, arg, arglist;
7943 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7944 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7946 /* Optimize sqrt(x)*sqrt(x) as x. */
7947 if (BUILTIN_SQRT_P (fcode0)
7948 && operand_equal_p (arg00, arg10, 0)
7949 && ! HONOR_SNANS (TYPE_MODE (type)))
7950 return arg00;
7952 /* Optimize root(x)*root(y) as root(x*y). */
7953 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7954 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7955 arglist = build_tree_list (NULL_TREE, arg);
7956 return build_function_call_expr (rootfn, arglist);
7959 /* Optimize expN(x)*expN(y) as expN(x+y). */
7960 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7962 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7963 tree arg = fold_build2 (PLUS_EXPR, type,
7964 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7965 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7966 tree arglist = build_tree_list (NULL_TREE, arg);
7967 return build_function_call_expr (expfn, arglist);
7970 /* Optimizations of pow(...)*pow(...). */
7971 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7972 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7973 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7975 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7976 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7977 1)));
7978 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7979 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7980 1)));
7982 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7983 if (operand_equal_p (arg01, arg11, 0))
7985 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7986 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7987 tree arglist = tree_cons (NULL_TREE, arg,
7988 build_tree_list (NULL_TREE,
7989 arg01));
7990 return build_function_call_expr (powfn, arglist);
7993 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7994 if (operand_equal_p (arg00, arg10, 0))
7996 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7997 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7998 tree arglist = tree_cons (NULL_TREE, arg00,
7999 build_tree_list (NULL_TREE,
8000 arg));
8001 return build_function_call_expr (powfn, arglist);
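/* Both pow transformations above follow from the usual exponent
   laws, pow (x, y) * pow (z, y) == pow (x * z, y) and
   pow (x, y) * pow (x, z) == pow (x, y + z); they live under
   flag_unsafe_math_optimizations because rounding may differ.  */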
8005 /* Optimize tan(x)*cos(x) as sin(x). */
8006 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8007 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8008 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8009 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8010 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8011 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8012 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8013 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8015 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8017 if (sinfn != NULL_TREE)
8018 return build_function_call_expr (sinfn,
8019 TREE_OPERAND (arg0, 1));
8022 /* Optimize x*pow(x,c) as pow(x,c+1). */
8023 if (fcode1 == BUILT_IN_POW
8024 || fcode1 == BUILT_IN_POWF
8025 || fcode1 == BUILT_IN_POWL)
8027 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8028 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8029 1)));
8030 if (TREE_CODE (arg11) == REAL_CST
8031 && ! TREE_CONSTANT_OVERFLOW (arg11)
8032 && operand_equal_p (arg0, arg10, 0))
8034 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8035 REAL_VALUE_TYPE c;
8036 tree arg, arglist;
8038 c = TREE_REAL_CST (arg11);
8039 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8040 arg = build_real (type, c);
8041 arglist = build_tree_list (NULL_TREE, arg);
8042 arglist = tree_cons (NULL_TREE, arg0, arglist);
8043 return build_function_call_expr (powfn, arglist);
8047 /* Optimize pow(x,c)*x as pow(x,c+1). */
8048 if (fcode0 == BUILT_IN_POW
8049 || fcode0 == BUILT_IN_POWF
8050 || fcode0 == BUILT_IN_POWL)
8052 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8053 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8054 1)));
8055 if (TREE_CODE (arg01) == REAL_CST
8056 && ! TREE_CONSTANT_OVERFLOW (arg01)
8057 && operand_equal_p (arg1, arg00, 0))
8059 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8060 REAL_VALUE_TYPE c;
8061 tree arg, arglist;
8063 c = TREE_REAL_CST (arg01);
8064 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8065 arg = build_real (type, c);
8066 arglist = build_tree_list (NULL_TREE, arg);
8067 arglist = tree_cons (NULL_TREE, arg1, arglist);
8068 return build_function_call_expr (powfn, arglist);
8072 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8073 if (! optimize_size
8074 && operand_equal_p (arg0, arg1, 0))
8076 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8078 if (powfn)
8080 tree arg = build_real (type, dconst2);
8081 tree arglist = build_tree_list (NULL_TREE, arg);
8082 arglist = tree_cons (NULL_TREE, arg0, arglist);
8083 return build_function_call_expr (powfn, arglist);
8088 goto associate;
8090 case BIT_IOR_EXPR:
8091 bit_ior:
8092 if (integer_all_onesp (arg1))
8093 return omit_one_operand (type, arg1, arg0);
8094 if (integer_zerop (arg1))
8095 return non_lvalue (fold_convert (type, arg0));
8096 if (operand_equal_p (arg0, arg1, 0))
8097 return non_lvalue (fold_convert (type, arg0));
8099 /* ~X | X is -1. */
8100 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8101 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8103 t1 = build_int_cst (type, -1);
8104 t1 = force_fit_type (t1, 0, false, false);
8105 return omit_one_operand (type, t1, arg1);
8108 /* X | ~X is -1. */
8109 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8110 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8112 t1 = build_int_cst (type, -1);
8113 t1 = force_fit_type (t1, 0, false, false);
8114 return omit_one_operand (type, t1, arg0);
8117 /* Canonicalize (X & C1) | C2. */
8118 if (TREE_CODE (arg0) == BIT_AND_EXPR
8119 && TREE_CODE (arg1) == INTEGER_CST
8120 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8122 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
8123 int width = TYPE_PRECISION (type);
8124 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
8125 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8126 hi2 = TREE_INT_CST_HIGH (arg1);
8127 lo2 = TREE_INT_CST_LOW (arg1);
8129 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
8130 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
8131 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8133 if (width > HOST_BITS_PER_WIDE_INT)
8135 mhi = (unsigned HOST_WIDE_INT) -1
8136 >> (2 * HOST_BITS_PER_WIDE_INT - width);
8137 mlo = -1;
8139 else
8141 mhi = 0;
8142 mlo = (unsigned HOST_WIDE_INT) -1
8143 >> (HOST_BITS_PER_WIDE_INT - width);
8146 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
8147 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
8148 return fold_build2 (BIT_IOR_EXPR, type,
8149 TREE_OPERAND (arg0, 0), arg1);
8151 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
8152 hi1 &= mhi;
8153 lo1 &= mlo;
8154 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
8155 return fold_build2 (BIT_IOR_EXPR, type,
8156 fold_build2 (BIT_AND_EXPR, type,
8157 TREE_OPERAND (arg0, 0),
8158 build_int_cst_wide (type,
8159 lo1 & ~lo2,
8160 hi1 & ~hi2)),
8161 arg1);
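/* Example: (X & 0x3F) | 0x0F becomes (X & 0x30) | 0x0F here, since
   the low four bits of the result are forced to 1 by C2 == 0x0F
   and so can be dropped from C1 == 0x3F.  */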
8164 /* (X & Y) | Y is (X, Y). */
8165 if (TREE_CODE (arg0) == BIT_AND_EXPR
8166 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8167 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8168 /* (X & Y) | X is (Y, X). */
8169 if (TREE_CODE (arg0) == BIT_AND_EXPR
8170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8171 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
8172 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
8173 /* X | (X & Y) is (Y, X). */
8174 if (TREE_CODE (arg1) == BIT_AND_EXPR
8175 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8176 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
8177 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
8178 /* X | (Y & X) is (Y, X). */
8179 if (TREE_CODE (arg1) == BIT_AND_EXPR
8180 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
8181 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8182 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
8184 t1 = distribute_bit_expr (code, type, arg0, arg1);
8185 if (t1 != NULL_TREE)
8186 return t1;
8188 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8190 This results in more efficient code for machines without a NAND
8191 instruction. Combine will canonicalize to the first form
8192 which will allow use of NAND instructions provided by the
8193 backend if they exist. */
8194 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8195 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8197 return fold_build1 (BIT_NOT_EXPR, type,
8198 build2 (BIT_AND_EXPR, type,
8199 TREE_OPERAND (arg0, 0),
8200 TREE_OPERAND (arg1, 0)));
8203 /* See if this can be simplified into a rotate first. If that
8204 is unsuccessful, continue in the association code. */
8205 goto bit_rotate;
8207 case BIT_XOR_EXPR:
8208 if (integer_zerop (arg1))
8209 return non_lvalue (fold_convert (type, arg0));
8210 if (integer_all_onesp (arg1))
8211 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8212 if (operand_equal_p (arg0, arg1, 0))
8213 return omit_one_operand (type, integer_zero_node, arg0);
8215 /* ~X ^ X is -1. */
8216 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8217 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8219 t1 = build_int_cst (type, -1);
8220 t1 = force_fit_type (t1, 0, false, false);
8221 return omit_one_operand (type, t1, arg1);
8224 /* X ^ ~X is -1. */
8225 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8226 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8228 t1 = build_int_cst (type, -1);
8229 t1 = force_fit_type (t1, 0, false, false);
8230 return omit_one_operand (type, t1, arg0);
8233 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8234 with a constant, and the two constants have no bits in common,
8235 we should treat this as a BIT_IOR_EXPR since this may produce more
8236 simplifications. */
8237 if (TREE_CODE (arg0) == BIT_AND_EXPR
8238 && TREE_CODE (arg1) == BIT_AND_EXPR
8239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8240 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8241 && integer_zerop (const_binop (BIT_AND_EXPR,
8242 TREE_OPERAND (arg0, 1),
8243 TREE_OPERAND (arg1, 1), 0)))
8245 code = BIT_IOR_EXPR;
8246 goto bit_ior;
8249 /* (X | Y) ^ X -> Y & ~X. */
8250 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8251 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8253 tree t2 = TREE_OPERAND (arg0, 1);
8254 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8255 arg1);
8256 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8257 fold_convert (type, t1));
8258 return t1;
8261 /* (Y | X) ^ X -> Y & ~X. */
8262 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8263 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8265 tree t2 = TREE_OPERAND (arg0, 0);
8266 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8267 arg1);
8268 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8269 fold_convert (type, t1));
8270 return t1;
8273 /* X ^ (X | Y) -> Y & ~X. */
8274 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8275 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8277 tree t2 = TREE_OPERAND (arg1, 1);
8278 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8279 arg0);
8280 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8281 fold_convert (type, t1));
8282 return t1;
8285 /* X ^ (Y | X) -> Y & ~X. */
8286 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8287 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8289 tree t2 = TREE_OPERAND (arg1, 0);
8290 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8291 arg0);
8292 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8293 fold_convert (type, t1));
8294 return t1;
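/* The four variants above are one identity: XORing (X | Y) with X
   clears exactly the bits of X, leaving Y & ~X.  For example, with
   X == 0xC and Y == 0xA, (X | Y) ^ X is 0xE ^ 0xC == 0x2, which is
   0xA & ~0xC.  */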
8297 /* Convert ~X ^ ~Y to X ^ Y. */
8298 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8299 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8300 return fold_build2 (code, type,
8301 fold_convert (type, TREE_OPERAND (arg0, 0)),
8302 fold_convert (type, TREE_OPERAND (arg1, 0)));
8304 /* See if this can be simplified into a rotate first. If that
8305 is unsuccessful, continue in the association code. */
8306 goto bit_rotate;
8308 case BIT_AND_EXPR:
8309 if (integer_all_onesp (arg1))
8310 return non_lvalue (fold_convert (type, arg0));
8311 if (integer_zerop (arg1))
8312 return omit_one_operand (type, arg1, arg0);
8313 if (operand_equal_p (arg0, arg1, 0))
8314 return non_lvalue (fold_convert (type, arg0));
8316 /* ~X & X is always zero. */
8317 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8319 return omit_one_operand (type, integer_zero_node, arg1);
8321 /* X & ~X is always zero. */
8322 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8323 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8324 return omit_one_operand (type, integer_zero_node, arg0);
8326 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
8327 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8328 && TREE_CODE (arg1) == INTEGER_CST
8329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8330 return fold_build2 (BIT_IOR_EXPR, type,
8331 fold_build2 (BIT_AND_EXPR, type,
8332 TREE_OPERAND (arg0, 0), arg1),
8333 fold_build2 (BIT_AND_EXPR, type,
8334 TREE_OPERAND (arg0, 1), arg1));
8336 /* (X | Y) & Y is (X, Y). */
8337 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8338 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8339 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8340 /* (X | Y) & X is (Y, X). */
8341 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8342 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8343 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
8344 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
8345 /* X & (X | Y) is (Y, X). */
8346 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8347 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8348 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
8349 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
8350 /* X & (Y | X) is (Y, X). */
8351 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8352 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
8353 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8354 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
8356 t1 = distribute_bit_expr (code, type, arg0, arg1);
8357 if (t1 != NULL_TREE)
8358 return t1;
8359 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8360 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8361 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8363 unsigned int prec
8364 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8366 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8367 && (~TREE_INT_CST_LOW (arg1)
8368 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8369 return fold_convert (type, TREE_OPERAND (arg0, 0));
8372 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8374 This results in more efficient code for machines without a NOR
8375 instruction. Combine will canonicalize to the first form
8376 which will allow use of NOR instructions provided by the
8377 backend if they exist. */
8378 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8379 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8381 return fold_build1 (BIT_NOT_EXPR, type,
8382 build2 (BIT_IOR_EXPR, type,
8383 TREE_OPERAND (arg0, 0),
8384 TREE_OPERAND (arg1, 0)));
8387 goto associate;
8389 case RDIV_EXPR:
8390 /* Don't touch a floating-point divide by zero unless the mode
8391 of the constant can represent infinity. */
8392 if (TREE_CODE (arg1) == REAL_CST
8393 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8394 && real_zerop (arg1))
8395 return NULL_TREE;
8397 /* Optimize A / A to 1.0 if we don't care about
8398 NaNs or Infinities. */
8399 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8400 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8401 && operand_equal_p (arg0, arg1, 0))
8403 tree r = build_real (TREE_TYPE (arg0), dconst1);
8405 return omit_two_operands (type, r, arg0, arg1);
8408 /* (-A) / (-B) -> A / B */
8409 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8410 return fold_build2 (RDIV_EXPR, type,
8411 TREE_OPERAND (arg0, 0),
8412 negate_expr (arg1));
8413 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8414 return fold_build2 (RDIV_EXPR, type,
8415 negate_expr (arg0),
8416 TREE_OPERAND (arg1, 0));
8418 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8419 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8420 && real_onep (arg1))
8421 return non_lvalue (fold_convert (type, arg0));
8423 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8424 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8425 && real_minus_onep (arg1))
8426 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8428 /* If ARG1 is a constant, we can convert this to a multiply by the
8429 reciprocal. This does not have the same rounding properties,
8430 so only do this under -funsafe-math-optimizations. We can actually
8431 always safely do it if ARG1 is a power of two, but it's hard to
8432 tell if it is or not in a portable manner. */
8433 if (TREE_CODE (arg1) == REAL_CST)
8435 if (flag_unsafe_math_optimizations
8436 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8437 arg1, 0)))
8438 return fold_build2 (MULT_EXPR, type, arg0, tem);
8439 /* Find the reciprocal if optimizing and the result is exact. */
8440 if (optimize)
8442 REAL_VALUE_TYPE r;
8443 r = TREE_REAL_CST (arg1);
8444 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8446 tem = build_real (type, r);
8447 return fold_build2 (MULT_EXPR, type,
8448 fold_convert (type, arg0), tem);
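/* Example: X / 4.0 becomes X * 0.25 here even without unsafe math,
   because exact_real_inverse verifies that 0.25 is the exact
   reciprocal; powers of two are the typical case where this
   succeeds.  */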
8452 /* Convert A/B/C to A/(B*C). */
8453 if (flag_unsafe_math_optimizations
8454 && TREE_CODE (arg0) == RDIV_EXPR)
8455 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8456 fold_build2 (MULT_EXPR, type,
8457 TREE_OPERAND (arg0, 1), arg1));
8459 /* Convert A/(B/C) to (A/B)*C. */
8460 if (flag_unsafe_math_optimizations
8461 && TREE_CODE (arg1) == RDIV_EXPR)
8462 return fold_build2 (MULT_EXPR, type,
8463 fold_build2 (RDIV_EXPR, type, arg0,
8464 TREE_OPERAND (arg1, 0)),
8465 TREE_OPERAND (arg1, 1));
8467 /* Convert C1/(X*C2) into (C1/C2)/X. */
8468 if (flag_unsafe_math_optimizations
8469 && TREE_CODE (arg1) == MULT_EXPR
8470 && TREE_CODE (arg0) == REAL_CST
8471 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8473 tree tem = const_binop (RDIV_EXPR, arg0,
8474 TREE_OPERAND (arg1, 1), 0);
8475 if (tem)
8476 return fold_build2 (RDIV_EXPR, type, tem,
8477 TREE_OPERAND (arg1, 0));
8480 if (flag_unsafe_math_optimizations)
8482 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8483 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8485 /* Optimize sin(x)/cos(x) as tan(x). */
8486 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8487 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8488 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8489 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8490 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8492 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8494 if (tanfn != NULL_TREE)
8495 return build_function_call_expr (tanfn,
8496 TREE_OPERAND (arg0, 1));
8499 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8500 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8501 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8502 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8503 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8504 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8506 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8508 if (tanfn != NULL_TREE)
8510 tree tmp = TREE_OPERAND (arg0, 1);
8511 tmp = build_function_call_expr (tanfn, tmp);
8512 return fold_build2 (RDIV_EXPR, type,
8513 build_real (type, dconst1), tmp);
8517 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
8518 NaNs or Infinities. */
8519 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
8520 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
8521 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
8523 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8524 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8526 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8527 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8528 && operand_equal_p (arg00, arg01, 0))
8530 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8532 if (cosfn != NULL_TREE)
8533 return build_function_call_expr (cosfn,
8534 TREE_OPERAND (arg0, 1));
8538 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
8539 NaNs or Infinities. */
8540 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
8541 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
8542 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
8544 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8545 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8547 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8548 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8549 && operand_equal_p (arg00, arg01, 0))
8551 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8553 if (cosfn != NULL_TREE)
8555 tree tmp = TREE_OPERAND (arg0, 1);
8556 tmp = build_function_call_expr (cosfn, tmp);
8557 return fold_build2 (RDIV_EXPR, type,
8558 build_real (type, dconst1),
8559 tmp);
8564 /* Optimize pow(x,c)/x as pow(x,c-1). */
8565 if (fcode0 == BUILT_IN_POW
8566 || fcode0 == BUILT_IN_POWF
8567 || fcode0 == BUILT_IN_POWL)
8569 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8570 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8571 if (TREE_CODE (arg01) == REAL_CST
8572 && ! TREE_CONSTANT_OVERFLOW (arg01)
8573 && operand_equal_p (arg1, arg00, 0))
8575 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8576 REAL_VALUE_TYPE c;
8577 tree arg, arglist;
8579 c = TREE_REAL_CST (arg01);
8580 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8581 arg = build_real (type, c);
8582 arglist = build_tree_list (NULL_TREE, arg);
8583 arglist = tree_cons (NULL_TREE, arg1, arglist);
8584 return build_function_call_expr (powfn, arglist);
8588 /* Optimize x/expN(y) into x*expN(-y). */
8589 if (BUILTIN_EXPONENT_P (fcode1))
8591 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8592 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8593 tree arglist = build_tree_list (NULL_TREE,
8594 fold_convert (type, arg));
8595 arg1 = build_function_call_expr (expfn, arglist);
8596 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8599 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8600 if (fcode1 == BUILT_IN_POW
8601 || fcode1 == BUILT_IN_POWF
8602 || fcode1 == BUILT_IN_POWL)
8604 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8605 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8606 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8607 tree neg11 = fold_convert (type, negate_expr (arg11));
8608 tree arglist = tree_cons (NULL_TREE, arg10,
8609 build_tree_list (NULL_TREE, neg11));
8610 arg1 = build_function_call_expr (powfn, arglist);
8611 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8614 return NULL_TREE;
8616 case TRUNC_DIV_EXPR:
8617 case ROUND_DIV_EXPR:
8618 case FLOOR_DIV_EXPR:
8619 case CEIL_DIV_EXPR:
8620 case EXACT_DIV_EXPR:
8621 if (integer_onep (arg1))
8622 return non_lvalue (fold_convert (type, arg0));
8623 if (integer_zerop (arg1))
8624 return NULL_TREE;
8625 /* X / -1 is -X. */
8626 if (!TYPE_UNSIGNED (type)
8627 && TREE_CODE (arg1) == INTEGER_CST
8628 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8629 && TREE_INT_CST_HIGH (arg1) == -1)
8630 return fold_convert (type, negate_expr (arg0));
8632 /* Convert -A / -B to A / B when the type is signed and overflow is
8633 undefined. */
8634 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8635 && TREE_CODE (arg0) == NEGATE_EXPR
8636 && negate_expr_p (arg1))
8637 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8638 negate_expr (arg1));
8639 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8640 && TREE_CODE (arg1) == NEGATE_EXPR
8641 && negate_expr_p (arg0))
8642 return fold_build2 (code, type, negate_expr (arg0),
8643 TREE_OPERAND (arg1, 0));
8645 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8646 operation, EXACT_DIV_EXPR.
8648 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8649 At one time others generated faster code; it's not clear if they do
8650 after the last round of changes to the DIV code in expmed.c. */
8651 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8652 && multiple_of_p (type, arg0, arg1))
8653 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8655 if (TREE_CODE (arg1) == INTEGER_CST
8656 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8657 return fold_convert (type, tem);
8659 return NULL_TREE;
8661 case CEIL_MOD_EXPR:
8662 case FLOOR_MOD_EXPR:
8663 case ROUND_MOD_EXPR:
8664 case TRUNC_MOD_EXPR:
8665 /* X % 1 is always zero, but be sure to preserve any side
8666 effects in X. */
8667 if (integer_onep (arg1))
8668 return omit_one_operand (type, integer_zero_node, arg0);
8670 /* For X % 0, return X % 0 unchanged so that we get the
8671 proper warnings and errors. */
8672 if (integer_zerop (arg1))
8673 return NULL_TREE;
8675 /* 0 % X is always zero, but be sure to preserve any side
8676 effects in X. Place this after checking for X == 0. */
8677 if (integer_zerop (arg0))
8678 return omit_one_operand (type, integer_zero_node, arg1);
8680 /* X % -1 is zero. */
8681 if (!TYPE_UNSIGNED (type)
8682 && TREE_CODE (arg1) == INTEGER_CST
8683 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8684 && TREE_INT_CST_HIGH (arg1) == -1)
8685 return omit_one_operand (type, integer_zero_node, arg0);
8687 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8688 i.e. "X % C" into "X & C2", if X and C are positive. */
8689 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8690 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8691 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8693 unsigned HOST_WIDE_INT high, low;
8694 tree mask;
8695 int l;
8697 l = tree_log2 (arg1);
8698 if (l >= HOST_BITS_PER_WIDE_INT)
8700 high = ((unsigned HOST_WIDE_INT) 1
8701 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8702 low = -1;
8704 else
8706 high = 0;
8707 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8710 mask = build_int_cst_wide (type, low, high);
8711 return fold_build2 (BIT_AND_EXPR, type,
8712 fold_convert (type, arg0), mask);
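/* Example: for nonnegative X, X % 8 == X & 7; the mask built above
   is (1 << l) - 1 with l == tree_log2 (arg1).  */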
8715 /* X % -C is the same as X % C. */
8716 if (code == TRUNC_MOD_EXPR
8717 && !TYPE_UNSIGNED (type)
8718 && TREE_CODE (arg1) == INTEGER_CST
8719 && !TREE_CONSTANT_OVERFLOW (arg1)
8720 && TREE_INT_CST_HIGH (arg1) < 0
8721 && !flag_trapv
8722 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8723 && !sign_bit_p (arg1, arg1))
8724 return fold_build2 (code, type, fold_convert (type, arg0),
8725 fold_convert (type, negate_expr (arg1)));
8727 /* X % -Y is the same as X % Y. */
8728 if (code == TRUNC_MOD_EXPR
8729 && !TYPE_UNSIGNED (type)
8730 && TREE_CODE (arg1) == NEGATE_EXPR
8731 && !flag_trapv)
8732 return fold_build2 (code, type, fold_convert (type, arg0),
8733 fold_convert (type, TREE_OPERAND (arg1, 0)));
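/* Both transformations rely on truncating division, where the sign
   of X % Y follows X alone: 7 % -3 == 7 % 3 == 1 and
   (-7) % 3 == (-7) % -3 == -1.  */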
8735 if (TREE_CODE (arg1) == INTEGER_CST
8736 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8737 return fold_convert (type, tem);
8739 return NULL_TREE;
8741 case LROTATE_EXPR:
8742 case RROTATE_EXPR:
8743 if (integer_all_onesp (arg0))
8744 return omit_one_operand (type, arg0, arg1);
8745 goto shift;
8747 case RSHIFT_EXPR:
8748 /* Optimize -1 >> x for arithmetic right shifts. */
8749 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8750 return omit_one_operand (type, arg0, arg1);
8751 /* ... fall through ... */
8753 case LSHIFT_EXPR:
8754 shift:
8755 if (integer_zerop (arg1))
8756 return non_lvalue (fold_convert (type, arg0));
8757 if (integer_zerop (arg0))
8758 return omit_one_operand (type, arg0, arg1);
8760 /* Since a negative shift count is not well-defined,
8761 don't try to compute it in the compiler. */
8762 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8763 return NULL_TREE;
8765 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8766 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8767 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8768 && host_integerp (TREE_OPERAND (arg0, 1), false)
8769 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8771 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8772 + TREE_INT_CST_LOW (arg1));
8774 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8775 being well defined. */
8776 if (low >= TYPE_PRECISION (type))
8778 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8779 low = low % TYPE_PRECISION (type);
8780 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8781 return build_int_cst (type, 0);
8782 else
8783 low = TYPE_PRECISION (type) - 1;
8786 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8787 build_int_cst (type, low));
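/* Example: (x << 3) << 5 becomes x << 8.  When the combined count
   reaches the precision, the cases above substitute the defined
   result: rotate counts reduce modulo the precision, logical shifts
   yield 0, and arithmetic right shifts saturate at precision - 1.  */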
8790 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8791 into x & ((unsigned)-1 >> c) for unsigned types. */
8792 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8793 || (TYPE_UNSIGNED (type)
8794 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8795 && host_integerp (arg1, false)
8796 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8797 && host_integerp (TREE_OPERAND (arg0, 1), false)
8798 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8800 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8801 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8802 tree lshift;
8803 tree arg00;
8805 if (low0 == low1)
8807 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8809 lshift = build_int_cst (type, -1);
8810 lshift = int_const_binop (code, lshift, arg1, 0);
8812 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
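/* Example: with 32-bit x, (x >> 4) << 4 becomes x & 0xFFFFFFF0,
   i.e. x with its low four bits cleared; int_const_binop above
   computes that mask as -1 << 4 (or as (unsigned)-1 >> 4 in the
   unsigned right-shift case).  */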
8816 /* Rewrite an LROTATE_EXPR by a constant into an
8817 RROTATE_EXPR by a new constant. */
8818 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8820 tree tem = build_int_cst (NULL_TREE,
8821 GET_MODE_BITSIZE (TYPE_MODE (type)));
8822 tem = fold_convert (TREE_TYPE (arg1), tem);
8823 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8824 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
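/* Example: in a 32-bit mode, a rotate left by 3 is rewritten as a
   rotate right by 32 - 3 == 29, so later code only has to handle
   one rotate direction.  */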
8827 /* If we have a rotate of a bit operation with the rotate count and
8828 the second operand of the bit operation both constant,
8829 permute the two operations. */
8830 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8831 && (TREE_CODE (arg0) == BIT_AND_EXPR
8832 || TREE_CODE (arg0) == BIT_IOR_EXPR
8833 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8835 return fold_build2 (TREE_CODE (arg0), type,
8836 fold_build2 (code, type,
8837 TREE_OPERAND (arg0, 0), arg1),
8838 fold_build2 (code, type,
8839 TREE_OPERAND (arg0, 1), arg1));
8841 /* Two consecutive rotates adding up to the width of the mode can
8842 be ignored. */
8843 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8844 && TREE_CODE (arg0) == RROTATE_EXPR
8845 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8846 && TREE_INT_CST_HIGH (arg1) == 0
8847 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8848 && ((TREE_INT_CST_LOW (arg1)
8849 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8850 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8851 return TREE_OPERAND (arg0, 0);
8853 return NULL_TREE;
8855 case MIN_EXPR:
8856 if (operand_equal_p (arg0, arg1, 0))
8857 return omit_one_operand (type, arg0, arg1);
8858 if (INTEGRAL_TYPE_P (type)
8859 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8860 return omit_one_operand (type, arg1, arg0);
8861 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
8862 if (tem)
8863 return tem;
8864 goto associate;
8866 case MAX_EXPR:
8867 if (operand_equal_p (arg0, arg1, 0))
8868 return omit_one_operand (type, arg0, arg1);
8869 if (INTEGRAL_TYPE_P (type)
8870 && TYPE_MAX_VALUE (type)
8871 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8872 return omit_one_operand (type, arg1, arg0);
8873 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
8874 if (tem)
8875 return tem;
8876 goto associate;
8878 case TRUTH_ANDIF_EXPR:
8879 /* Note that the operands of this must be ints
8880 and their values must be 0 or 1.
8881 ("true" is a fixed value perhaps depending on the language.) */
8882 /* If first arg is constant zero, return it. */
8883 if (integer_zerop (arg0))
8884 return fold_convert (type, arg0);
8885 case TRUTH_AND_EXPR:
8886 /* If either arg is constant true, drop it. */
8887 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8888 return non_lvalue (fold_convert (type, arg1));
8889 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8890 /* Preserve sequence points. */
8891 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8892 return non_lvalue (fold_convert (type, arg0));
8893 /* If second arg is constant zero, result is zero, but first arg
8894 must be evaluated. */
8895 if (integer_zerop (arg1))
8896 return omit_one_operand (type, arg1, arg0);
8897 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8898 case will be handled here. */
8899 if (integer_zerop (arg0))
8900 return omit_one_operand (type, arg0, arg1);
8902 /* !X && X is always false. */
8903 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8904 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8905 return omit_one_operand (type, integer_zero_node, arg1);
8906 /* X && !X is always false. */
8907 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8908 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8909 return omit_one_operand (type, integer_zero_node, arg0);
8911 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8912 means A >= Y && A != MAX, but in this case we know that
8913 A < X <= MAX. */
8915 if (!TREE_SIDE_EFFECTS (arg0)
8916 && !TREE_SIDE_EFFECTS (arg1))
8918 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8919 if (tem && !operand_equal_p (tem, arg0, 0))
8920 return fold_build2 (code, type, tem, arg1);
8922 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8923 if (tem && !operand_equal_p (tem, arg1, 0))
8924 return fold_build2 (code, type, arg0, tem);
8927 truth_andor:
8928 /* We only do these simplifications if we are optimizing. */
8929 if (!optimize)
8930 return NULL_TREE;
8932 /* Check for things like (A || B) && (A || C). We can convert this
8933 to A || (B && C). Note that either operator can be any of the four
8934 truth and/or operations and the transformation will still be
8935 valid. Also note that we only care about order for the
8936 ANDIF and ORIF operators. If B contains side effects, this
8937 might change the truth-value of A. */
8938 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8939 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8940 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8941 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8942 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8943 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8945 tree a00 = TREE_OPERAND (arg0, 0);
8946 tree a01 = TREE_OPERAND (arg0, 1);
8947 tree a10 = TREE_OPERAND (arg1, 0);
8948 tree a11 = TREE_OPERAND (arg1, 1);
8949 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8950 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8951 && (code == TRUTH_AND_EXPR
8952 || code == TRUTH_OR_EXPR));
8954 if (operand_equal_p (a00, a10, 0))
8955 return fold_build2 (TREE_CODE (arg0), type, a00,
8956 fold_build2 (code, type, a01, a11));
8957 else if (commutative && operand_equal_p (a00, a11, 0))
8958 return fold_build2 (TREE_CODE (arg0), type, a00,
8959 fold_build2 (code, type, a01, a10));
8960 else if (commutative && operand_equal_p (a01, a10, 0))
8961 return fold_build2 (TREE_CODE (arg0), type, a01,
8962 fold_build2 (code, type, a00, a11));
8964 /* This case is tricky because we must either have commutative
8965 operators or else A10 must not have side-effects. */
8967 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8968 && operand_equal_p (a01, a11, 0))
8969 return fold_build2 (TREE_CODE (arg0), type,
8970 fold_build2 (code, type, a00, a10),
8971 a01);
8974 /* See if we can build a range comparison. */
8975 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8976 return tem;
8978 /* Check for the possibility of merging component references. If our
8979 lhs is another similar operation, try to merge its rhs with our
8980 rhs. Then try to merge our lhs and rhs. */
8981 if (TREE_CODE (arg0) == code
8982 && 0 != (tem = fold_truthop (code, type,
8983 TREE_OPERAND (arg0, 1), arg1)))
8984 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8986 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8987 return tem;
8989 return NULL_TREE;
8991 case TRUTH_ORIF_EXPR:
8992 /* Note that the operands of this must be ints
8993 and their values must be 0 or 1.
8994 ("true" is a fixed value perhaps depending on the language.) */
8995 /* If first arg is constant true, return it. */
8996 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8997 return fold_convert (type, arg0);
8998 case TRUTH_OR_EXPR:
8999 /* If either arg is constant zero, drop it. */
9000 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
9001 return non_lvalue (fold_convert (type, arg1));
9002 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
9003 /* Preserve sequence points. */
9004 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9005 return non_lvalue (fold_convert (type, arg0));
9006 /* If second arg is constant true, result is true, but we must
9007 evaluate first arg. */
9008 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
9009 return omit_one_operand (type, arg1, arg0);
9010 /* Likewise for first arg, but note this only occurs here for
9011 TRUTH_OR_EXPR. */
9012 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9013 return omit_one_operand (type, arg0, arg1);
9015 /* !X || X is always true. */
9016 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9017 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9018 return omit_one_operand (type, integer_one_node, arg1);
9019 /* X || !X is always true. */
9020 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9021 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9022 return omit_one_operand (type, integer_one_node, arg0);
9024 goto truth_andor;
9026 case TRUTH_XOR_EXPR:
9027 /* If the second arg is constant zero, drop it. */
9028 if (integer_zerop (arg1))
9029 return non_lvalue (fold_convert (type, arg0));
9030 /* If the second arg is constant true, this is a logical inversion. */
9031 if (integer_onep (arg1))
9033 /* Only call invert_truthvalue if operand is a truth value. */
9034 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
9035 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
9036 else
9037 tem = invert_truthvalue (arg0);
9038 return non_lvalue (fold_convert (type, tem));
9040 /* Identical arguments cancel to zero. */
9041 if (operand_equal_p (arg0, arg1, 0))
9042 return omit_one_operand (type, integer_zero_node, arg0);
9044 /* !X ^ X is always true. */
9045 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9046 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9047 return omit_one_operand (type, integer_one_node, arg1);
9049 /* X ^ !X is always true. */
9050 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9051 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9052 return omit_one_operand (type, integer_one_node, arg0);
9054 return NULL_TREE;
9056 case EQ_EXPR:
9057 case NE_EXPR:
9058 case LT_EXPR:
9059 case GT_EXPR:
9060 case LE_EXPR:
9061 case GE_EXPR:
9062 /* If one arg is a real or integer constant, put it last. */
9063 if (tree_swap_operands_p (arg0, arg1, true))
9064 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
9066 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
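      /* For example, with an 8-bit type, ~X == 0xF0 becomes X == 0x0F,
         since the BIT_NOT_EXPR moved onto the constant folds away. */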
9067 if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
9068 && (code == NE_EXPR || code == EQ_EXPR))
9069 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9070 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9071 arg1));
9073 /* bool_var != 0 becomes bool_var. */
9074 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
9075 && code == NE_EXPR)
9076 return non_lvalue (fold_convert (type, arg0));
9078 /* bool_var == 1 becomes bool_var. */
9079 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
9080 && code == EQ_EXPR)
9081 return non_lvalue (fold_convert (type, arg0));
9083 /* bool_var != 1 becomes !bool_var. */
9084 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
9085 && code == NE_EXPR)
9086 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
9088 /* bool_var == 0 becomes !bool_var. */
9089 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
9090 && code == EQ_EXPR)
9091 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
9093 /* If this is an equality comparison of the address of a non-weak
9094 object against zero, then we know the result. */
9095 if ((code == EQ_EXPR || code == NE_EXPR)
9096 && TREE_CODE (arg0) == ADDR_EXPR
9097 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
9098 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9099 && integer_zerop (arg1))
9100 return constant_boolean_node (code != EQ_EXPR, type);
9102 /* If this is an equality comparison of the addresses of two non-weak,
9103 unaliased symbols neither of which is extern (since we do not
9104 have access to attributes for externs), then we know the result. */
9105 if ((code == EQ_EXPR || code == NE_EXPR)
9106 && TREE_CODE (arg0) == ADDR_EXPR
9107 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
9108 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9109 && ! lookup_attribute ("alias",
9110 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
9111 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
9112 && TREE_CODE (arg1) == ADDR_EXPR
9113 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
9114 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
9115 && ! lookup_attribute ("alias",
9116 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
9117 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
9119 /* We know that we're looking at the address of two
9120 non-weak, unaliased, static _DECL nodes.
9122 It is both wasteful and incorrect to call operand_equal_p
9123 to compare the two ADDR_EXPR nodes. It is wasteful in that
9124 all we need to do is test pointer equality for the arguments
9125 to the two ADDR_EXPR nodes. It is incorrect to use
9126 operand_equal_p as that function is NOT equivalent to a
9127 C equality test. It can in fact return false for two
9128 objects which would test as equal using the C equality
9129 operator. */
9130 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
9131 return constant_boolean_node (equal
9132 ? code == EQ_EXPR : code != EQ_EXPR,
9133 type);
9136 /* If this is a comparison of two exprs that look like an
9137 ARRAY_REF of the same object, then we can fold this to a
9138 comparison of the two offsets. */
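      /* For example, a comparison of &a[i] and &a[j] reduces to a
         comparison of the offsets i and j once the common base is
         stripped off. */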
9139 if (TREE_CODE_CLASS (code) == tcc_comparison)
9141 tree base0, offset0, base1, offset1;
9143 if (extract_array_ref (arg0, &base0, &offset0)
9144 && extract_array_ref (arg1, &base1, &offset1)
9145 && operand_equal_p (base0, base1, 0))
9147 /* Handle no offsets on both sides specially. */
9148 if (offset0 == NULL_TREE
9149 && offset1 == NULL_TREE)
9150 return fold_build2 (code, type, integer_zero_node,
9151 integer_zero_node);
9153 if (!offset0 || !offset1
9154 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
9156 if (offset0 == NULL_TREE)
9157 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9158 if (offset1 == NULL_TREE)
9159 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9160 return fold_build2 (code, type, offset0, offset1);
9165 /* Transform comparisons of the form X +- C CMP X. */
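      /* For example, for signed X when neither -fwrapv nor -ftrapv is
         in effect, X + 1 > X folds to true and X - 1 > X folds to
         false. */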
9166 if ((code != EQ_EXPR && code != NE_EXPR)
9167 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9168 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9169 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9170 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9171 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9172 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9173 && !(flag_wrapv || flag_trapv))))
9175 tree arg01 = TREE_OPERAND (arg0, 1);
9176 enum tree_code code0 = TREE_CODE (arg0);
9177 int is_positive;
9179 if (TREE_CODE (arg01) == REAL_CST)
9180 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9181 else
9182 is_positive = tree_int_cst_sgn (arg01);
9184 /* (X - c) > X becomes false. */
9185 if (code == GT_EXPR
9186 && ((code0 == MINUS_EXPR && is_positive >= 0)
9187 || (code0 == PLUS_EXPR && is_positive <= 0)))
9188 return constant_boolean_node (0, type);
9190 /* Likewise (X + c) < X becomes false. */
9191 if (code == LT_EXPR
9192 && ((code0 == PLUS_EXPR && is_positive >= 0)
9193 || (code0 == MINUS_EXPR && is_positive <= 0)))
9194 return constant_boolean_node (0, type);
9196 /* Convert (X - c) <= X to true. */
9197 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9198 && code == LE_EXPR
9199 && ((code0 == MINUS_EXPR && is_positive >= 0)
9200 || (code0 == PLUS_EXPR && is_positive <= 0)))
9201 return constant_boolean_node (1, type);
9203 /* Convert (X + c) >= X to true. */
9204 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9205 && code == GE_EXPR
9206 && ((code0 == PLUS_EXPR && is_positive >= 0)
9207 || (code0 == MINUS_EXPR && is_positive <= 0)))
9208 return constant_boolean_node (1, type);
9210 if (TREE_CODE (arg01) == INTEGER_CST)
9212 /* Convert X + c > X and X - c < X to true for integers. */
9213 if (code == GT_EXPR
9214 && ((code0 == PLUS_EXPR && is_positive > 0)
9215 || (code0 == MINUS_EXPR && is_positive < 0)))
9216 return constant_boolean_node (1, type);
9218 if (code == LT_EXPR
9219 && ((code0 == MINUS_EXPR && is_positive > 0)
9220 || (code0 == PLUS_EXPR && is_positive < 0)))
9221 return constant_boolean_node (1, type);
9223 /* Convert X + c <= X and X - c >= X to false for integers. */
9224 if (code == LE_EXPR
9225 && ((code0 == PLUS_EXPR && is_positive > 0)
9226 || (code0 == MINUS_EXPR && is_positive < 0)))
9227 return constant_boolean_node (0, type);
9229 if (code == GE_EXPR
9230 && ((code0 == MINUS_EXPR && is_positive > 0)
9231 || (code0 == PLUS_EXPR && is_positive < 0)))
9232 return constant_boolean_node (0, type);
9236 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
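      /* For example, X + 3 < 10 becomes X < 7 for signed X, provided
         the folded constant 10 - 3 does not itself overflow. */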
9237 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9238 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9239 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9240 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9241 && !(flag_wrapv || flag_trapv))
9242 && (TREE_CODE (arg1) == INTEGER_CST
9243 && !TREE_OVERFLOW (arg1)))
9245 tree const1 = TREE_OPERAND (arg0, 1);
9246 tree const2 = arg1;
9247 tree variable = TREE_OPERAND (arg0, 0);
9248 tree lhs;
9249 int lhs_add;
9250 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9252 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9253 TREE_TYPE (arg1), const2, const1);
9254 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9255 && (TREE_CODE (lhs) != INTEGER_CST
9256 || !TREE_OVERFLOW (lhs)))
9257 return fold_build2 (code, type, variable, lhs);
9260 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9262 tree targ0 = strip_float_extensions (arg0);
9263 tree targ1 = strip_float_extensions (arg1);
9264 tree newtype = TREE_TYPE (targ0);
9266 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9267 newtype = TREE_TYPE (targ1);
9269 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9270 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9271 return fold_build2 (code, type, fold_convert (newtype, targ0),
9272 fold_convert (newtype, targ1));
9274 /* (-a) CMP (-b) -> b CMP a */
9275 if (TREE_CODE (arg0) == NEGATE_EXPR
9276 && TREE_CODE (arg1) == NEGATE_EXPR)
9277 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9278 TREE_OPERAND (arg0, 0));
9280 if (TREE_CODE (arg1) == REAL_CST)
9282 REAL_VALUE_TYPE cst;
9283 cst = TREE_REAL_CST (arg1);
9285 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9286 if (TREE_CODE (arg0) == NEGATE_EXPR)
9287 return
9288 fold_build2 (swap_tree_comparison (code), type,
9289 TREE_OPERAND (arg0, 0),
9290 build_real (TREE_TYPE (arg1),
9291 REAL_VALUE_NEGATE (cst)));
9293 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9294 /* a CMP (-0) -> a CMP 0 */
9295 if (REAL_VALUE_MINUS_ZERO (cst))
9296 return fold_build2 (code, type, arg0,
9297 build_real (TREE_TYPE (arg1), dconst0));
9299 /* x != NaN is always true, other ops are always false. */
9300 if (REAL_VALUE_ISNAN (cst)
9301 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9303 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9304 return omit_one_operand (type, tem, arg0);
9307 /* Fold comparisons against infinity. */
9308 if (REAL_VALUE_ISINF (cst))
9310 tem = fold_inf_compare (code, type, arg0, arg1);
9311 if (tem != NULL_TREE)
9312 return tem;
9316 /* If this is a comparison of a real constant with a PLUS_EXPR
9317 or a MINUS_EXPR of a real constant, we can convert it into a
9318 comparison with a revised real constant, provided that unsafe
9319 math optimizations are enabled and no overflow occurs. */
9320 if (flag_unsafe_math_optimizations
9321 && TREE_CODE (arg1) == REAL_CST
9322 && (TREE_CODE (arg0) == PLUS_EXPR
9323 || TREE_CODE (arg0) == MINUS_EXPR)
9324 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9325 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9326 ? MINUS_EXPR : PLUS_EXPR,
9327 arg1, TREE_OPERAND (arg0, 1), 0))
9328 && ! TREE_CONSTANT_OVERFLOW (tem))
9329 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9331 /* Likewise, we can simplify a comparison of a real constant with
9332 a MINUS_EXPR whose first operand is also a real constant, i.e.
9333 (c1 - x) < c2 becomes x > c1-c2. */
9334 if (flag_unsafe_math_optimizations
9335 && TREE_CODE (arg1) == REAL_CST
9336 && TREE_CODE (arg0) == MINUS_EXPR
9337 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9338 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9339 arg1, 0))
9340 && ! TREE_CONSTANT_OVERFLOW (tem))
9341 return fold_build2 (swap_tree_comparison (code), type,
9342 TREE_OPERAND (arg0, 1), tem);
9344 /* Fold comparisons against built-in math functions. */
9345 if (TREE_CODE (arg1) == REAL_CST
9346 && flag_unsafe_math_optimizations
9347 && ! flag_errno_math)
9349 enum built_in_function fcode = builtin_mathfn_code (arg0);
9351 if (fcode != END_BUILTINS)
9353 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9354 if (tem != NULL_TREE)
9355 return tem;
9360 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9361 if (TREE_CONSTANT (arg1)
9362 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9363 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9364 /* This optimization is invalid for ordered comparisons
9365 if CONST+INCR overflows or if foo+incr might overflow.
9366 This optimization is invalid for floating point due to rounding.
9367 For pointer types we assume overflow doesn't happen. */
9368 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9369 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9370 && (code == EQ_EXPR || code == NE_EXPR))))
9372 tree varop, newconst;
9374 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9376 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9377 arg1, TREE_OPERAND (arg0, 1));
9378 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9379 TREE_OPERAND (arg0, 0),
9380 TREE_OPERAND (arg0, 1));
9382 else
9384 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9385 arg1, TREE_OPERAND (arg0, 1));
9386 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9387 TREE_OPERAND (arg0, 0),
9388 TREE_OPERAND (arg0, 1));
9392 /* If VAROP is a reference to a bitfield, we must mask
9393 the constant by the width of the field. */
9394 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9395 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9396 && host_integerp (DECL_SIZE (TREE_OPERAND
9397 (TREE_OPERAND (varop, 0), 1)), 1))
9399 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9400 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9401 tree folded_compare, shift;
9403 /* First check whether the comparison would always come
9404 out the same; if we skipped this check, the masking
9405 below could change the meaning. */
9406 folded_compare = fold_build2 (code, type,
9407 TREE_OPERAND (varop, 0), arg1);
9408 if (integer_zerop (folded_compare)
9409 || integer_onep (folded_compare))
9410 return omit_one_operand (type, folded_compare, varop);
9412 shift = build_int_cst (NULL_TREE,
9413 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9414 shift = fold_convert (TREE_TYPE (varop), shift);
9415 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9416 newconst, shift);
9417 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9418 newconst, shift);
9421 return fold_build2 (code, type, varop, newconst);
9424 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9425 This transformation affects the cases which are handled in later
9426 optimizations involving comparisons with non-negative constants. */
9427 if (TREE_CODE (arg1) == INTEGER_CST
9428 && TREE_CODE (arg0) != INTEGER_CST
9429 && tree_int_cst_sgn (arg1) > 0)
9431 switch (code)
9433 case GE_EXPR:
9434 arg1 = const_binop (MINUS_EXPR, arg1,
9435 build_int_cst (TREE_TYPE (arg1), 1), 0);
9436 return fold_build2 (GT_EXPR, type, arg0,
9437 fold_convert (TREE_TYPE (arg0), arg1));
9439 case LT_EXPR:
9440 arg1 = const_binop (MINUS_EXPR, arg1,
9441 build_int_cst (TREE_TYPE (arg1), 1), 0);
9442 return fold_build2 (LE_EXPR, type, arg0,
9443 fold_convert (TREE_TYPE (arg0), arg1));
9445 default:
9446 break;
9450 /* Comparisons with the highest or lowest possible integer of
9451 the specified size will have known values. */
9453 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9455 if (TREE_CODE (arg1) == INTEGER_CST
9456 && ! TREE_CONSTANT_OVERFLOW (arg1)
9457 && width <= 2 * HOST_BITS_PER_WIDE_INT
9458 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9459 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9461 HOST_WIDE_INT signed_max_hi;
9462 unsigned HOST_WIDE_INT signed_max_lo;
9463 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9465 if (width <= HOST_BITS_PER_WIDE_INT)
9467 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9468 - 1;
9469 signed_max_hi = 0;
9470 max_hi = 0;
9472 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9474 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9475 min_lo = 0;
9476 min_hi = 0;
9478 else
9480 max_lo = signed_max_lo;
9481 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9482 min_hi = -1;
9485 else
9487 width -= HOST_BITS_PER_WIDE_INT;
9488 signed_max_lo = -1;
9489 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9490 - 1;
9491 max_lo = -1;
9492 min_lo = 0;
9494 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9496 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9497 min_hi = 0;
9499 else
9501 max_hi = signed_max_hi;
9502 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9506 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9507 && TREE_INT_CST_LOW (arg1) == max_lo)
9508 switch (code)
9510 case GT_EXPR:
9511 return omit_one_operand (type, integer_zero_node, arg0);
9513 case GE_EXPR:
9514 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9516 case LE_EXPR:
9517 return omit_one_operand (type, integer_one_node, arg0);
9519 case LT_EXPR:
9520 return fold_build2 (NE_EXPR, type, arg0, arg1);
9522 /* The GE_EXPR and LT_EXPR cases above are not normally
9523 reached because of previous transformations. */
9525 default:
9526 break;
9528 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9529 == max_hi
9530 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9531 switch (code)
9533 case GT_EXPR:
9534 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9535 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9536 case LE_EXPR:
9537 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9538 return fold_build2 (NE_EXPR, type, arg0, arg1);
9539 default:
9540 break;
9542 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9543 == min_hi
9544 && TREE_INT_CST_LOW (arg1) == min_lo)
9545 switch (code)
9547 case LT_EXPR:
9548 return omit_one_operand (type, integer_zero_node, arg0);
9550 case LE_EXPR:
9551 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9553 case GE_EXPR:
9554 return omit_one_operand (type, integer_one_node, arg0);
9556 case GT_EXPR:
9557 return fold_build2 (NE_EXPR, type, op0, op1);
9559 default:
9560 break;
9562 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9563 == min_hi
9564 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9565 switch (code)
9567 case GE_EXPR:
9568 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9569 return fold_build2 (NE_EXPR, type, arg0, arg1);
9570 case LT_EXPR:
9571 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9572 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9573 default:
9574 break;
9577 else if (!in_gimple_form
9578 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9579 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9580 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9581 /* signed_type does not work on pointer types. */
9582 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9584 /* The following case also applies to X < signed_max+1
9585 and X >= signed_max+1 because of previous transformations. */
9586 if (code == LE_EXPR || code == GT_EXPR)
9588 tree st0, st1;
9589 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9590 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9591 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9592 type, fold_convert (st0, arg0),
9593 build_int_cst (st1, 0));
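          /* For example, for a 32-bit unsigned X, X <= 2147483647
             becomes (int) X >= 0, a plain sign-bit test. */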
9599 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9600 a MINUS_EXPR of a constant, we can convert it into a comparison with
9601 a revised constant as long as no overflow occurs. */
9602 if ((code == EQ_EXPR || code == NE_EXPR)
9603 && TREE_CODE (arg1) == INTEGER_CST
9604 && (TREE_CODE (arg0) == PLUS_EXPR
9605 || TREE_CODE (arg0) == MINUS_EXPR)
9606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9607 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9608 ? MINUS_EXPR : PLUS_EXPR,
9609 arg1, TREE_OPERAND (arg0, 1), 0))
9610 && ! TREE_CONSTANT_OVERFLOW (tem))
9611 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9613 /* Similarly for a NEGATE_EXPR. */
9614 else if ((code == EQ_EXPR || code == NE_EXPR)
9615 && TREE_CODE (arg0) == NEGATE_EXPR
9616 && TREE_CODE (arg1) == INTEGER_CST
9617 && 0 != (tem = negate_expr (arg1))
9618 && TREE_CODE (tem) == INTEGER_CST
9619 && ! TREE_CONSTANT_OVERFLOW (tem))
9620 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9622 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9623 for !=. Don't do this for ordered comparisons due to overflow. */
9624 else if ((code == NE_EXPR || code == EQ_EXPR)
9625 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9626 return fold_build2 (code, type,
9627 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9629 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9630 && (TREE_CODE (arg0) == NOP_EXPR
9631 || TREE_CODE (arg0) == CONVERT_EXPR))
9633 /* If we are widening one operand of an integer comparison,
9634 see if the other operand is similarly being widened. Perhaps we
9635 can do the comparison in the narrower type. */
9636 tem = fold_widened_comparison (code, type, arg0, arg1);
9637 if (tem)
9638 return tem;
9640 /* Or if we are changing signedness. */
9641 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9642 if (tem)
9643 return tem;
9646 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9647 constant, we can simplify it. */
9648 else if (TREE_CODE (arg1) == INTEGER_CST
9649 && (TREE_CODE (arg0) == MIN_EXPR
9650 || TREE_CODE (arg0) == MAX_EXPR)
9651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9653 tem = optimize_minmax_comparison (code, type, op0, op1);
9654 if (tem)
9655 return tem;
9657 return NULL_TREE;
9660 /* If we are comparing an ABS_EXPR with a constant, we can
9661 convert all the cases into explicit comparisons, but they may
9662 well not be faster than doing the ABS and one comparison.
9663 But ABS (X) <= C is a range comparison, which becomes a subtraction
9664 and a comparison, and is probably faster. */
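      /* For example, ABS (X) <= 5 becomes X >= -5 && X <= 5. */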
9665 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9666 && TREE_CODE (arg0) == ABS_EXPR
9667 && ! TREE_SIDE_EFFECTS (arg0)
9668 && (0 != (tem = negate_expr (arg1)))
9669 && TREE_CODE (tem) == INTEGER_CST
9670 && ! TREE_CONSTANT_OVERFLOW (tem))
9671 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9672 build2 (GE_EXPR, type,
9673 TREE_OPERAND (arg0, 0), tem),
9674 build2 (LE_EXPR, type,
9675 TREE_OPERAND (arg0, 0), arg1));
9677 /* Convert ABS_EXPR<x> >= 0 to true. */
9678 else if (code == GE_EXPR
9679 && tree_expr_nonnegative_p (arg0)
9680 && (integer_zerop (arg1)
9681 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9682 && real_zerop (arg1))))
9683 return omit_one_operand (type, integer_one_node, arg0);
9685 /* Convert ABS_EXPR<x> < 0 to false. */
9686 else if (code == LT_EXPR
9687 && tree_expr_nonnegative_p (arg0)
9688 && (integer_zerop (arg1) || real_zerop (arg1)))
9689 return omit_one_operand (type, integer_zero_node, arg0);
9691 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9692 else if ((code == EQ_EXPR || code == NE_EXPR)
9693 && TREE_CODE (arg0) == ABS_EXPR
9694 && (integer_zerop (arg1) || real_zerop (arg1)))
9695 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9697 /* If this is an EQ or NE comparison with zero and ARG0 is
9698 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9699 two operations, but the latter can be done in one less insn
9700 on machines that have only two-operand insns or on which a
9701 constant cannot be the first operand. */
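      /* For example, ((1 << N) & X) != 0 becomes ((X >> N) & 1) != 0. */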
9702 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9703 && TREE_CODE (arg0) == BIT_AND_EXPR)
9705 tree arg00 = TREE_OPERAND (arg0, 0);
9706 tree arg01 = TREE_OPERAND (arg0, 1);
9707 if (TREE_CODE (arg00) == LSHIFT_EXPR
9708 && integer_onep (TREE_OPERAND (arg00, 0)))
9709 return
9710 fold_build2 (code, type,
9711 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9712 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9713 arg01, TREE_OPERAND (arg00, 1)),
9714 fold_convert (TREE_TYPE (arg0),
9715 integer_one_node)),
9716 arg1);
9717 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9718 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9719 return
9720 fold_build2 (code, type,
9721 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9722 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9723 arg00, TREE_OPERAND (arg01, 1)),
9724 fold_convert (TREE_TYPE (arg0),
9725 integer_one_node)),
9726 arg1);
9729 /* If this is an NE or EQ comparison of zero against the result of a
9730 signed MOD operation whose second operand is a power of 2, make
9731 the MOD operation unsigned since it is simpler and equivalent. */
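      /* This is safe because a value is divisible by a power of two
         exactly when its unsigned interpretation is, the modulus
         dividing 2**precision; e.g. X % 4 == 0 iff
         (unsigned) X % 4U == 0. */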
9732 if ((code == NE_EXPR || code == EQ_EXPR)
9733 && integer_zerop (arg1)
9734 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9735 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9736 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9737 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9738 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9739 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9741 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9742 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9743 fold_convert (newtype,
9744 TREE_OPERAND (arg0, 0)),
9745 fold_convert (newtype,
9746 TREE_OPERAND (arg0, 1)));
9748 return fold_build2 (code, type, newmod,
9749 fold_convert (newtype, arg1));
9752 /* If this is an NE comparison of zero with an AND of one, remove the
9753 comparison since the AND will give the correct value. */
9754 if (code == NE_EXPR && integer_zerop (arg1)
9755 && TREE_CODE (arg0) == BIT_AND_EXPR
9756 && integer_onep (TREE_OPERAND (arg0, 1)))
9757 return fold_convert (type, arg0);
9759 /* If we have (A & C) == C where C is a power of 2, convert this into
9760 (A & C) != 0. Similarly for NE_EXPR. */
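      /* For example, (X & 8) == 8 becomes (X & 8) != 0 and
         (X & 8) != 8 becomes (X & 8) == 0, since C has a single bit
         set. */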
9761 if ((code == EQ_EXPR || code == NE_EXPR)
9762 && TREE_CODE (arg0) == BIT_AND_EXPR
9763 && integer_pow2p (TREE_OPERAND (arg0, 1))
9764 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9765 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9766 arg0, fold_convert (TREE_TYPE (arg0),
9767 integer_zero_node));
9769 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9770 bit, then fold the expression into A < 0 or A >= 0. */
9771 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9772 if (tem)
9773 return tem;
9775 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9776 Similarly for NE_EXPR. */
9777 if ((code == EQ_EXPR || code == NE_EXPR)
9778 && TREE_CODE (arg0) == BIT_AND_EXPR
9779 && TREE_CODE (arg1) == INTEGER_CST
9780 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9782 tree notc = fold_build1 (BIT_NOT_EXPR,
9783 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9784 TREE_OPERAND (arg0, 1));
9785 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9786 arg1, notc);
9787 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9788 if (integer_nonzerop (dandnotc))
9789 return omit_one_operand (type, rslt, arg0);
9792 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9793 Similarly for NE_EXPR. */
9794 if ((code == EQ_EXPR || code == NE_EXPR)
9795 && TREE_CODE (arg0) == BIT_IOR_EXPR
9796 && TREE_CODE (arg1) == INTEGER_CST
9797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9799 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9800 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9801 TREE_OPERAND (arg0, 1), notd);
9802 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9803 if (integer_nonzerop (candnotd))
9804 return omit_one_operand (type, rslt, arg0);
9807 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9808 and similarly for >= into !=. */
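      /* For example, for unsigned X, X < (1 << Y) becomes X >> Y == 0
         and X >= (1 << Y) becomes X >> Y != 0. */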
9809 if ((code == LT_EXPR || code == GE_EXPR)
9810 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9811 && TREE_CODE (arg1) == LSHIFT_EXPR
9812 && integer_onep (TREE_OPERAND (arg1, 0)))
9813 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9814 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9815 TREE_OPERAND (arg1, 1)),
9816 build_int_cst (TREE_TYPE (arg0), 0));
9818 else if ((code == LT_EXPR || code == GE_EXPR)
9819 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9820 && (TREE_CODE (arg1) == NOP_EXPR
9821 || TREE_CODE (arg1) == CONVERT_EXPR)
9822 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9823 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9824 return
9825 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9826 fold_convert (TREE_TYPE (arg0),
9827 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9828 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9829 1))),
9830 build_int_cst (TREE_TYPE (arg0), 0));
9832 /* Simplify comparison of something with itself. (For IEEE
9833 floating-point, we can only do some of these simplifications.) */
9834 if (operand_equal_p (arg0, arg1, 0))
9836 switch (code)
9838 case EQ_EXPR:
9839 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9840 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9841 return constant_boolean_node (1, type);
9842 break;
9844 case GE_EXPR:
9845 case LE_EXPR:
9846 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9847 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9848 return constant_boolean_node (1, type);
9849 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9851 case NE_EXPR:
9852 /* For NE, we can only do this simplification if the operands
9853 are integral or we don't honor IEEE floating-point NaNs. */
9854 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9855 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9856 break;
9857 /* ... fall through ... */
9858 case GT_EXPR:
9859 case LT_EXPR:
9860 return constant_boolean_node (0, type);
9861 default:
9862 gcc_unreachable ();
9866 /* If we are comparing an expression that just has comparisons
9867 of two integer values, arithmetic expressions of those comparisons,
9868 and constants, we can simplify it. There are only three cases
9869 to check: the two values can either be equal, the first can be
9870 greater, or the second can be greater. Fold the expression for
9871 those three values. Since each value must be 0 or 1, we have
9872 eight possibilities, each of which corresponds to the constant 0
9873 or 1 or one of the six possible comparisons.
9875 This handles common cases like (a > b) == 0 but also handles
9876 expressions like ((x > y) - (y > x)) > 0, which supposedly
9877 occur in macroized code. */
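      /* For example, (X > Y) == 0 evaluates to false, true and true
         for the orderings X > Y, X == Y and X < Y respectively, and
         therefore folds to X <= Y. */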
9879 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9881 tree cval1 = 0, cval2 = 0;
9882 int save_p = 0;
9884 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9885 /* Don't handle degenerate cases here; they should already
9886 have been handled anyway. */
9887 && cval1 != 0 && cval2 != 0
9888 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9889 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9890 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9891 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9892 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9893 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9894 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9896 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9897 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9899 /* We can't just pass T to eval_subst in case cval1 or cval2
9900 was the same as ARG1. */
9902 tree high_result
9903 = fold_build2 (code, type,
9904 eval_subst (arg0, cval1, maxval,
9905 cval2, minval),
9906 arg1);
9907 tree equal_result
9908 = fold_build2 (code, type,
9909 eval_subst (arg0, cval1, maxval,
9910 cval2, maxval),
9911 arg1);
9912 tree low_result
9913 = fold_build2 (code, type,
9914 eval_subst (arg0, cval1, minval,
9915 cval2, maxval),
9916 arg1);
9918 /* All three of these results should be 0 or 1. Confirm they
9919 are. Then use those values to select the proper code
9920 to use. */
9922 if ((integer_zerop (high_result)
9923 || integer_onep (high_result))
9924 && (integer_zerop (equal_result)
9925 || integer_onep (equal_result))
9926 && (integer_zerop (low_result)
9927 || integer_onep (low_result)))
9929 /* Make a 3-bit mask with the high-order bit being the
9930 value for `>', the next for `=', and the low for `<'. */
9931 switch ((integer_onep (high_result) * 4)
9932 + (integer_onep (equal_result) * 2)
9933 + integer_onep (low_result))
9935 case 0:
9936 /* Always false. */
9937 return omit_one_operand (type, integer_zero_node, arg0);
9938 case 1:
9939 code = LT_EXPR;
9940 break;
9941 case 2:
9942 code = EQ_EXPR;
9943 break;
9944 case 3:
9945 code = LE_EXPR;
9946 break;
9947 case 4:
9948 code = GT_EXPR;
9949 break;
9950 case 5:
9951 code = NE_EXPR;
9952 break;
9953 case 6:
9954 code = GE_EXPR;
9955 break;
9956 case 7:
9957 /* Always true. */
9958 return omit_one_operand (type, integer_one_node, arg0);
9961 if (save_p)
9962 return save_expr (build2 (code, type, cval1, cval2));
9963 else
9964 return fold_build2 (code, type, cval1, cval2);
9969 /* If this is a comparison of a field, we may be able to simplify it. */
9970 if (((TREE_CODE (arg0) == COMPONENT_REF
9971 && lang_hooks.can_use_bit_fields_p ())
9972 || TREE_CODE (arg0) == BIT_FIELD_REF)
9973 && (code == EQ_EXPR || code == NE_EXPR)
9974 /* Handle the constant case even without -O
9975 to make sure the warnings are given. */
9976 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9978 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9979 if (t1)
9980 return t1;
9983 /* Fold a comparison of the address of COMPONENT_REFs with the same
9984 type and component to a comparison of the address of the base
9985 object. In short, &x->a OP &y->a to x OP y and
9986 &x->a OP &y.a to x OP &y */
9987 if (TREE_CODE (arg0) == ADDR_EXPR
9988 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9989 && TREE_CODE (arg1) == ADDR_EXPR
9990 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9992 tree cref0 = TREE_OPERAND (arg0, 0);
9993 tree cref1 = TREE_OPERAND (arg1, 0);
9994 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9996 tree op0 = TREE_OPERAND (cref0, 0);
9997 tree op1 = TREE_OPERAND (cref1, 0);
9998 return fold_build2 (code, type,
9999 build_fold_addr_expr (op0),
10000 build_fold_addr_expr (op1));
10004 /* Optimize comparisons of strlen vs zero to a compare of the
10005 first character of the string vs zero. To wit,
10006 strlen(ptr) == 0 => *ptr == 0
10007 strlen(ptr) != 0 => *ptr != 0
10008 Other cases should reduce to one of these two (or a constant)
10009 due to the return value of strlen being unsigned. */
10010 if ((code == EQ_EXPR || code == NE_EXPR)
10011 && integer_zerop (arg1)
10012 && TREE_CODE (arg0) == CALL_EXPR)
10014 tree fndecl = get_callee_fndecl (arg0);
10015 tree arglist;
10017 if (fndecl
10018 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10019 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10020 && (arglist = TREE_OPERAND (arg0, 1))
10021 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10022 && ! TREE_CHAIN (arglist))
10024 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10025 return fold_build2 (code, type, iref,
10026 build_int_cst (TREE_TYPE (iref), 0));
10030 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
10031 into a single range test. */
10032 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
10033 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
10034 && TREE_CODE (arg1) == INTEGER_CST
10035 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10036 && !integer_zerop (TREE_OPERAND (arg0, 1))
10037 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
10038 && !TREE_OVERFLOW (arg1))
10040 t1 = fold_div_compare (code, type, arg0, arg1);
10041 if (t1 != NULL_TREE)
10042 return t1;
10045 if ((code == EQ_EXPR || code == NE_EXPR)
10046 && integer_zerop (arg1)
10047 && tree_expr_nonzero_p (arg0))
10049 tree res = constant_boolean_node (code == NE_EXPR, type);
10050 return omit_one_operand (type, res, arg0);
10053 t1 = fold_relational_const (code, type, arg0, arg1);
10054 return t1 == NULL_TREE ? NULL_TREE : t1;
10056 case UNORDERED_EXPR:
10057 case ORDERED_EXPR:
10058 case UNLT_EXPR:
10059 case UNLE_EXPR:
10060 case UNGT_EXPR:
10061 case UNGE_EXPR:
10062 case UNEQ_EXPR:
10063 case LTGT_EXPR:
10064 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10066 t1 = fold_relational_const (code, type, arg0, arg1);
10067 if (t1 != NULL_TREE)
10068 return t1;
10071 /* If the first operand is NaN, the result is constant. */
10072 if (TREE_CODE (arg0) == REAL_CST
10073 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10074 && (code != LTGT_EXPR || ! flag_trapping_math))
10076 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10077 ? integer_zero_node
10078 : integer_one_node;
10079 return omit_one_operand (type, t1, arg1);
10082 /* If the second operand is NaN, the result is constant. */
10083 if (TREE_CODE (arg1) == REAL_CST
10084 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10085 && (code != LTGT_EXPR || ! flag_trapping_math))
10087 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10088 ? integer_zero_node
10089 : integer_one_node;
10090 return omit_one_operand (type, t1, arg0);
10093 /* Simplify unordered comparison of something with itself. */
10094 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10095 && operand_equal_p (arg0, arg1, 0))
10096 return constant_boolean_node (1, type);
10098 if (code == LTGT_EXPR
10099 && !flag_trapping_math
10100 && operand_equal_p (arg0, arg1, 0))
10101 return constant_boolean_node (0, type);
10103 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10105 tree targ0 = strip_float_extensions (arg0);
10106 tree targ1 = strip_float_extensions (arg1);
10107 tree newtype = TREE_TYPE (targ0);
10109 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10110 newtype = TREE_TYPE (targ1);
10112 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10113 return fold_build2 (code, type, fold_convert (newtype, targ0),
10114 fold_convert (newtype, targ1));
10117 return NULL_TREE;
10119 case COMPOUND_EXPR:
10120 /* When pedantic, a compound expression can be neither an lvalue
10121 nor an integer constant expression. */
10122 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10123 return NULL_TREE;
10124 /* Don't let (0, 0) be a null pointer constant. */
10125 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10126 : fold_convert (type, arg1);
10127 return pedantic_non_lvalue (tem);
10129 case COMPLEX_EXPR:
10130 if ((TREE_CODE (arg0) == REAL_CST
10131 && TREE_CODE (arg1) == REAL_CST)
10132 || (TREE_CODE (arg0) == INTEGER_CST
10133 && TREE_CODE (arg1) == INTEGER_CST))
10134 return build_complex (type, arg0, arg1);
10135 return NULL_TREE;
10137 case ASSERT_EXPR:
10138 /* An ASSERT_EXPR should never be passed to fold_binary. */
10139 gcc_unreachable ();
10141 default:
10142 return NULL_TREE;
10143 } /* switch (code) */
10146 /* Callback for walk_tree, looking for a LABEL_EXPR. Returns *TP if
10147 it is a LABEL_EXPR, and NULL_TREE otherwise. Does not descend into
10148 the sub-tree of a GOTO_EXPR. */
10150 static tree
10151 contains_label_1 (tree *tp,
10152 int *walk_subtrees,
10153 void *data ATTRIBUTE_UNUSED)
10155 switch (TREE_CODE (*tp))
10157 case LABEL_EXPR:
10158 return *tp;
10159 case GOTO_EXPR:
10160 *walk_subtrees = 0;
10161 /* ... fall through ... */
10162 default:
10163 return NULL_TREE;
10167 /* Checks whether the sub-tree ST contains a label which is accessible
10168 from outside the sub-tree. Returns true if such a label is found
10169 and false otherwise. */
10171 static bool
10172 contains_label_p (tree st)
10174 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10177 /* Fold a ternary expression of code CODE and type TYPE with operands
10178 OP0, OP1, and OP2. Return the folded expression if folding is
10179 successful. Otherwise, return NULL_TREE. */
10181 tree
10182 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10184 tree tem;
10185 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10186 enum tree_code_class kind = TREE_CODE_CLASS (code);
10188 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10189 && TREE_CODE_LENGTH (code) == 3);
10191 /* Strip any conversions that don't change the mode. This is safe
10192 for every expression, except for a comparison expression because
10193 its signedness is derived from its operands. So, in the latter
10194 case, only strip conversions that don't change the signedness.
10196 Note that this is done as an internal manipulation within the
10197 constant folder, in order to find the simplest representation of
10198 the arguments so that their form can be studied. In any case,
10199 the appropriate type conversions should be put back in the tree
10200 that will get out of the constant folder. */
10201 if (op0)
10203 arg0 = op0;
10204 STRIP_NOPS (arg0);
10207 if (op1)
10209 arg1 = op1;
10210 STRIP_NOPS (arg1);
10213 switch (code)
10215 case COMPONENT_REF:
10216 if (TREE_CODE (arg0) == CONSTRUCTOR
10217 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10219 unsigned HOST_WIDE_INT idx;
10220 tree field, value;
10221 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10222 if (field == arg1)
10223 return value;
10225 return NULL_TREE;
10227 case COND_EXPR:
10228 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10229 so all simple results must be passed through pedantic_non_lvalue. */
10230 if (TREE_CODE (arg0) == INTEGER_CST)
10232 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10233 tem = integer_zerop (arg0) ? op2 : op1;
10234 /* Only optimize constant conditions when the selected branch
10235 has the same type as the COND_EXPR. This avoids optimizing
10236 away "c ? x : throw", where the throw has a void type.
10237 Avoid throwing away an operand that contains a label. */
10238 if ((!TREE_SIDE_EFFECTS (unused_op)
10239 || !contains_label_p (unused_op))
10240 && (! VOID_TYPE_P (TREE_TYPE (tem))
10241 || VOID_TYPE_P (type)))
10242 return pedantic_non_lvalue (tem);
10243 return NULL_TREE;
10245 if (operand_equal_p (arg1, op2, 0))
10246 return pedantic_omit_one_operand (type, arg1, arg0);
10248 /* If we have A op B ? A : C, we may be able to convert this to a
10249 simpler expression, depending on the operation and the values
10250 of B and C. Signed zeros prevent all of these transformations,
10251 for reasons given above each one.
10253 Also try swapping the arguments and inverting the conditional. */
10254 if (COMPARISON_CLASS_P (arg0)
10255 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10256 arg1, TREE_OPERAND (arg0, 1))
10257 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10259 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10260 if (tem)
10261 return tem;
10264 if (COMPARISON_CLASS_P (arg0)
10265 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10266 op2,
10267 TREE_OPERAND (arg0, 1))
10268 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10270 tem = invert_truthvalue (arg0);
10271 if (COMPARISON_CLASS_P (tem))
10273 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10274 if (tem)
10275 return tem;
10279 /* If the second operand is simpler than the third, swap them
10280 since that produces better jump optimization results. */
10281 if (truth_value_p (TREE_CODE (arg0))
10282 && tree_swap_operands_p (op1, op2, false))
10284 /* See if this can be inverted. If it can't, possibly because
10285 it was a floating-point inequality comparison, don't do
10286 anything. */
10287 tem = invert_truthvalue (arg0);
10289 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10290 return fold_build3 (code, type, tem, op2, op1);
10293 /* Convert A ? 1 : 0 to simply A. */
10294 if (integer_onep (op1)
10295 && integer_zerop (op2)
10296 /* If we try to convert OP0 to our type, the
10297 call to fold will try to move the conversion inside
10298 a COND, which will recurse. In that case, the COND_EXPR
10299 is probably the best choice, so leave it alone. */
10300 && type == TREE_TYPE (arg0))
10301 return pedantic_non_lvalue (arg0);
10303 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10304 over COND_EXPR in cases such as floating point comparisons. */
10305 if (integer_zerop (op1)
10306 && integer_onep (op2)
10307 && truth_value_p (TREE_CODE (arg0)))
10308 return pedantic_non_lvalue (fold_convert (type,
10309 invert_truthvalue (arg0)));
10311 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10312 if (TREE_CODE (arg0) == LT_EXPR
10313 && integer_zerop (TREE_OPERAND (arg0, 1))
10314 && integer_zerop (op2)
10315 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10316 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10317 TREE_TYPE (tem), tem, arg1));
10319 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10320 already handled above. */
10321 if (TREE_CODE (arg0) == BIT_AND_EXPR
10322 && integer_onep (TREE_OPERAND (arg0, 1))
10323 && integer_zerop (op2)
10324 && integer_pow2p (arg1))
10326 tree tem = TREE_OPERAND (arg0, 0);
10327 STRIP_NOPS (tem);
10328 if (TREE_CODE (tem) == RSHIFT_EXPR
10329 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10330 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10331 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10332 return fold_build2 (BIT_AND_EXPR, type,
10333 TREE_OPERAND (tem, 0), arg1);
10336 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10337 is probably obsolete because the first operand should be a
10338 truth value (that's why we have the two cases above), but let's
10339 leave it in until we can confirm this for all front-ends. */
10340 if (integer_zerop (op2)
10341 && TREE_CODE (arg0) == NE_EXPR
10342 && integer_zerop (TREE_OPERAND (arg0, 1))
10343 && integer_pow2p (arg1)
10344 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10345 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10346 arg1, OEP_ONLY_CONST))
10347 return pedantic_non_lvalue (fold_convert (type,
10348 TREE_OPERAND (arg0, 0)));
10350 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10351 if (integer_zerop (op2)
10352 && truth_value_p (TREE_CODE (arg0))
10353 && truth_value_p (TREE_CODE (arg1)))
10354 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10356 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10357 if (integer_onep (op2)
10358 && truth_value_p (TREE_CODE (arg0))
10359 && truth_value_p (TREE_CODE (arg1)))
10361 /* Only perform transformation if ARG0 is easily inverted. */
10362 tem = invert_truthvalue (arg0);
10363 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10364 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10367 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10368 if (integer_zerop (arg1)
10369 && truth_value_p (TREE_CODE (arg0))
10370 && truth_value_p (TREE_CODE (op2)))
10372 /* Only perform transformation if ARG0 is easily inverted. */
10373 tem = invert_truthvalue (arg0);
10374 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10375 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10378 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10379 if (integer_onep (arg1)
10380 && truth_value_p (TREE_CODE (arg0))
10381 && truth_value_p (TREE_CODE (op2)))
10382 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10384 return NULL_TREE;
10386 case CALL_EXPR:
10387 /* Check for a built-in function. */
10388 if (TREE_CODE (op0) == ADDR_EXPR
10389 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10390 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10391 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10392 /* Check for a resolvable OBJ_TYPE_REF. The only cases we can resolve
10393 here are those where the address of a decl has been propagated
10394 into the object slot. */
10395 if (TREE_CODE (op0) == OBJ_TYPE_REF
10396 && lang_hooks.fold_obj_type_ref
10397 && TREE_CODE (OBJ_TYPE_REF_OBJECT (op0)) == ADDR_EXPR
10398 && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (op0), 0)))
10400 tree t;
10402 /* ??? Caution: Broken ADDR_EXPR semantics means that
10403 looking at the type of the operand of the addr_expr
10404 can yield an array type. See silly exception in
10405 check_pointer_types_r. */
10407 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (op0)));
10408 t = lang_hooks.fold_obj_type_ref (op0, t);
10409 if (t)
10410 return fold_build3 (code, type, t, op1, op2);
10412 return NULL_TREE;
10414 case BIT_FIELD_REF:
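      /* Extracting a single, aligned element from a VECTOR_CST yields
         the corresponding constant element; elements omitted from the
         end of the list are implicitly zero. */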
10415 if (TREE_CODE (arg0) == VECTOR_CST
10416 && type == TREE_TYPE (TREE_TYPE (arg0))
10417 && host_integerp (arg1, 1)
10418 && host_integerp (op2, 1))
10420 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10421 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10423 if (width != 0
10424 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10425 && (idx % width) == 0
10426 && (idx = idx / width)
10427 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10429 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10430 while (idx-- > 0 && elements)
10431 elements = TREE_CHAIN (elements);
10432 if (elements)
10433 return TREE_VALUE (elements);
10434 else
10435 return fold_convert (type, integer_zero_node);
10438 return NULL_TREE;
10440 default:
10441 return NULL_TREE;
10442 } /* switch (code) */
10445 /* Perform constant folding and related simplification of EXPR.
10446 The related simplifications include x*1 => x, x*0 => 0, etc.,
10447 and application of the associative law.
10448 NOP_EXPR conversions may be removed freely (as long as we
10449 are careful not to change the type of the overall expression).
10450 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10451 but we can constant-fold them if they have constant operands. */
10453 #ifdef ENABLE_FOLD_CHECKING
10454 # define fold(x) fold_1 (x)
10455 static tree fold_1 (tree);
10456 static
10457 #endif
10458 tree
10459 fold (tree expr)
10461 const tree t = expr;
10462 enum tree_code code = TREE_CODE (t);
10463 enum tree_code_class kind = TREE_CODE_CLASS (code);
10464 tree tem;
10466 /* Return right away if a constant. */
10467 if (kind == tcc_constant)
10468 return t;
10470 if (IS_EXPR_CODE_CLASS (kind))
10472 tree type = TREE_TYPE (t);
10473 tree op0, op1, op2;
10475 switch (TREE_CODE_LENGTH (code))
10477 case 1:
10478 op0 = TREE_OPERAND (t, 0);
10479 tem = fold_unary (code, type, op0);
10480 return tem ? tem : expr;
10481 case 2:
10482 op0 = TREE_OPERAND (t, 0);
10483 op1 = TREE_OPERAND (t, 1);
10484 tem = fold_binary (code, type, op0, op1);
10485 return tem ? tem : expr;
10486 case 3:
10487 op0 = TREE_OPERAND (t, 0);
10488 op1 = TREE_OPERAND (t, 1);
10489 op2 = TREE_OPERAND (t, 2);
10490 tem = fold_ternary (code, type, op0, op1, op2);
10491 return tem ? tem : expr;
10492 default:
10493 break;
10497 switch (code)
10499 case CONST_DECL:
10500 return fold (DECL_INITIAL (t));
10502 default:
10503 return t;
10504 } /* switch (code) */
10507 #ifdef ENABLE_FOLD_CHECKING
10508 #undef fold
10510 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10511 static void fold_check_failed (tree, tree);
10512 void print_fold_checksum (tree);
10514 /* When --enable-checking=fold is in effect, compute a digest of EXPR
10515 before and after the actual fold call, to verify that fold did not
10516 accidentally change the original expr. */
10518 tree
10519 fold (tree expr)
10521 tree ret;
10522 struct md5_ctx ctx;
10523 unsigned char checksum_before[16], checksum_after[16];
10524 htab_t ht;
10526 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10527 md5_init_ctx (&ctx);
10528 fold_checksum_tree (expr, &ctx, ht);
10529 md5_finish_ctx (&ctx, checksum_before);
10530 htab_empty (ht);
10532 ret = fold_1 (expr);
10534 md5_init_ctx (&ctx);
10535 fold_checksum_tree (expr, &ctx, ht);
10536 md5_finish_ctx (&ctx, checksum_after);
10537 htab_delete (ht);
10539 if (memcmp (checksum_before, checksum_after, 16))
10540 fold_check_failed (expr, ret);
10542 return ret;
10545 void
10546 print_fold_checksum (tree expr)
10548 struct md5_ctx ctx;
10549 unsigned char checksum[16], cnt;
10550 htab_t ht;
10552 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10553 md5_init_ctx (&ctx);
10554 fold_checksum_tree (expr, &ctx, ht);
10555 md5_finish_ctx (&ctx, checksum);
10556 htab_delete (ht);
10557 for (cnt = 0; cnt < 16; ++cnt)
10558 fprintf (stderr, "%02x", checksum[cnt]);
10559 putc ('\n', stderr);
10562 static void
10563 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10565 internal_error ("fold check: original tree changed by fold");
10568 static void
10569 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10571 void **slot;
10572 enum tree_code code;
10573 struct tree_function_decl buf;
10574 int i, len;
10576 recursive_label:
10578 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10579 <= sizeof (struct tree_function_decl))
10580 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10581 if (expr == NULL)
10582 return;
10583 slot = htab_find_slot (ht, expr, INSERT);
10584 if (*slot != NULL)
10585 return;
10586 *slot = expr;
10587 code = TREE_CODE (expr);
10588 if (TREE_CODE_CLASS (code) == tcc_declaration
10589 && DECL_ASSEMBLER_NAME_SET_P (expr))
10591 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10592 memcpy ((char *) &buf, expr, tree_size (expr));
10593 expr = (tree) &buf;
10594 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10596 else if (TREE_CODE_CLASS (code) == tcc_type
10597 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10598 || TYPE_CACHED_VALUES_P (expr)
10599 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10601 /* Allow these fields to be modified. */
10602 memcpy ((char *) &buf, expr, tree_size (expr));
10603 expr = (tree) &buf;
10604 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10605 TYPE_POINTER_TO (expr) = NULL;
10606 TYPE_REFERENCE_TO (expr) = NULL;
10607 if (TYPE_CACHED_VALUES_P (expr))
10609 TYPE_CACHED_VALUES_P (expr) = 0;
10610 TYPE_CACHED_VALUES (expr) = NULL;
10613 md5_process_bytes (expr, tree_size (expr), ctx);
10614 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10615 if (TREE_CODE_CLASS (code) != tcc_type
10616 && TREE_CODE_CLASS (code) != tcc_declaration
10617 && code != TREE_LIST)
10618 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10619 switch (TREE_CODE_CLASS (code))
10621 case tcc_constant:
10622 switch (code)
10624 case STRING_CST:
10625 md5_process_bytes (TREE_STRING_POINTER (expr),
10626 TREE_STRING_LENGTH (expr), ctx);
10627 break;
10628 case COMPLEX_CST:
10629 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10630 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10631 break;
10632 case VECTOR_CST:
10633 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10634 break;
10635 default:
10636 break;
10638 break;
10639 case tcc_exceptional:
10640 switch (code)
10642 case TREE_LIST:
10643 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10644 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10645 expr = TREE_CHAIN (expr);
10646 goto recursive_label;
10647 break;
10648 case TREE_VEC:
10649 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10650 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10651 break;
10652 default:
10653 break;
10655 break;
10656 case tcc_expression:
10657 case tcc_reference:
10658 case tcc_comparison:
10659 case tcc_unary:
10660 case tcc_binary:
10661 case tcc_statement:
10662 len = TREE_CODE_LENGTH (code);
10663 for (i = 0; i < len; ++i)
10664 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10665 break;
10666 case tcc_declaration:
10667 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10668 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10669 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
10671 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10672 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10673 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10674 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10675 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10677 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10678 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10680 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10682 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10683 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10684 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10686 break;
10687 case tcc_type:
10688 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10689 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10690 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10691 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10692 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10693 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10694 if (INTEGRAL_TYPE_P (expr)
10695 || SCALAR_FLOAT_TYPE_P (expr))
10697 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10698 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10700 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10701 if (TREE_CODE (expr) == RECORD_TYPE
10702 || TREE_CODE (expr) == UNION_TYPE
10703 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10704 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10705 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10706 break;
10707 default:
10708 break;
10712 #endif
10714 /* Fold a unary tree expression with code CODE of type TYPE with an
10715 operand OP0. Return a folded expression if successful. Otherwise,
10716 return a tree expression with code CODE of type TYPE with an
10717 operand OP0. */
10719 tree
10720 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10722 tree tem;
10723 #ifdef ENABLE_FOLD_CHECKING
10724 unsigned char checksum_before[16], checksum_after[16];
10725 struct md5_ctx ctx;
10726 htab_t ht;
10728 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10729 md5_init_ctx (&ctx);
10730 fold_checksum_tree (op0, &ctx, ht);
10731 md5_finish_ctx (&ctx, checksum_before);
10732 htab_empty (ht);
10733 #endif
10735 tem = fold_unary (code, type, op0);
10736 if (!tem)
10737 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10739 #ifdef ENABLE_FOLD_CHECKING
10740 md5_init_ctx (&ctx);
10741 fold_checksum_tree (op0, &ctx, ht);
10742 md5_finish_ctx (&ctx, checksum_after);
10743 htab_delete (ht);
10745 if (memcmp (checksum_before, checksum_after, 16))
10746 fold_check_failed (op0, tem);
10747 #endif
10748 return tem;
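/* Illustrative sketch (an example, not part of this file): a typical
   call through the fold_build1 macro,

       tree neg = fold_build1 (NEGATE_EXPR, integer_type_node,
                               build_int_cst (integer_type_node, 7));

   yields the INTEGER_CST -7 directly, where plain build1 would have
   produced a NEGATE_EXPR node; with ENABLE_FOLD_CHECKING the wrapper
   above also verifies that OP0 itself was left untouched.  */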
10751 /* Fold a binary tree expression with code CODE of type TYPE with
10752 operands OP0 and OP1. Return a folded expression if successful.
10753 Otherwise, return a tree expression with code CODE of type TYPE
10754 with operands OP0 and OP1. */
10756 tree
10757 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10758 MEM_STAT_DECL)
10760 tree tem;
10761 #ifdef ENABLE_FOLD_CHECKING
10762 unsigned char checksum_before_op0[16],
10763 checksum_before_op1[16],
10764 checksum_after_op0[16],
10765 checksum_after_op1[16];
10766 struct md5_ctx ctx;
10767 htab_t ht;
10769 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10770 md5_init_ctx (&ctx);
10771 fold_checksum_tree (op0, &ctx, ht);
10772 md5_finish_ctx (&ctx, checksum_before_op0);
10773 htab_empty (ht);
10775 md5_init_ctx (&ctx);
10776 fold_checksum_tree (op1, &ctx, ht);
10777 md5_finish_ctx (&ctx, checksum_before_op1);
10778 htab_empty (ht);
10779 #endif
10781 tem = fold_binary (code, type, op0, op1);
10782 if (!tem)
10783 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10785 #ifdef ENABLE_FOLD_CHECKING
10786 md5_init_ctx (&ctx);
10787 fold_checksum_tree (op0, &ctx, ht);
10788 md5_finish_ctx (&ctx, checksum_after_op0);
10789 htab_empty (ht);
10791 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10792 fold_check_failed (op0, tem);
10794 md5_init_ctx (&ctx);
10795 fold_checksum_tree (op1, &ctx, ht);
10796 md5_finish_ctx (&ctx, checksum_after_op1);
10797 htab_delete (ht);
10799 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10800 fold_check_failed (op1, tem);
10801 #endif
10802 return tem;
10805 /* Fold a ternary tree expression with code CODE of type TYPE with
10806 operands OP0, OP1, and OP2. Return a folded expression if
10807 successful. Otherwise, return a tree expression with code CODE of
10808 type TYPE with operands OP0, OP1, and OP2. */
10810 tree
10811 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10812 MEM_STAT_DECL)
10814 tree tem;
10815 #ifdef ENABLE_FOLD_CHECKING
10816 unsigned char checksum_before_op0[16],
10817 checksum_before_op1[16],
10818 checksum_before_op2[16],
10819 checksum_after_op0[16],
10820 checksum_after_op1[16],
10821 checksum_after_op2[16];
10822 struct md5_ctx ctx;
10823 htab_t ht;
10825 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10826 md5_init_ctx (&ctx);
10827 fold_checksum_tree (op0, &ctx, ht);
10828 md5_finish_ctx (&ctx, checksum_before_op0);
10829 htab_empty (ht);
10831 md5_init_ctx (&ctx);
10832 fold_checksum_tree (op1, &ctx, ht);
10833 md5_finish_ctx (&ctx, checksum_before_op1);
10834 htab_empty (ht);
10836 md5_init_ctx (&ctx);
10837 fold_checksum_tree (op2, &ctx, ht);
10838 md5_finish_ctx (&ctx, checksum_before_op2);
10839 htab_empty (ht);
10840 #endif
10842 tem = fold_ternary (code, type, op0, op1, op2);
10843 if (!tem)
10844 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10846 #ifdef ENABLE_FOLD_CHECKING
10847 md5_init_ctx (&ctx);
10848 fold_checksum_tree (op0, &ctx, ht);
10849 md5_finish_ctx (&ctx, checksum_after_op0);
10850 htab_empty (ht);
10852 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10853 fold_check_failed (op0, tem);
10855 md5_init_ctx (&ctx);
10856 fold_checksum_tree (op1, &ctx, ht);
10857 md5_finish_ctx (&ctx, checksum_after_op1);
10858 htab_empty (ht);
10860 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10861 fold_check_failed (op1, tem);
10863 md5_init_ctx (&ctx);
10864 fold_checksum_tree (op2, &ctx, ht);
10865 md5_finish_ctx (&ctx, checksum_after_op2);
10866 htab_delete (ht);
10868 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10869 fold_check_failed (op2, tem);
10870 #endif
10871 return tem;
10874 /* Perform constant folding and related simplification of an initializer
10875 expression. The routines below behave identically to "fold_buildN" but
10876 ignore potential run-time traps and exceptions that fold must preserve. */
10878 #define START_FOLD_INIT \
10879 int saved_signaling_nans = flag_signaling_nans;\
10880 int saved_trapping_math = flag_trapping_math;\
10881 int saved_rounding_math = flag_rounding_math;\
10882 int saved_trapv = flag_trapv;\
10883 flag_signaling_nans = 0;\
10884 flag_trapping_math = 0;\
10885 flag_rounding_math = 0;\
10886 flag_trapv = 0
10888 #define END_FOLD_INIT \
10889 flag_signaling_nans = saved_signaling_nans;\
10890 flag_trapping_math = saved_trapping_math;\
10891 flag_rounding_math = saved_rounding_math;\
10892 flag_trapv = saved_trapv
10894 tree
10895 fold_build1_initializer (enum tree_code code, tree type, tree op)
10897 tree result;
10898 START_FOLD_INIT;
10900 result = fold_build1 (code, type, op);
10902 END_FOLD_INIT;
10903 return result;
10906 tree
10907 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10909 tree result;
10910 START_FOLD_INIT;
10912 result = fold_build2 (code, type, op0, op1);
10914 END_FOLD_INIT;
10915 return result;
10918 tree
10919 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10920 tree op2)
10922 tree result;
10923 START_FOLD_INIT;
10925 result = fold_build3 (code, type, op0, op1, op2);
10927 END_FOLD_INIT;
10928 return result;
10931 #undef START_FOLD_INIT
10932 #undef END_FOLD_INIT
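/* Illustrative sketch (an example, not part of this file): under
   -frounding-math, fold refuses to fold a floating point division such
   as 1.0/3.0 because the result depends on the run-time rounding mode.
   A static initializer has no run time, so a front end can call

       tree val = fold_build2_initializer (RDIV_EXPR, double_type_node,
                                           op0, op1);

   (OP0 and OP1 being REAL_CST operands built elsewhere) and the fold
   proceeds, because the flags above are temporarily cleared.  */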
10934 /* Determine if the first argument is a multiple of the second argument.
10935 Return 0 if it is not, or if we cannot easily determine it to be.
10937 An example of the sort of thing we care about (at this point; this routine
10938 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10939 fold cases do now) is discovering that
10941 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10943 is a multiple of
10945 SAVE_EXPR (J * 8)
10947 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10949 This code also handles discovering that
10951 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10953 is a multiple of 8 so we don't have to worry about dealing with a
10954 possible remainder.
10956 Note that we *look* inside a SAVE_EXPR only to determine how it was
10957 calculated; it is not safe for fold to do much of anything else with the
10958 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10959 at run time. For example, the latter example above *cannot* be implemented
10960 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10961 evaluation time of the original SAVE_EXPR is not necessarily the same at
10962 the time the new expression is evaluated. The only optimization of this
10963 sort that would be valid is changing
10965 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10967 divided by 8 to
10969 SAVE_EXPR (I) * SAVE_EXPR (J)
10971 (where the same SAVE_EXPR (J) is used in the original and the
10972 transformed version). */
10974 static int
10975 multiple_of_p (tree type, tree top, tree bottom)
10977 if (operand_equal_p (top, bottom, 0))
10978 return 1;
10980 if (TREE_CODE (type) != INTEGER_TYPE)
10981 return 0;
10983 switch (TREE_CODE (top))
10985 case BIT_AND_EXPR:
10986 /* Bitwise and provides a power of two multiple. If the mask is
10987 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10988 if (!integer_pow2p (bottom))
10989 return 0;
10990 /* FALLTHRU */
10992 case MULT_EXPR:
10993 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10994 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10996 case PLUS_EXPR:
10997 case MINUS_EXPR:
10998 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10999 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11001 case LSHIFT_EXPR:
11002 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11004 tree op1, t1;
11006 op1 = TREE_OPERAND (top, 1);
11007 /* const_binop may not detect overflow correctly,
11008 so check for it explicitly here. */
11009 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11010 > TREE_INT_CST_LOW (op1)
11011 && TREE_INT_CST_HIGH (op1) == 0
11012 && 0 != (t1 = fold_convert (type,
11013 const_binop (LSHIFT_EXPR,
11014 size_one_node,
11015 op1, 0)))
11016 && ! TREE_OVERFLOW (t1))
11017 return multiple_of_p (type, t1, bottom);
11019 return 0;
11021 case NOP_EXPR:
11022 /* Can't handle conversions from non-integral or wider integral type. */
11023 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11024 || (TYPE_PRECISION (type)
11025 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11026 return 0;
11028 /* ... fall through ... */
11030 case SAVE_EXPR:
11031 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
11033 case INTEGER_CST:
11034 if (TREE_CODE (bottom) != INTEGER_CST
11035 || (TYPE_UNSIGNED (type)
11036 && (tree_int_cst_sgn (top) < 0
11037 || tree_int_cst_sgn (bottom) < 0)))
11038 return 0;
11039 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
11040 top, bottom, 0));
11042 default:
11043 return 0;
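/* Illustrative sketch (an example, not part of this file): for a
   sizetype expression I * 8,

       multiple_of_p (sizetype,
                      size_binop (MULT_EXPR, i, size_int (8)),
                      size_int (4))

   returns 1 through the MULT_EXPR case, because the INTEGER_CST
   operand 8 is itself a multiple of 4.  */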
11047 /* Return true if `t' is known to be non-negative. */
11049 int
11050 tree_expr_nonnegative_p (tree t)
11052 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11053 return 1;
11055 switch (TREE_CODE (t))
11057 case ABS_EXPR:
11058 /* We can't return 1 if flag_wrapv is set because
11059 ABS_EXPR<INT_MIN> = INT_MIN. */
11060 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11061 return 1;
11062 break;
11064 case INTEGER_CST:
11065 return tree_int_cst_sgn (t) >= 0;
11067 case REAL_CST:
11068 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11070 case PLUS_EXPR:
11071 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11072 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11073 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11075 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11076 both unsigned and at least 2 bits shorter than the result. */
11077 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11078 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11079 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11081 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11082 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11083 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11084 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11086 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11087 TYPE_PRECISION (inner2)) + 1;
11088 return prec < TYPE_PRECISION (TREE_TYPE (t));
11091 break;
11093 case MULT_EXPR:
11094 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11096 /* x * x for floating point x is always non-negative. */
11097 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11098 return 1;
11099 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11100 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11103 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11104 both unsigned and their combined precision is less than that of the result. */
11105 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11106 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11107 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11109 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11110 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11111 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11112 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11113 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11114 < TYPE_PRECISION (TREE_TYPE (t));
11116 return 0;
11118 case BIT_AND_EXPR:
11119 case MAX_EXPR:
11120 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11121 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11123 case BIT_IOR_EXPR:
11124 case BIT_XOR_EXPR:
11125 case MIN_EXPR:
11126 case RDIV_EXPR:
11127 case TRUNC_DIV_EXPR:
11128 case CEIL_DIV_EXPR:
11129 case FLOOR_DIV_EXPR:
11130 case ROUND_DIV_EXPR:
11131 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11132 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11134 case TRUNC_MOD_EXPR:
11135 case CEIL_MOD_EXPR:
11136 case FLOOR_MOD_EXPR:
11137 case ROUND_MOD_EXPR:
11138 case SAVE_EXPR:
11139 case NON_LVALUE_EXPR:
11140 case FLOAT_EXPR:
11141 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11143 case COMPOUND_EXPR:
11144 case MODIFY_EXPR:
11145 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11147 case BIND_EXPR:
11148 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
11150 case COND_EXPR:
11151 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
11152 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
11154 case NOP_EXPR:
11156 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11157 tree outer_type = TREE_TYPE (t);
11159 if (TREE_CODE (outer_type) == REAL_TYPE)
11161 if (TREE_CODE (inner_type) == REAL_TYPE)
11162 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11163 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11165 if (TYPE_UNSIGNED (inner_type))
11166 return 1;
11167 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11170 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11172 if (TREE_CODE (inner_type) == REAL_TYPE)
11173 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11174 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11175 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11176 && TYPE_UNSIGNED (inner_type);
11179 break;
11181 case TARGET_EXPR:
11183 tree temp = TARGET_EXPR_SLOT (t);
11184 t = TARGET_EXPR_INITIAL (t);
11186 /* If the initializer is non-void, then it's a normal expression
11187 that will be assigned to the slot. */
11188 if (!VOID_TYPE_P (t))
11189 return tree_expr_nonnegative_p (t);
11191 /* Otherwise, the initializer sets the slot in some way. One common
11192 way is an assignment statement at the end of the initializer. */
11193 while (1)
11195 if (TREE_CODE (t) == BIND_EXPR)
11196 t = expr_last (BIND_EXPR_BODY (t));
11197 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11198 || TREE_CODE (t) == TRY_CATCH_EXPR)
11199 t = expr_last (TREE_OPERAND (t, 0));
11200 else if (TREE_CODE (t) == STATEMENT_LIST)
11201 t = expr_last (t);
11202 else
11203 break;
11205 if (TREE_CODE (t) == MODIFY_EXPR
11206 && TREE_OPERAND (t, 0) == temp)
11207 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11209 return 0;
11212 case CALL_EXPR:
11214 tree fndecl = get_callee_fndecl (t);
11215 tree arglist = TREE_OPERAND (t, 1);
11216 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11217 switch (DECL_FUNCTION_CODE (fndecl))
11219 CASE_FLT_FN (BUILT_IN_ACOS):
11220 CASE_FLT_FN (BUILT_IN_ACOSH):
11221 CASE_FLT_FN (BUILT_IN_CABS):
11222 CASE_FLT_FN (BUILT_IN_COSH):
11223 CASE_FLT_FN (BUILT_IN_ERFC):
11224 CASE_FLT_FN (BUILT_IN_EXP):
11225 CASE_FLT_FN (BUILT_IN_EXP10):
11226 CASE_FLT_FN (BUILT_IN_EXP2):
11227 CASE_FLT_FN (BUILT_IN_FABS):
11228 CASE_FLT_FN (BUILT_IN_FDIM):
11229 CASE_FLT_FN (BUILT_IN_HYPOT):
11230 CASE_FLT_FN (BUILT_IN_POW10):
11231 CASE_INT_FN (BUILT_IN_FFS):
11232 CASE_INT_FN (BUILT_IN_PARITY):
11233 CASE_INT_FN (BUILT_IN_POPCOUNT):
11234 /* Always true. */
11235 return 1;
11237 CASE_FLT_FN (BUILT_IN_SQRT):
11238 /* sqrt(-0.0) is -0.0. */
11239 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11240 return 1;
11241 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11243 CASE_FLT_FN (BUILT_IN_ASINH):
11244 CASE_FLT_FN (BUILT_IN_ATAN):
11245 CASE_FLT_FN (BUILT_IN_ATANH):
11246 CASE_FLT_FN (BUILT_IN_CBRT):
11247 CASE_FLT_FN (BUILT_IN_CEIL):
11248 CASE_FLT_FN (BUILT_IN_ERF):
11249 CASE_FLT_FN (BUILT_IN_EXPM1):
11250 CASE_FLT_FN (BUILT_IN_FLOOR):
11251 CASE_FLT_FN (BUILT_IN_FMOD):
11252 CASE_FLT_FN (BUILT_IN_FREXP):
11253 CASE_FLT_FN (BUILT_IN_LCEIL):
11254 CASE_FLT_FN (BUILT_IN_LDEXP):
11255 CASE_FLT_FN (BUILT_IN_LFLOOR):
11256 CASE_FLT_FN (BUILT_IN_LLCEIL):
11257 CASE_FLT_FN (BUILT_IN_LLFLOOR):
11258 CASE_FLT_FN (BUILT_IN_LLRINT):
11259 CASE_FLT_FN (BUILT_IN_LLROUND):
11260 CASE_FLT_FN (BUILT_IN_LRINT):
11261 CASE_FLT_FN (BUILT_IN_LROUND):
11262 CASE_FLT_FN (BUILT_IN_MODF):
11263 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11264 CASE_FLT_FN (BUILT_IN_POW):
11265 CASE_FLT_FN (BUILT_IN_RINT):
11266 CASE_FLT_FN (BUILT_IN_ROUND):
11267 CASE_FLT_FN (BUILT_IN_SIGNBIT):
11268 CASE_FLT_FN (BUILT_IN_SINH):
11269 CASE_FLT_FN (BUILT_IN_TANH):
11270 CASE_FLT_FN (BUILT_IN_TRUNC):
11271 /* True if the 1st argument is nonnegative. */
11272 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11274 CASE_FLT_FN (BUILT_IN_FMAX):
11275 /* True if the 1st OR 2nd arguments are nonnegative. */
11276 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11277 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11279 CASE_FLT_FN (BUILT_IN_FMIN):
11280 /* True if the 1st AND 2nd arguments are nonnegative. */
11281 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11282 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11284 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11285 /* True if the 2nd argument is nonnegative. */
11286 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11288 default:
11289 break;
11293 /* ... fall through ... */
11295 default:
11296 if (truth_value_p (TREE_CODE (t)))
11297 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11298 return 1;
11301 /* We don't know the sign of `t', so be conservative and return false. */
11302 return 0;
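/* Illustrative sketch (an example, not part of this file): with
   16-bit unsigned short and 32-bit int, the PLUS_EXPR case above
   proves that (int) us1 + (int) us2 is non-negative: each operand is
   zero-extended from 16 bits and MAX (16, 16) + 1 == 17 < 32, so the
   sum cannot reach the sign bit.  */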
11305 /* Return true when T is known to be nonzero; only integral and
11306 pointer types are handled, since for floating point we would also
11307 have to ensure that T is not denormal. Similar logic is present in nonzero_address in rtlanal.h. */
11309 bool
11310 tree_expr_nonzero_p (tree t)
11312 tree type = TREE_TYPE (t);
11314 /* Doing something useful for floating point would need more work. */
11315 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11316 return false;
11318 switch (TREE_CODE (t))
11320 case ABS_EXPR:
11321 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11323 case INTEGER_CST:
11324 /* We used to test for !integer_zerop here. This does not work correctly
11325 if TREE_CONSTANT_OVERFLOW (t). */
11326 return (TREE_INT_CST_LOW (t) != 0
11327 || TREE_INT_CST_HIGH (t) != 0);
11329 case PLUS_EXPR:
11330 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11332 /* In the presence of negative values it is hard
11333 to say anything definite. */
11334 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11335 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11336 return false;
11337 /* One of the operands must be positive and the other non-negative. */
11338 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11339 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11341 break;
11343 case MULT_EXPR:
11344 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11346 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11347 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11349 break;
11351 case NOP_EXPR:
11353 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11354 tree outer_type = TREE_TYPE (t);
11356 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
11357 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11359 break;
11361 case ADDR_EXPR:
11363 tree base = get_base_address (TREE_OPERAND (t, 0));
11365 if (!base)
11366 return false;
11368 /* Weak declarations may link to NULL. */
11369 if (VAR_OR_FUNCTION_DECL_P (base))
11370 return !DECL_WEAK (base);
11372 /* Constants are never weak. */
11373 if (CONSTANT_CLASS_P (base))
11374 return true;
11376 return false;
11379 case COND_EXPR:
11380 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11381 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11383 case MIN_EXPR:
11384 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11385 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11387 case MAX_EXPR:
11388 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11390 /* When both operands are nonzero, then MAX must be too. */
11391 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11392 return true;
11394 /* MAX where operand 0 is positive is positive. */
11395 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11397 /* MAX where operand 1 is positive is positive. */
11398 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11399 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11400 return true;
11401 break;
11403 case COMPOUND_EXPR:
11404 case MODIFY_EXPR:
11405 case BIND_EXPR:
11406 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11408 case SAVE_EXPR:
11409 case NON_LVALUE_EXPR:
11410 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11412 case BIT_IOR_EXPR:
11413 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11414 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11416 case CALL_EXPR:
11417 return alloca_call_p (t);
11419 default:
11420 break;
11422 return false;
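/* Illustrative sketch (an example, not part of this file): the
   ADDR_EXPR case above answers true for the address of an ordinary
   variable or function and for constants such as string literals, but
   false for

       extern int w __attribute__ ((weak));

   since &w may legitimately be null when W is never defined.  */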
11425 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11426 attempt to fold the expression to a constant without modifying TYPE,
11427 OP0 or OP1.
11429 If the expression could be simplified to a constant, then return
11430 the constant. If the expression would not be simplified to a
11431 constant, then return NULL_TREE. */
11433 tree
11434 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11436 tree tem = fold_binary (code, type, op0, op1);
11437 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11440 /* Given the components of a unary expression CODE, TYPE and OP0,
11441 attempt to fold the expression to a constant without modifying
11442 TYPE or OP0.
11444 If the expression could be simplified to a constant, then return
11445 the constant. If the expression would not be simplified to a
11446 constant, then return NULL_TREE. */
11448 tree
11449 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11451 tree tem = fold_unary (code, type, op0);
11452 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
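/* Illustrative sketch (an example, not part of this file):

       fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                build_int_cst (integer_type_node, 2),
                                build_int_cst (integer_type_node, 3))

   returns the INTEGER_CST 5, whereas the same call with a VAR_DECL as
   one operand returns NULL_TREE, since the result would not be a
   constant.  */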
11455 /* If EXP represents referencing an element in a constant string
11456 (either via pointer arithmetic or array indexing), return the
11457 tree representing the value accessed, otherwise return NULL. */
11459 tree
11460 fold_read_from_constant_string (tree exp)
11462 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11464 tree exp1 = TREE_OPERAND (exp, 0);
11465 tree index;
11466 tree string;
11468 if (TREE_CODE (exp) == INDIRECT_REF)
11469 string = string_constant (exp1, &index);
11470 else
11472 tree low_bound = array_ref_low_bound (exp);
11473 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11475 /* Optimize the special case of a zero lower bound.
11477 We convert the low_bound to sizetype to avoid some problems
11478 with constant folding. (E.g. suppose the lower bound is 1,
11479 and its mode is QI. Without the conversion, (ARRAY
11480 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11481 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11482 if (! integer_zerop (low_bound))
11483 index = size_diffop (index, fold_convert (sizetype, low_bound));
11485 string = exp1;
11488 if (string
11489 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11490 && TREE_CODE (string) == STRING_CST
11491 && TREE_CODE (index) == INTEGER_CST
11492 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11493 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11494 == MODE_INT)
11495 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11496 return fold_convert (TREE_TYPE (exp),
11497 build_int_cst (NULL_TREE,
11498 (TREE_STRING_POINTER (string)
11499 [TREE_INT_CST_LOW (index)])));
11501 return NULL;
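/* Illustrative sketch (an example, not part of this file): given the
   tree for "abc"[1] -- an ARRAY_REF of a STRING_CST with INTEGER_CST
   index 1 -- the routine above returns an integer constant with the
   value of 'b'.  The INDIRECT_REF form *("abc" + 2) is handled the
   same way via string_constant.  */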
11504 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11505 an integer constant or real constant.
11507 TYPE is the type of the result. */
11509 static tree
11510 fold_negate_const (tree arg0, tree type)
11512 tree t = NULL_TREE;
11514 switch (TREE_CODE (arg0))
11516 case INTEGER_CST:
11518 unsigned HOST_WIDE_INT low;
11519 HOST_WIDE_INT high;
11520 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11521 TREE_INT_CST_HIGH (arg0),
11522 &low, &high);
11523 t = build_int_cst_wide (type, low, high);
11524 t = force_fit_type (t, 1,
11525 (overflow | TREE_OVERFLOW (arg0))
11526 && !TYPE_UNSIGNED (type),
11527 TREE_CONSTANT_OVERFLOW (arg0));
11528 break;
11531 case REAL_CST:
11532 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11533 break;
11535 default:
11536 gcc_unreachable ();
11539 return t;
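/* Illustrative sketch (an example, not part of this file): negating
   the INTEGER_CST INT_MIN of a signed 32-bit type overflows; neg_double
   reports this, so the result above comes back with TREE_OVERFLOW set,
   letting callers distinguish it from a cleanly folded constant.  */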
11542 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11543 an integer constant or real constant.
11545 TYPE is the type of the result. */
11547 tree
11548 fold_abs_const (tree arg0, tree type)
11550 tree t = NULL_TREE;
11552 switch (TREE_CODE (arg0))
11554 case INTEGER_CST:
11555 /* If the value is unsigned, then the absolute value is
11556 the same as the ordinary value. */
11557 if (TYPE_UNSIGNED (type))
11558 t = arg0;
11559 /* Similarly, if the value is non-negative. */
11560 else if (INT_CST_LT (integer_minus_one_node, arg0))
11561 t = arg0;
11562 /* If the value is negative, then the absolute value is
11563 its negation. */
11564 else
11566 unsigned HOST_WIDE_INT low;
11567 HOST_WIDE_INT high;
11568 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11569 TREE_INT_CST_HIGH (arg0),
11570 &low, &high);
11571 t = build_int_cst_wide (type, low, high);
11572 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11573 TREE_CONSTANT_OVERFLOW (arg0));
11575 break;
11577 case REAL_CST:
11578 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11579 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11580 else
11581 t = arg0;
11582 break;
11584 default:
11585 gcc_unreachable ();
11588 return t;
11591 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11592 constant. TYPE is the type of the result. */
11594 static tree
11595 fold_not_const (tree arg0, tree type)
11597 tree t = NULL_TREE;
11599 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11601 t = build_int_cst_wide (type,
11602 ~ TREE_INT_CST_LOW (arg0),
11603 ~ TREE_INT_CST_HIGH (arg0));
11604 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11605 TREE_CONSTANT_OVERFLOW (arg0));
11607 return t;
11610 /* Given CODE, a relational operator, the target type, TYPE and two
11611 constant operands OP0 and OP1, return the result of the
11612 relational operation. If the result is not a compile time
11613 constant, then return NULL_TREE. */
11615 static tree
11616 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11618 int result, invert;
11620 /* From here on, the only cases we handle are when the result is
11621 known to be a constant. */
11623 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11625 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11626 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11628 /* Handle the cases where either operand is a NaN. */
11629 if (real_isnan (c0) || real_isnan (c1))
11631 switch (code)
11633 case EQ_EXPR:
11634 case ORDERED_EXPR:
11635 result = 0;
11636 break;
11638 case NE_EXPR:
11639 case UNORDERED_EXPR:
11640 case UNLT_EXPR:
11641 case UNLE_EXPR:
11642 case UNGT_EXPR:
11643 case UNGE_EXPR:
11644 case UNEQ_EXPR:
11645 result = 1;
11646 break;
11648 case LT_EXPR:
11649 case LE_EXPR:
11650 case GT_EXPR:
11651 case GE_EXPR:
11652 case LTGT_EXPR:
11653 if (flag_trapping_math)
11654 return NULL_TREE;
11655 result = 0;
11656 break;
11658 default:
11659 gcc_unreachable ();
11662 return constant_boolean_node (result, type);
11665 return constant_boolean_node (real_compare (code, c0, c1), type);
11668 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11670 To compute GT, swap the arguments and do LT.
11671 To compute GE, do LT and invert the result.
11672 To compute LE, swap the arguments, do LT and invert the result.
11673 To compute NE, do EQ and invert the result.
11675 Therefore, the code below must handle only EQ and LT. */
11677 if (code == LE_EXPR || code == GT_EXPR)
11679 tree tem = op0;
11680 op0 = op1;
11681 op1 = tem;
11682 code = swap_tree_comparison (code);
11685 /* Note that it is safe to invert for real values here because we
11686 have already handled the one case where it matters. */
11688 invert = 0;
11689 if (code == NE_EXPR || code == GE_EXPR)
11691 invert = 1;
11692 code = invert_tree_comparison (code, false);
11695 /* Compute a result for LT or EQ if args permit;
11696 otherwise return NULL_TREE. */
11697 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11699 if (code == EQ_EXPR)
11700 result = tree_int_cst_equal (op0, op1);
11701 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11702 result = INT_CST_LT_UNSIGNED (op0, op1);
11703 else
11704 result = INT_CST_LT (op0, op1);
11706 else
11707 return NULL_TREE;
11709 if (invert)
11710 result ^= 1;
11711 return constant_boolean_node (result, type);
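/* Illustrative sketch (an example, not part of this file): evaluating
   3 >= 5 with the scheme above, GE is inverted to LT, INT_CST_LT (3, 5)
   yields 1, and the final inversion produces 0, i.e. the boolean
   constant false.  */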
11714 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11715 Don't build a cleanup point expression for EXPR if it doesn't have side
11716 effects. */
11718 tree
11719 fold_build_cleanup_point_expr (tree type, tree expr)
11721 /* If the expression does not have side effects then we don't have to wrap
11722 it with a cleanup point expression. */
11723 if (!TREE_SIDE_EFFECTS (expr))
11724 return expr;
11726 /* If the expression is a RETURN_EXPR, check whether the expression inside
11727 the return, or the right-hand side of the MODIFY_EXPR inside the return,
11728 has side effects. If either of them doesn't, we don't need to wrap the
11729 expression in a cleanup point expression. Note we don't check the
11730 left-hand side of the MODIFY_EXPR because it should always be a RESULT_DECL. */
11731 if (TREE_CODE (expr) == RETURN_EXPR)
11733 tree op = TREE_OPERAND (expr, 0);
11734 if (!op || !TREE_SIDE_EFFECTS (op))
11735 return expr;
11736 op = TREE_OPERAND (op, 1);
11737 if (!TREE_SIDE_EFFECTS (op))
11738 return expr;
11741 return build1 (CLEANUP_POINT_EXPR, type, expr);
11744 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11745 avoid confusing the gimplify process. */
11747 tree
11748 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11750 /* The size of the object is not relevant when talking about its address. */
11751 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11752 t = TREE_OPERAND (t, 0);
11754 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11755 if (TREE_CODE (t) == INDIRECT_REF
11756 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11758 t = TREE_OPERAND (t, 0);
11759 if (TREE_TYPE (t) != ptrtype)
11760 t = build1 (NOP_EXPR, ptrtype, t);
11762 else
11764 tree base = t;
11766 while (handled_component_p (base))
11767 base = TREE_OPERAND (base, 0);
11768 if (DECL_P (base))
11769 TREE_ADDRESSABLE (base) = 1;
11771 t = build1 (ADDR_EXPR, ptrtype, t);
11774 return t;
11777 tree
11778 build_fold_addr_expr (tree t)
11780 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11783 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11784 of an indirection through OP0, or NULL_TREE if no simplification is
11785 possible. */
11787 tree
11788 fold_indirect_ref_1 (tree type, tree op0)
11790 tree sub = op0;
11791 tree subtype;
11793 STRIP_NOPS (sub);
11794 subtype = TREE_TYPE (sub);
11795 if (!POINTER_TYPE_P (subtype))
11796 return NULL_TREE;
11798 if (TREE_CODE (sub) == ADDR_EXPR)
11800 tree op = TREE_OPERAND (sub, 0);
11801 tree optype = TREE_TYPE (op);
11802 /* *&p => p; make sure to handle *&"str"[cst] here. */
11803 if (type == optype)
11805 tree fop = fold_read_from_constant_string (op);
11806 if (fop)
11807 return fop;
11808 else
11809 return op;
11811 /* *(foo *)&fooarray => fooarray[0] */
11812 else if (TREE_CODE (optype) == ARRAY_TYPE
11813 && type == TREE_TYPE (optype))
11815 tree type_domain = TYPE_DOMAIN (optype);
11816 tree min_val = size_zero_node;
11817 if (type_domain && TYPE_MIN_VALUE (type_domain))
11818 min_val = TYPE_MIN_VALUE (type_domain);
11819 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11821 /* *(foo *)&complexfoo => __real__ complexfoo */
11822 else if (TREE_CODE (optype) == COMPLEX_TYPE
11823 && type == TREE_TYPE (optype))
11824 return fold_build1 (REALPART_EXPR, type, op);
11827 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
11828 if (TREE_CODE (sub) == PLUS_EXPR
11829 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
11831 tree op00 = TREE_OPERAND (sub, 0);
11832 tree op01 = TREE_OPERAND (sub, 1);
11833 tree op00type;
11835 STRIP_NOPS (op00);
11836 op00type = TREE_TYPE (op00);
11837 if (TREE_CODE (op00) == ADDR_EXPR
11838 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
11839 && type == TREE_TYPE (TREE_TYPE (op00type)))
11841 tree size = TYPE_SIZE_UNIT (type);
11842 if (tree_int_cst_equal (size, op01))
11843 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
11847 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11848 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11849 && type == TREE_TYPE (TREE_TYPE (subtype)))
11851 tree type_domain;
11852 tree min_val = size_zero_node;
11853 sub = build_fold_indirect_ref (sub);
11854 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11855 if (type_domain && TYPE_MIN_VALUE (type_domain))
11856 min_val = TYPE_MIN_VALUE (type_domain);
11857 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11860 return NULL_TREE;
11863 /* Builds an expression for an indirection through T, simplifying some
11864 cases. */
11866 tree
11867 build_fold_indirect_ref (tree t)
11869 tree type = TREE_TYPE (TREE_TYPE (t));
11870 tree sub = fold_indirect_ref_1 (type, t);
11872 if (sub)
11873 return sub;
11874 else
11875 return build1 (INDIRECT_REF, type, t);
11878 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11880 tree
11881 fold_indirect_ref (tree t)
11883 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11885 if (sub)
11886 return sub;
11887 else
11888 return t;
11891 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11892 whose result is ignored. The type of the returned tree need not be
11893 the same as that of the original expression. */
11895 tree
11896 fold_ignored_result (tree t)
11898 if (!TREE_SIDE_EFFECTS (t))
11899 return integer_zero_node;
11901 for (;;)
11902 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11904 case tcc_unary:
11905 t = TREE_OPERAND (t, 0);
11906 break;
11908 case tcc_binary:
11909 case tcc_comparison:
11910 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11911 t = TREE_OPERAND (t, 0);
11912 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11913 t = TREE_OPERAND (t, 1);
11914 else
11915 return t;
11916 break;
11918 case tcc_expression:
11919 switch (TREE_CODE (t))
11921 case COMPOUND_EXPR:
11922 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11923 return t;
11924 t = TREE_OPERAND (t, 0);
11925 break;
11927 case COND_EXPR:
11928 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11929 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11930 return t;
11931 t = TREE_OPERAND (t, 0);
11932 break;
11934 default:
11935 return t;
11937 break;
11939 default:
11940 return t;
11944 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11945 This can only be applied to objects of a sizetype. */
11947 tree
11948 round_up (tree value, int divisor)
11950 tree div = NULL_TREE;
11952 gcc_assert (divisor > 0);
11953 if (divisor == 1)
11954 return value;
11956 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11957 have to do anything. Only do this when VALUE is not a constant,
11958 because for a constant this check is more expensive than simply
11959 performing the computation. */
11960 if (TREE_CODE (value) != INTEGER_CST)
11962 div = build_int_cst (TREE_TYPE (value), divisor);
11964 if (multiple_of_p (TREE_TYPE (value), value, div))
11965 return value;
11968 /* If divisor is a power of two, simplify this to bit manipulation. */
11969 if (divisor == (divisor & -divisor))
11971 tree t;
11973 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11974 value = size_binop (PLUS_EXPR, value, t);
11975 t = build_int_cst (TREE_TYPE (value), -divisor);
11976 value = size_binop (BIT_AND_EXPR, value, t);
11978 else
11980 if (!div)
11981 div = build_int_cst (TREE_TYPE (value), divisor);
11982 value = size_binop (CEIL_DIV_EXPR, value, div);
11983 value = size_binop (MULT_EXPR, value, div);
11986 return value;
11989 /* Likewise, but round down. */
11991 tree
11992 round_down (tree value, int divisor)
11994 tree div = NULL_TREE;
11996 gcc_assert (divisor > 0);
11997 if (divisor == 1)
11998 return value;
12000 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12001 have to do anything. Only do this when VALUE is not a constant,
12002 because for a constant this check is more expensive than simply
12003 performing the computation. */
12004 if (TREE_CODE (value) != INTEGER_CST)
12006 div = build_int_cst (TREE_TYPE (value), divisor);
12008 if (multiple_of_p (TREE_TYPE (value), value, div))
12009 return value;
12012 /* If divisor is a power of two, simplify this to bit manipulation. */
12013 if (divisor == (divisor & -divisor))
12015 tree t;
12017 t = build_int_cst (TREE_TYPE (value), -divisor);
12018 value = size_binop (BIT_AND_EXPR, value, t);
12020 else
12022 if (!div)
12023 div = build_int_cst (TREE_TYPE (value), divisor);
12024 value = size_binop (FLOOR_DIV_EXPR, value, div);
12025 value = size_binop (MULT_EXPR, value, div);
12028 return value;
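/* Illustrative sketch (an example, not part of this file): with the
   power-of-two fast path above,

       round_up (size_int (13), 8)    ==>  (13 + 7) & -8  ==  16
       round_down (size_int (13), 8)  ==>  13 & -8        ==  8

   while a divisor such as 6 instead goes through the
   CEIL_DIV_EXPR/FLOOR_DIV_EXPR and MULT_EXPR pair.  */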
12031 /* Returns a pointer to the base of the object addressed by EXP and
12032 extracts the offset of the access, storing it in *PBITPOS (in bits)
12033 and *POFFSET. */
12035 static tree
12036 split_address_to_core_and_offset (tree exp,
12037 HOST_WIDE_INT *pbitpos, tree *poffset)
12039 tree core;
12040 enum machine_mode mode;
12041 int unsignedp, volatilep;
12042 HOST_WIDE_INT bitsize;
12044 if (TREE_CODE (exp) == ADDR_EXPR)
12046 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
12047 poffset, &mode, &unsignedp, &volatilep,
12048 false);
12049 core = build_fold_addr_expr (core);
12051 else
12053 core = exp;
12054 *pbitpos = 0;
12055 *poffset = NULL_TREE;
12058 return core;
12061 /* Returns true if addresses of E1 and E2 differ by a constant, false
12062 otherwise. If they do, E1 - E2 is stored in *DIFF. */
12064 bool
12065 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
12067 tree core1, core2;
12068 HOST_WIDE_INT bitpos1, bitpos2;
12069 tree toffset1, toffset2, tdiff, type;
12071 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
12072 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
12074 if (bitpos1 % BITS_PER_UNIT != 0
12075 || bitpos2 % BITS_PER_UNIT != 0
12076 || !operand_equal_p (core1, core2, 0))
12077 return false;
12079 if (toffset1 && toffset2)
12081 type = TREE_TYPE (toffset1);
12082 if (type != TREE_TYPE (toffset2))
12083 toffset2 = fold_convert (type, toffset2);
12085 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
12086 if (!cst_and_fits_in_hwi (tdiff))
12087 return false;
12089 *diff = int_cst_value (tdiff);
12091 else if (toffset1 || toffset2)
12093 /* If only one of the offsets is non-constant, the difference cannot
12094 be a constant. */
12095 return false;
12097 else
12098 *diff = 0;
12100 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
12101 return true;
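/* Illustrative sketch (an example, not part of this file): for

       struct s { int a; int b; } x;

   ptr_difference_const on &x.b and &x.a stores the byte offset of B
   (typically sizeof (int)) in *DIFF and returns true; comparing &x.a
   against the address of a different object fails the operand_equal_p
   test on the cores and returns false.  */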
12104 /* Simplify the floating point expression EXP when the sign of the
12105 result is not significant. Return NULL_TREE if no simplification
12106 is possible. */
12108 tree
12109 fold_strip_sign_ops (tree exp)
12111 tree arg0, arg1;
12113 switch (TREE_CODE (exp))
12115 case ABS_EXPR:
12116 case NEGATE_EXPR:
12117 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12118 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
12120 case MULT_EXPR:
12121 case RDIV_EXPR:
12122 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
12123 return NULL_TREE;
12124 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12125 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
12126 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
12127 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
12128 arg0 ? arg0 : TREE_OPERAND (exp, 0),
12129 arg1 ? arg1 : TREE_OPERAND (exp, 1));
12130 break;
12132 default:
12133 break;
12135 return NULL_TREE;
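/* Illustrative sketch (an example, not part of this file): when only
   the magnitude of the result matters -- say as the argument of fabs
   -- the routine above rewrites (-x) * y into x * y by stripping the
   NEGATE_EXPR, and it returns NULL_TREE when sign-dependent rounding
   is honored, since dropping signs could then change the magnitude of
   the rounded result as well.  */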