/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as combining them under
   AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
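
/* Worked example (editorial note, not part of the original source):
   with 8-bit values, 0x7f + 0x01 == 0x80 overflows.  Here ~(a ^ b) has
   the sign bit set (the addends agree in sign) and (a ^ sum) has the
   sign bit set (the sum disagrees with them), so the macro's AND is
   negative and overflow is reported.  For 0x7f + 0xff == 0x7e the
   addends already differ in sign, ~(a ^ b) has a clear sign bit, and
   no overflow is reported.  */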
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
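
/* Editorial example (assuming a 32-bit HOST_WIDE_INT, so BASE is
   0x10000): for x == 0x12345678, LOWPART (x) == 0x5678 and
   HIGHPART (x) == 0x1234, and LOWPART (x) + HIGHPART (x) * BASE
   reassembles x.  Because each half-word is at most 0xFFFF, the
   product of any two of them fits in an unsigned HOST_WIDE_INT,
   which is what makes the digit-by-digit arithmetic below safe.  */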
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
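
/* Usage sketch (editorial, assuming a 32-bit HOST_WIDE_INT):

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, 0x89abcdef, 0x1234);
     decode (w, &lo, &hi);

   leaves w[] == { 0xcdef, 0x89ab, 0x1234, 0 }, lo == 0x89abcdef and
   hi == 0x1234: encode and decode are exact inverses.  */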
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
     CONST_OVERFLOWED is nonzero,
     or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension.  */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT) (-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
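
/* Editorial example: forcing the INTEGER_CST 0x1ff into an 8-bit
   unsigned type masks the value down to 0xff; since the bits changed,
   a fresh node is built.  With OVERFLOWABLE > 0 and a plain unsigned
   type no flag is set, while with OVERFLOWABLE < 0 both TREE_OVERFLOW
   and TREE_CONSTANT_OVERFLOW are set on a copy of the node.  */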
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
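
/* Usage sketch (editorial, assuming a 32-bit HOST_WIDE_INT):

     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     int ovf = add_double (0xffffffff, 0, 1, 0, &lo, &hi);

   yields lo == 0, hi == 1 and ovf == 0: the (l < l1) term above is
   the carry-out of the low-word addition, and OVERFLOW_SUM_SIGN only
   reports signed overflow of the combined doubleword value.  */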
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
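
/* Editorial note: this is the two's complement identity -x == ~x + 1
   applied word-wise.  When L1 is nonzero, -l1 == ~l1 + 1 produces no
   carry out of the low word, so the high word is simply ~h1 and no
   overflow is possible.  When L1 is zero the carry propagates, the
   high word becomes -h1, and the only overflowing input is the most
   negative value, whose negation equals itself; (*hv & h1) < 0
   detects exactly that case.  */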
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
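
/* Editorial note: the bounds quoted in the inner loop above assume
   16-bit half-words (a 32-bit HOST_WIDE_INT): each digit is at most
   0xFFFF, so arg1[i] * arg2[j] <= 0xFFFE0001, and adding the carry
   and prod[k] (each < BASE) still fits in 0xFFFFFFFF.  The overflow
   test works because prod[4..7] hold the exact top half of the
   8-digit product; for a correct signed result they must all equal
   the sign extension of the low half, i.e. all zeros when *HV is
   nonnegative and all ones when *HV is negative.  */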
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
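
/* Editorial note: the double shift

     l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1

   used above is equivalent to l1 >> (HOST_BITS_PER_WIDE_INT - count)
   except that it stays well defined when COUNT is 0, where the single
   shift would be by the full word width, which C leaves undefined.
   rshift_double below uses the mirror-image trick for its left
   shift.  */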
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order divisor digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;	/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
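
/* Editorial example of the rounding modes, for -7 divided by 2:
   TRUNC_DIV_EXPR yields quo == -3, rem == -1 (round toward zero);
   FLOOR_DIV_EXPR adjusts to quo == -4, rem == 1 (toward negative
   infinity); CEIL_DIV_EXPR leaves quo == -3 (toward positive infinity,
   no adjustment for a negative ratio); and ROUND_DIV_EXPR compares
   2 * |rem| against |den| (2 >= 2 here) and moves away from zero to
   quo == -4.  */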
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
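
/* Editorial example: for a 32-bit signed type the only constant this
   function rejects is 0x80000000 (INT_MIN), whose negation is not
   representable; the final comparison tests for exactly that single
   bit pattern, 1 << (prec - 1).  */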
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 0));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 1));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, in which case we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal,
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
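
/* Editorial example: splitting the tree for `a - 4' with CODE ==
   PLUS_EXPR matches the MINUS_EXPR case, so the INTEGER_CST 4 becomes
   the literal; because it was subtracted it is returned through
   *MINUS_LITP rather than being negated, and `a' is returned as the
   variable part with *CONP left null.  */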
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
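
/* Usage sketch (editorial): folding 7 / 2 at compile time,

     tree t = int_const_binop (TRUNC_DIV_EXPR,
			       build_int_cst (integer_type_node, 7),
			       build_int_cst (integer_type_node, 2), 0);

   produces the INTEGER_CST 3 via the single-word shortcut, since both
   operands are small and nonnegative.  Division by a zero constant
   returns NULL_TREE instead of folding.  */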
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */

      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */

      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree t1, t2, real, imag;
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t1 = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	    t2 = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      {
		real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
		imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
	      }
	    else
	      {
		real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
		imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
		if (!real || !imag)
		  return NULL_TREE;
	      }

	    t = build_complex (type, real, imag);
	  }
	  break;

	default:
	  return NULL_TREE;
	}
      return t;
    }
  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
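
/* Editorial example: size_diffop on the sizetype constants 2 and 6
   takes the constant path.  Since 2 < 6 it computes 6 - 2 == 4 in the
   unsigned type, converts to ssizetype, and returns 0 - 4 == -4, so
   the negative difference is produced without relying on unsigned
   wraparound.  */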
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer.  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1974 /* Convert expression ARG to type TYPE. Used by the middle-end for
1975 simple conversions in preference to calling the front-end's convert. */
1977 tree
1978 fold_convert (tree type, tree arg)
1980 tree orig = TREE_TYPE (arg);
1981 tree tem;
1983 if (type == orig)
1984 return arg;
1986 if (TREE_CODE (arg) == ERROR_MARK
1987 || TREE_CODE (type) == ERROR_MARK
1988 || TREE_CODE (orig) == ERROR_MARK)
1989 return error_mark_node;
1991 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1992 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1993 TYPE_MAIN_VARIANT (orig)))
1994 return fold_build1 (NOP_EXPR, type, arg);
1996 switch (TREE_CODE (type))
1998 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1999 case POINTER_TYPE: case REFERENCE_TYPE:
2000 case OFFSET_TYPE:
2001 if (TREE_CODE (arg) == INTEGER_CST)
2003 tem = fold_convert_const (NOP_EXPR, type, arg);
2004 if (tem != NULL_TREE)
2005 return tem;
2007 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2008 || TREE_CODE (orig) == OFFSET_TYPE)
2009 return fold_build1 (NOP_EXPR, type, arg);
2010 if (TREE_CODE (orig) == COMPLEX_TYPE)
2012 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2013 return fold_convert (type, tem);
2015 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2016 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2017 return fold_build1 (NOP_EXPR, type, arg);
2019 case REAL_TYPE:
2020 if (TREE_CODE (arg) == INTEGER_CST)
2022 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2023 if (tem != NULL_TREE)
2024 return tem;
2026 else if (TREE_CODE (arg) == REAL_CST)
2028 tem = fold_convert_const (NOP_EXPR, type, arg);
2029 if (tem != NULL_TREE)
2030 return tem;
2033 switch (TREE_CODE (orig))
2035 case INTEGER_TYPE: case CHAR_TYPE:
2036 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2037 case POINTER_TYPE: case REFERENCE_TYPE:
2038 return fold_build1 (FLOAT_EXPR, type, arg);
2040 case REAL_TYPE:
2041 return fold_build1 (NOP_EXPR, type, arg);
2043 case COMPLEX_TYPE:
2044 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2045 return fold_convert (type, tem);
2047 default:
2048 gcc_unreachable ();
2051 case COMPLEX_TYPE:
2052 switch (TREE_CODE (orig))
2054 case INTEGER_TYPE: case CHAR_TYPE:
2055 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2056 case POINTER_TYPE: case REFERENCE_TYPE:
2057 case REAL_TYPE:
2058 return build2 (COMPLEX_EXPR, type,
2059 fold_convert (TREE_TYPE (type), arg),
2060 fold_convert (TREE_TYPE (type), integer_zero_node));
2061 case COMPLEX_TYPE:
2063 tree rpart, ipart;
2065 if (TREE_CODE (arg) == COMPLEX_EXPR)
2067 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2068 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2069 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2072 arg = save_expr (arg);
2073 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2074 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2075 rpart = fold_convert (TREE_TYPE (type), rpart);
2076 ipart = fold_convert (TREE_TYPE (type), ipart);
2077 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2080 default:
2081 gcc_unreachable ();
2084 case VECTOR_TYPE:
2085 if (integer_zerop (arg))
2086 return build_zero_vector (type);
2087 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2088 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2089 || TREE_CODE (orig) == VECTOR_TYPE);
2090 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2092 case VOID_TYPE:
2093 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2095 default:
2096 gcc_unreachable ();
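/* A minimal usage sketch (added example; A and B are hypothetical
   trees): middle-end code converts operands to a common type before
   building an expression, for instance

     tree a_w = fold_convert (long_integer_type_node, a);
     tree b_w = fold_convert (long_integer_type_node, b);
     tree sum = fold_build2 (PLUS_EXPR, long_integer_type_node, a_w, b_w);

   If A is an INTEGER_CST the conversion folds immediately through
   fold_convert_const; otherwise a NOP_EXPR (or FLOAT_EXPR, etc.) is
   built as in the switch above.  */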
2100 /* Return false if expr can be assumed not to be an lvalue, true
2101 otherwise. */
2103 static bool
2104 maybe_lvalue_p (tree x)
2106 /* We only need to wrap lvalue tree codes. */
2107 switch (TREE_CODE (x))
2109 case VAR_DECL:
2110 case PARM_DECL:
2111 case RESULT_DECL:
2112 case LABEL_DECL:
2113 case FUNCTION_DECL:
2114 case SSA_NAME:
2116 case COMPONENT_REF:
2117 case INDIRECT_REF:
2118 case ALIGN_INDIRECT_REF:
2119 case MISALIGNED_INDIRECT_REF:
2120 case ARRAY_REF:
2121 case ARRAY_RANGE_REF:
2122 case BIT_FIELD_REF:
2123 case OBJ_TYPE_REF:
2125 case REALPART_EXPR:
2126 case IMAGPART_EXPR:
2127 case PREINCREMENT_EXPR:
2128 case PREDECREMENT_EXPR:
2129 case SAVE_EXPR:
2130 case TRY_CATCH_EXPR:
2131 case WITH_CLEANUP_EXPR:
2132 case COMPOUND_EXPR:
2133 case MODIFY_EXPR:
2134 case TARGET_EXPR:
2135 case COND_EXPR:
2136 case BIND_EXPR:
2137 case MIN_EXPR:
2138 case MAX_EXPR:
2139 break;
2141 default:
2142 /* Assume the worst for front-end tree codes. */
2143 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2144 break;
2145 return false;
2148 return true;
2151 /* Return an expr equal to X but certainly not valid as an lvalue. */
2153 tree
2154 non_lvalue (tree x)
2156 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2157 us. */
2158 if (in_gimple_form)
2159 return x;
2161 if (! maybe_lvalue_p (x))
2162 return x;
2163 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2166 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2167 Zero means allow extended lvalues. */
2169 int pedantic_lvalues;
2171 /* When pedantic, return an expr equal to X but certainly not valid as a
2172 pedantic lvalue. Otherwise, return X. */
2174 static tree
2175 pedantic_non_lvalue (tree x)
2177 if (pedantic_lvalues)
2178 return non_lvalue (x);
2179 else
2180 return x;
2183 /* Given a tree comparison code, return the code that is the logical inverse
2184 of the given code. It is not safe to do this for floating-point
2185 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS flag
2186 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2188 enum tree_code
2189 invert_tree_comparison (enum tree_code code, bool honor_nans)
2191 if (honor_nans && flag_trapping_math)
2192 return ERROR_MARK;
2194 switch (code)
2196 case EQ_EXPR:
2197 return NE_EXPR;
2198 case NE_EXPR:
2199 return EQ_EXPR;
2200 case GT_EXPR:
2201 return honor_nans ? UNLE_EXPR : LE_EXPR;
2202 case GE_EXPR:
2203 return honor_nans ? UNLT_EXPR : LT_EXPR;
2204 case LT_EXPR:
2205 return honor_nans ? UNGE_EXPR : GE_EXPR;
2206 case LE_EXPR:
2207 return honor_nans ? UNGT_EXPR : GT_EXPR;
2208 case LTGT_EXPR:
2209 return UNEQ_EXPR;
2210 case UNEQ_EXPR:
2211 return LTGT_EXPR;
2212 case UNGT_EXPR:
2213 return LE_EXPR;
2214 case UNGE_EXPR:
2215 return LT_EXPR;
2216 case UNLT_EXPR:
2217 return GE_EXPR;
2218 case UNLE_EXPR:
2219 return GT_EXPR;
2220 case ORDERED_EXPR:
2221 return UNORDERED_EXPR;
2222 case UNORDERED_EXPR:
2223 return ORDERED_EXPR;
2224 default:
2225 gcc_unreachable ();
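/* For example (added illustration): with NaNs honored, !(a < b) is
   not (a >= b), since both comparisons are false when either operand
   is NaN.  Hence

     invert_tree_comparison (LT_EXPR, false)  ->  GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   ->  UNGE_EXPR

   and if flag_trapping_math is also set the function returns
   ERROR_MARK instead, because UNGE_EXPR would drop the trap that
   LT_EXPR raises on unordered operands.  */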
2229 /* Similar, but return the comparison that results if the operands are
2230 swapped. This is safe for floating-point. */
2232 enum tree_code
2233 swap_tree_comparison (enum tree_code code)
2235 switch (code)
2237 case EQ_EXPR:
2238 case NE_EXPR:
2239 case ORDERED_EXPR:
2240 case UNORDERED_EXPR:
2241 case LTGT_EXPR:
2242 case UNEQ_EXPR:
2243 return code;
2244 case GT_EXPR:
2245 return LT_EXPR;
2246 case GE_EXPR:
2247 return LE_EXPR;
2248 case LT_EXPR:
2249 return GT_EXPR;
2250 case LE_EXPR:
2251 return GE_EXPR;
2252 case UNGT_EXPR:
2253 return UNLT_EXPR;
2254 case UNGE_EXPR:
2255 return UNLE_EXPR;
2256 case UNLT_EXPR:
2257 return UNGT_EXPR;
2258 case UNLE_EXPR:
2259 return UNGE_EXPR;
2260 default:
2261 gcc_unreachable ();
2266 /* Convert a comparison tree code from an enum tree_code representation
2267 into a compcode bit-based encoding. This function is the inverse of
2268 compcode_to_comparison. */
2270 static enum comparison_code
2271 comparison_to_compcode (enum tree_code code)
2273 switch (code)
2275 case LT_EXPR:
2276 return COMPCODE_LT;
2277 case EQ_EXPR:
2278 return COMPCODE_EQ;
2279 case LE_EXPR:
2280 return COMPCODE_LE;
2281 case GT_EXPR:
2282 return COMPCODE_GT;
2283 case NE_EXPR:
2284 return COMPCODE_NE;
2285 case GE_EXPR:
2286 return COMPCODE_GE;
2287 case ORDERED_EXPR:
2288 return COMPCODE_ORD;
2289 case UNORDERED_EXPR:
2290 return COMPCODE_UNORD;
2291 case UNLT_EXPR:
2292 return COMPCODE_UNLT;
2293 case UNEQ_EXPR:
2294 return COMPCODE_UNEQ;
2295 case UNLE_EXPR:
2296 return COMPCODE_UNLE;
2297 case UNGT_EXPR:
2298 return COMPCODE_UNGT;
2299 case LTGT_EXPR:
2300 return COMPCODE_LTGT;
2301 case UNGE_EXPR:
2302 return COMPCODE_UNGE;
2303 default:
2304 gcc_unreachable ();
2308 /* Convert a compcode bit-based encoding of a comparison operator back
2309 to GCC's enum tree_code representation. This function is the
2310 inverse of comparison_to_compcode. */
2312 static enum tree_code
2313 compcode_to_comparison (enum comparison_code code)
2315 switch (code)
2317 case COMPCODE_LT:
2318 return LT_EXPR;
2319 case COMPCODE_EQ:
2320 return EQ_EXPR;
2321 case COMPCODE_LE:
2322 return LE_EXPR;
2323 case COMPCODE_GT:
2324 return GT_EXPR;
2325 case COMPCODE_NE:
2326 return NE_EXPR;
2327 case COMPCODE_GE:
2328 return GE_EXPR;
2329 case COMPCODE_ORD:
2330 return ORDERED_EXPR;
2331 case COMPCODE_UNORD:
2332 return UNORDERED_EXPR;
2333 case COMPCODE_UNLT:
2334 return UNLT_EXPR;
2335 case COMPCODE_UNEQ:
2336 return UNEQ_EXPR;
2337 case COMPCODE_UNLE:
2338 return UNLE_EXPR;
2339 case COMPCODE_UNGT:
2340 return UNGT_EXPR;
2341 case COMPCODE_LTGT:
2342 return LTGT_EXPR;
2343 case COMPCODE_UNGE:
2344 return UNGE_EXPR;
2345 default:
2346 gcc_unreachable ();
2350 /* Return a tree for the comparison which is the combination of
2351 doing the AND or OR (depending on CODE) of the two operations LCODE
2352 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2353 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2354 if this makes the transformation invalid. */
2356 tree
2357 combine_comparisons (enum tree_code code, enum tree_code lcode,
2358 enum tree_code rcode, tree truth_type,
2359 tree ll_arg, tree lr_arg)
2361 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2362 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2363 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2364 enum comparison_code compcode;
2366 switch (code)
2368 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2369 compcode = lcompcode & rcompcode;
2370 break;
2372 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2373 compcode = lcompcode | rcompcode;
2374 break;
2376 default:
2377 return NULL_TREE;
2380 if (!honor_nans)
2382 /* Eliminate unordered comparisons, as well as LTGT and ORD
2383 which are not used unless the mode has NaNs. */
2384 compcode &= ~COMPCODE_UNORD;
2385 if (compcode == COMPCODE_LTGT)
2386 compcode = COMPCODE_NE;
2387 else if (compcode == COMPCODE_ORD)
2388 compcode = COMPCODE_TRUE;
2390 else if (flag_trapping_math)
2392 /* Check that the original operation and the optimized ones will trap
2393 under the same condition. */
2394 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2395 && (lcompcode != COMPCODE_EQ)
2396 && (lcompcode != COMPCODE_ORD);
2397 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2398 && (rcompcode != COMPCODE_EQ)
2399 && (rcompcode != COMPCODE_ORD);
2400 bool trap = (compcode & COMPCODE_UNORD) == 0
2401 && (compcode != COMPCODE_EQ)
2402 && (compcode != COMPCODE_ORD);
2404 /* In a short-circuited boolean expression the LHS might be
2405 such that the RHS, if evaluated, will never trap. For
2406 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2407 if neither x nor y is NaN. (This is a mixed blessing: for
2408 example, the expression above will never trap, hence
2409 optimizing it to x < y would be invalid). */
2410 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2411 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2412 rtrap = false;
2414 /* If the comparison was short-circuited, and only the RHS
2415 trapped, we may now generate a spurious trap. */
2416 if (rtrap && !ltrap
2417 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2418 return NULL_TREE;
2420 /* If we changed the conditions that cause a trap, we lose. */
2421 if ((ltrap || rtrap) != trap)
2422 return NULL_TREE;
2425 if (compcode == COMPCODE_TRUE)
2426 return constant_boolean_node (true, truth_type);
2427 else if (compcode == COMPCODE_FALSE)
2428 return constant_boolean_node (false, truth_type);
2429 else
2430 return fold_build2 (compcode_to_comparison (compcode),
2431 truth_type, ll_arg, lr_arg);
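/* Illustrative folds (added example) on identical operands A and B:

     (a < b) || (a == b)  ->  a <= b
     (a < b) && (a > b)   ->  false
     (a < b) || (a >= b)  ->  true      (integral operands only)

   For floating point, the NaN and trap analysis above may instead
   return NULL_TREE and leave the expression alone.  */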
2434 /* Return nonzero if CODE is a tree code that represents a truth value. */
2436 static int
2437 truth_value_p (enum tree_code code)
2439 return (TREE_CODE_CLASS (code) == tcc_comparison
2440 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2441 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2442 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2445 /* Return nonzero if two operands (typically of the same tree node)
2446 are necessarily equal. If either argument has side-effects this
2447 function returns zero. FLAGS modifies behavior as follows:
2449 If OEP_ONLY_CONST is set, only return nonzero for constants.
2450 This function tests whether the operands are indistinguishable;
2451 it does not test whether they are equal using C's == operation.
2452 The distinction is important for IEEE floating point, because
2453 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2454 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2456 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2457 even though it may hold multiple values during a function.
2458 This is because a GCC tree node guarantees that nothing else is
2459 executed between the evaluation of its "operands" (which may often
2460 be evaluated in arbitrary order). Hence if the operands themselves
2461 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2462 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2463 unset means assuming isochronic (or instantaneous) tree equivalence.
2464 Unless comparing arbitrary expression trees, such as from different
2465 statements, this flag can usually be left unset.
2467 If OEP_PURE_SAME is set, then pure functions with identical arguments
2468 are considered the same. It is used when the caller has other ways
2469 to ensure that global memory is unchanged in between. */
2471 int
2472 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2474 /* If either is ERROR_MARK, they aren't equal. */
2475 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2476 return 0;
2478 /* If the two types don't have the same signedness, then we can't consider
2479 them equal. We must check this before the STRIP_NOPS calls
2480 because they may change the signedness of the arguments. */
2481 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2482 return 0;
2484 STRIP_NOPS (arg0);
2485 STRIP_NOPS (arg1);
2487 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2488 /* This is needed for conversions and for COMPONENT_REF.
2489 Might as well play it safe and always test this. */
2490 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2491 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2492 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2493 return 0;
2495 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2496 We don't care about side effects in that case because the SAVE_EXPR
2497 takes care of that for us. In all other cases, two expressions are
2498 equal if they have no side effects. If we have two identical
2499 expressions with side effects that should be treated the same due
2500 to the only side effects being identical SAVE_EXPR's, that will
2501 be detected in the recursive calls below. */
2502 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2503 && (TREE_CODE (arg0) == SAVE_EXPR
2504 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2505 return 1;
2507 /* Next handle constant cases, those for which we can return 1 even
2508 if ONLY_CONST is set. */
2509 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2510 switch (TREE_CODE (arg0))
2512 case INTEGER_CST:
2513 return (! TREE_CONSTANT_OVERFLOW (arg0)
2514 && ! TREE_CONSTANT_OVERFLOW (arg1)
2515 && tree_int_cst_equal (arg0, arg1));
2517 case REAL_CST:
2518 return (! TREE_CONSTANT_OVERFLOW (arg0)
2519 && ! TREE_CONSTANT_OVERFLOW (arg1)
2520 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2521 TREE_REAL_CST (arg1)));
2523 case VECTOR_CST:
2525 tree v1, v2;
2527 if (TREE_CONSTANT_OVERFLOW (arg0)
2528 || TREE_CONSTANT_OVERFLOW (arg1))
2529 return 0;
2531 v1 = TREE_VECTOR_CST_ELTS (arg0);
2532 v2 = TREE_VECTOR_CST_ELTS (arg1);
2533 while (v1 && v2)
2535 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2536 flags))
2537 return 0;
2538 v1 = TREE_CHAIN (v1);
2539 v2 = TREE_CHAIN (v2);
2542 return v1 == v2;
2545 case COMPLEX_CST:
2546 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2547 flags)
2548 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2549 flags));
2551 case STRING_CST:
2552 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2553 && ! memcmp (TREE_STRING_POINTER (arg0),
2554 TREE_STRING_POINTER (arg1),
2555 TREE_STRING_LENGTH (arg0)));
2557 case ADDR_EXPR:
2558 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2559 0);
2560 default:
2561 break;
2564 if (flags & OEP_ONLY_CONST)
2565 return 0;
2567 /* Define macros to test an operand from arg0 and arg1 for equality and a
2568 variant that allows null and views null as being different from any
2569 non-null value. In the latter case, if either is null, then both
2570 must be; otherwise, do the normal comparison. */
2571 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2572 TREE_OPERAND (arg1, N), flags)
2574 #define OP_SAME_WITH_NULL(N) \
2575 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2576 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2578 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2580 case tcc_unary:
2581 /* Two conversions are equal only if signedness and modes match. */
2582 switch (TREE_CODE (arg0))
2584 case NOP_EXPR:
2585 case CONVERT_EXPR:
2586 case FIX_CEIL_EXPR:
2587 case FIX_TRUNC_EXPR:
2588 case FIX_FLOOR_EXPR:
2589 case FIX_ROUND_EXPR:
2590 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2591 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2592 return 0;
2593 break;
2594 default:
2595 break;
2598 return OP_SAME (0);
2601 case tcc_comparison:
2602 case tcc_binary:
2603 if (OP_SAME (0) && OP_SAME (1))
2604 return 1;
2606 /* For commutative ops, allow the other order. */
2607 return (commutative_tree_code (TREE_CODE (arg0))
2608 && operand_equal_p (TREE_OPERAND (arg0, 0),
2609 TREE_OPERAND (arg1, 1), flags)
2610 && operand_equal_p (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 0), flags));
2613 case tcc_reference:
2614 /* If either of the pointer (or reference) expressions we are
2615 dereferencing contain a side effect, these cannot be equal. */
2616 if (TREE_SIDE_EFFECTS (arg0)
2617 || TREE_SIDE_EFFECTS (arg1))
2618 return 0;
2620 switch (TREE_CODE (arg0))
2622 case INDIRECT_REF:
2623 case ALIGN_INDIRECT_REF:
2624 case MISALIGNED_INDIRECT_REF:
2625 case REALPART_EXPR:
2626 case IMAGPART_EXPR:
2627 return OP_SAME (0);
2629 case ARRAY_REF:
2630 case ARRAY_RANGE_REF:
2631 /* Operands 2 and 3 may be null. */
2632 return (OP_SAME (0)
2633 && OP_SAME (1)
2634 && OP_SAME_WITH_NULL (2)
2635 && OP_SAME_WITH_NULL (3));
2637 case COMPONENT_REF:
2638 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2639 may be NULL when we're called to compare MEM_EXPRs. */
2640 return OP_SAME_WITH_NULL (0)
2641 && OP_SAME (1)
2642 && OP_SAME_WITH_NULL (2);
2644 case BIT_FIELD_REF:
2645 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2647 default:
2648 return 0;
2651 case tcc_expression:
2652 switch (TREE_CODE (arg0))
2654 case ADDR_EXPR:
2655 case TRUTH_NOT_EXPR:
2656 return OP_SAME (0);
2658 case TRUTH_ANDIF_EXPR:
2659 case TRUTH_ORIF_EXPR:
2660 return OP_SAME (0) && OP_SAME (1);
2662 case TRUTH_AND_EXPR:
2663 case TRUTH_OR_EXPR:
2664 case TRUTH_XOR_EXPR:
2665 if (OP_SAME (0) && OP_SAME (1))
2666 return 1;
2668 /* Otherwise take into account this is a commutative operation. */
2669 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2670 TREE_OPERAND (arg1, 1), flags)
2671 && operand_equal_p (TREE_OPERAND (arg0, 1),
2672 TREE_OPERAND (arg1, 0), flags));
2674 case CALL_EXPR:
2675 /* If the CALL_EXPRs call different functions, then they
2676 clearly cannot be equal. */
2677 if (!OP_SAME (0))
2678 return 0;
2681 unsigned int cef = call_expr_flags (arg0);
2682 if (flags & OEP_PURE_SAME)
2683 cef &= ECF_CONST | ECF_PURE;
2684 else
2685 cef &= ECF_CONST;
2686 if (!cef)
2687 return 0;
2690 /* Now see if all the arguments are the same. operand_equal_p
2691 does not handle TREE_LIST, so we walk the operands here
2692 feeding them to operand_equal_p. */
2693 arg0 = TREE_OPERAND (arg0, 1);
2694 arg1 = TREE_OPERAND (arg1, 1);
2695 while (arg0 && arg1)
2697 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2698 flags))
2699 return 0;
2701 arg0 = TREE_CHAIN (arg0);
2702 arg1 = TREE_CHAIN (arg1);
2705 /* If we get here and both argument lists are exhausted
2706 then the CALL_EXPRs are equal. */
2707 return ! (arg0 || arg1);
2709 default:
2710 return 0;
2713 case tcc_declaration:
2714 /* Consider __builtin_sqrt equal to sqrt. */
2715 return (TREE_CODE (arg0) == FUNCTION_DECL
2716 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2717 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2718 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2720 default:
2721 return 0;
2724 #undef OP_SAME
2725 #undef OP_SAME_WITH_NULL
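/* Illustrative behavior (added example): given trees for the
   expressions below,

     a + b   vs.  b + a   ->  1  (commutative operands may swap)
     x++     vs.  x++     ->  0  (side effects, barring a shared
                                  SAVE_EXPR)
     0.0     vs. -0.0     ->  0  (REAL_VALUES_IDENTICAL distinguishes
                                  the sign of zero, although
                                  0.0 == -0.0 in C)  */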
2728 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2729 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2731 When in doubt, return 0. */
2733 static int
2734 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2736 int unsignedp1, unsignedpo;
2737 tree primarg0, primarg1, primother;
2738 unsigned int correct_width;
2740 if (operand_equal_p (arg0, arg1, 0))
2741 return 1;
2743 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2744 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2745 return 0;
2747 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2748 and see if the inner values are the same. This removes any
2749 signedness comparison, which doesn't matter here. */
2750 primarg0 = arg0, primarg1 = arg1;
2751 STRIP_NOPS (primarg0);
2752 STRIP_NOPS (primarg1);
2753 if (operand_equal_p (primarg0, primarg1, 0))
2754 return 1;
2756 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2757 actual comparison operand, ARG0.
2759 First throw away any conversions to wider types
2760 already present in the operands. */
2762 primarg1 = get_narrower (arg1, &unsignedp1);
2763 primother = get_narrower (other, &unsignedpo);
2765 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2766 if (unsignedp1 == unsignedpo
2767 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2768 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2770 tree type = TREE_TYPE (arg0);
2772 /* Make sure shorter operand is extended the right way
2773 to match the longer operand. */
2774 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2775 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2777 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2778 return 1;
2781 return 0;
2784 /* See if ARG is an expression that is either a comparison or is performing
2785 arithmetic on comparisons. The comparisons must only be comparing
2786 two different values, which will be stored in *CVAL1 and *CVAL2; if
2787 they are nonzero it means that some operands have already been found.
2788 No variables may be used anywhere else in the expression except in the
2789 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2790 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2792 If this is true, return 1. Otherwise, return zero. */
2794 static int
2795 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2797 enum tree_code code = TREE_CODE (arg);
2798 enum tree_code_class class = TREE_CODE_CLASS (code);
2800 /* We can handle some of the tcc_expression cases here. */
2801 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2802 class = tcc_unary;
2803 else if (class == tcc_expression
2804 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2805 || code == COMPOUND_EXPR))
2806 class = tcc_binary;
2808 else if (class == tcc_expression && code == SAVE_EXPR
2809 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2811 /* If we've already found a CVAL1 or CVAL2, this expression is
2812 too complex to handle. */
2813 if (*cval1 || *cval2)
2814 return 0;
2816 class = tcc_unary;
2817 *save_p = 1;
2820 switch (class)
2822 case tcc_unary:
2823 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2825 case tcc_binary:
2826 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2827 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2828 cval1, cval2, save_p));
2830 case tcc_constant:
2831 return 1;
2833 case tcc_expression:
2834 if (code == COND_EXPR)
2835 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2836 cval1, cval2, save_p)
2837 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2838 cval1, cval2, save_p)
2839 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2840 cval1, cval2, save_p));
2841 return 0;
2843 case tcc_comparison:
2844 /* First see if we can handle the first operand, then the second. For
2845 the second operand, we know *CVAL1 can't be zero. It must be that
2846 one side of the comparison is each of the values; test for the
2847 case where this isn't true by failing if the two operands
2848 are the same. */
2850 if (operand_equal_p (TREE_OPERAND (arg, 0),
2851 TREE_OPERAND (arg, 1), 0))
2852 return 0;
2854 if (*cval1 == 0)
2855 *cval1 = TREE_OPERAND (arg, 0);
2856 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2858 else if (*cval2 == 0)
2859 *cval2 = TREE_OPERAND (arg, 0);
2860 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2862 else
2863 return 0;
2865 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2867 else if (*cval2 == 0)
2868 *cval2 = TREE_OPERAND (arg, 1);
2869 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2871 else
2872 return 0;
2874 return 1;
2876 default:
2877 return 0;
2881 /* ARG is a tree that is known to contain just arithmetic operations and
2882 comparisons. Evaluate the operations in the tree substituting NEW0 for
2883 any occurrence of OLD0 as an operand of a comparison and likewise for
2884 NEW1 and OLD1. */
2886 static tree
2887 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2889 tree type = TREE_TYPE (arg);
2890 enum tree_code code = TREE_CODE (arg);
2891 enum tree_code_class class = TREE_CODE_CLASS (code);
2893 /* We can handle some of the tcc_expression cases here. */
2894 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2895 class = tcc_unary;
2896 else if (class == tcc_expression
2897 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2898 class = tcc_binary;
2900 switch (class)
2902 case tcc_unary:
2903 return fold_build1 (code, type,
2904 eval_subst (TREE_OPERAND (arg, 0),
2905 old0, new0, old1, new1));
2907 case tcc_binary:
2908 return fold_build2 (code, type,
2909 eval_subst (TREE_OPERAND (arg, 0),
2910 old0, new0, old1, new1),
2911 eval_subst (TREE_OPERAND (arg, 1),
2912 old0, new0, old1, new1));
2914 case tcc_expression:
2915 switch (code)
2917 case SAVE_EXPR:
2918 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2920 case COMPOUND_EXPR:
2921 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2923 case COND_EXPR:
2924 return fold_build3 (code, type,
2925 eval_subst (TREE_OPERAND (arg, 0),
2926 old0, new0, old1, new1),
2927 eval_subst (TREE_OPERAND (arg, 1),
2928 old0, new0, old1, new1),
2929 eval_subst (TREE_OPERAND (arg, 2),
2930 old0, new0, old1, new1));
2931 default:
2932 break;
2934 /* Fall through - ??? */
2936 case tcc_comparison:
2938 tree arg0 = TREE_OPERAND (arg, 0);
2939 tree arg1 = TREE_OPERAND (arg, 1);
2941 /* We need to check both for exact equality and tree equality. The
2942 former will be true if the operand has a side-effect. In that
2943 case, we know the operand occurred exactly once. */
2945 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2946 arg0 = new0;
2947 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2948 arg0 = new1;
2950 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2951 arg1 = new0;
2952 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2953 arg1 = new1;
2955 return fold_build2 (code, type, arg0, arg1);
2958 default:
2959 return arg;
2963 /* Return a tree for the case when the result of an expression is RESULT
2964 converted to TYPE and OMITTED was previously an operand of the expression
2965 but is now not needed (e.g., we folded OMITTED * 0).
2967 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2968 the conversion of RESULT to TYPE. */
2970 tree
2971 omit_one_operand (tree type, tree result, tree omitted)
2973 tree t = fold_convert (type, result);
2975 if (TREE_SIDE_EFFECTS (omitted))
2976 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2978 return non_lvalue (t);
2981 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2983 static tree
2984 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2986 tree t = fold_convert (type, result);
2988 if (TREE_SIDE_EFFECTS (omitted))
2989 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2991 return pedantic_non_lvalue (t);
2994 /* Return a tree for the case when the result of an expression is RESULT
2995 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2996 of the expression but are now not needed.
2998 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2999 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3000 evaluated before OMITTED2. Otherwise, if neither has side effects,
3001 just do the conversion of RESULT to TYPE. */
3003 tree
3004 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3006 tree t = fold_convert (type, result);
3008 if (TREE_SIDE_EFFECTS (omitted2))
3009 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3010 if (TREE_SIDE_EFFECTS (omitted1))
3011 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3013 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
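/* For example (added illustration of omit_one_operand above): when
   folding f(x) * 0 the call cannot simply be dropped if it has side
   effects;

     omit_one_operand (type, integer_zero_node, call)

   then builds the COMPOUND_EXPR (call, 0), which still evaluates the
   call, while a side-effect-free operand is discarded and only the
   converted constant is returned, wrapped so it is not an lvalue.  */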
3017 /* Return a simplified tree node for the truth-negation of ARG. This
3018 never alters ARG itself. We assume that ARG is an operation that
3019 returns a truth value (0 or 1).
3021 FIXME: one would think we would fold the result, but it causes
3022 problems with the dominator optimizer. */
3023 tree
3024 invert_truthvalue (tree arg)
3026 tree type = TREE_TYPE (arg);
3027 enum tree_code code = TREE_CODE (arg);
3029 if (code == ERROR_MARK)
3030 return arg;
3032 /* If this is a comparison, we can simply invert it, except for
3033 floating-point non-equality comparisons, in which case we just
3034 enclose a TRUTH_NOT_EXPR around what we have. */
3036 if (TREE_CODE_CLASS (code) == tcc_comparison)
3038 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3039 if (FLOAT_TYPE_P (op_type)
3040 && flag_trapping_math
3041 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3042 && code != NE_EXPR && code != EQ_EXPR)
3043 return build1 (TRUTH_NOT_EXPR, type, arg);
3044 else
3046 code = invert_tree_comparison (code,
3047 HONOR_NANS (TYPE_MODE (op_type)));
3048 if (code == ERROR_MARK)
3049 return build1 (TRUTH_NOT_EXPR, type, arg);
3050 else
3051 return build2 (code, type,
3052 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3056 switch (code)
3058 case INTEGER_CST:
3059 return constant_boolean_node (integer_zerop (arg), type);
3061 case TRUTH_AND_EXPR:
3062 return build2 (TRUTH_OR_EXPR, type,
3063 invert_truthvalue (TREE_OPERAND (arg, 0)),
3064 invert_truthvalue (TREE_OPERAND (arg, 1)));
3066 case TRUTH_OR_EXPR:
3067 return build2 (TRUTH_AND_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)),
3069 invert_truthvalue (TREE_OPERAND (arg, 1)));
3071 case TRUTH_XOR_EXPR:
3072 /* Here we can invert either operand. We invert the first operand
3073 unless the second operand is a TRUTH_NOT_EXPR in which case our
3074 result is the XOR of the first operand with the inside of the
3075 negation of the second operand. */
3077 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3078 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3079 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3080 else
3081 return build2 (TRUTH_XOR_EXPR, type,
3082 invert_truthvalue (TREE_OPERAND (arg, 0)),
3083 TREE_OPERAND (arg, 1));
3085 case TRUTH_ANDIF_EXPR:
3086 return build2 (TRUTH_ORIF_EXPR, type,
3087 invert_truthvalue (TREE_OPERAND (arg, 0)),
3088 invert_truthvalue (TREE_OPERAND (arg, 1)));
3090 case TRUTH_ORIF_EXPR:
3091 return build2 (TRUTH_ANDIF_EXPR, type,
3092 invert_truthvalue (TREE_OPERAND (arg, 0)),
3093 invert_truthvalue (TREE_OPERAND (arg, 1)));
3095 case TRUTH_NOT_EXPR:
3096 return TREE_OPERAND (arg, 0);
3098 case COND_EXPR:
3100 tree arg1 = TREE_OPERAND (arg, 1);
3101 tree arg2 = TREE_OPERAND (arg, 2);
3102 /* A COND_EXPR may have a throw as one operand, which
3103 then has void type. Just leave void operands
3104 as they are. */
3105 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3106 VOID_TYPE_P (TREE_TYPE (arg1))
3107 ? arg1 : invert_truthvalue (arg1),
3108 VOID_TYPE_P (TREE_TYPE (arg2))
3109 ? arg2 : invert_truthvalue (arg2));
3112 case COMPOUND_EXPR:
3113 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3114 invert_truthvalue (TREE_OPERAND (arg, 1)));
3116 case NON_LVALUE_EXPR:
3117 return invert_truthvalue (TREE_OPERAND (arg, 0));
3119 case NOP_EXPR:
3120 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3121 break;
3123 case CONVERT_EXPR:
3124 case FLOAT_EXPR:
3125 return build1 (TREE_CODE (arg), type,
3126 invert_truthvalue (TREE_OPERAND (arg, 0)));
3128 case BIT_AND_EXPR:
3129 if (!integer_onep (TREE_OPERAND (arg, 1)))
3130 break;
3131 return build2 (EQ_EXPR, type, arg,
3132 build_int_cst (type, 0));
3134 case SAVE_EXPR:
3135 return build1 (TRUTH_NOT_EXPR, type, arg);
3137 case CLEANUP_POINT_EXPR:
3138 return build1 (CLEANUP_POINT_EXPR, type,
3139 invert_truthvalue (TREE_OPERAND (arg, 0)));
3141 default:
3142 break;
3144 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3145 return build1 (TRUTH_NOT_EXPR, type, arg);
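/* Illustrative inversions (added example):

     !(a && b)         ->  !a || !b    (De Morgan)
     !(a < b)          ->  a >= b      (integral operands)
     !(x < y), float   ->  kept as a TRUTH_NOT_EXPR when trapping
                           math forbids rewriting to UNGE_EXPR (see
                           invert_tree_comparison above).  */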
3148 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3149 operands are another bit-wise operation with a common input. If so,
3150 distribute the bit operations to save an operation and possibly two if
3151 constants are involved. For example, convert
3152 (A | B) & (A | C) into A | (B & C)
3153 Further simplification will occur if B and C are constants.
3155 If this optimization cannot be done, 0 will be returned. */
3157 static tree
3158 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3160 tree common;
3161 tree left, right;
3163 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3164 || TREE_CODE (arg0) == code
3165 || (TREE_CODE (arg0) != BIT_AND_EXPR
3166 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3167 return 0;
3169 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3171 common = TREE_OPERAND (arg0, 0);
3172 left = TREE_OPERAND (arg0, 1);
3173 right = TREE_OPERAND (arg1, 1);
3175 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3177 common = TREE_OPERAND (arg0, 0);
3178 left = TREE_OPERAND (arg0, 1);
3179 right = TREE_OPERAND (arg1, 0);
3181 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3183 common = TREE_OPERAND (arg0, 1);
3184 left = TREE_OPERAND (arg0, 0);
3185 right = TREE_OPERAND (arg1, 1);
3187 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3189 common = TREE_OPERAND (arg0, 1);
3190 left = TREE_OPERAND (arg0, 0);
3191 right = TREE_OPERAND (arg1, 0);
3193 else
3194 return 0;
3196 return fold_build2 (TREE_CODE (arg0), type, common,
3197 fold_build2 (code, type, left, right));
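/* For example (added illustration):

     (x | 3) & (x | 5)  ->  x | (3 & 5)  ->  x | 1

   saving one bitwise operation and letting the constant
   subexpression fold to 1.  */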
3200 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3201 with code CODE. This optimization is unsafe. */
3202 static tree
3203 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3205 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3206 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3208 /* (A / C) +- (B / C) -> (A +- B) / C. */
3209 if (mul0 == mul1
3210 && operand_equal_p (TREE_OPERAND (arg0, 1),
3211 TREE_OPERAND (arg1, 1), 0))
3212 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3213 fold_build2 (code, type,
3214 TREE_OPERAND (arg0, 0),
3215 TREE_OPERAND (arg1, 0)),
3216 TREE_OPERAND (arg0, 1));
3218 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3219 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3220 TREE_OPERAND (arg1, 0), 0)
3221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3222 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3224 REAL_VALUE_TYPE r0, r1;
3225 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3226 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3227 if (!mul0)
3228 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3229 if (!mul1)
3230 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3231 real_arithmetic (&r0, code, &r0, &r1);
3232 return fold_build2 (MULT_EXPR, type,
3233 TREE_OPERAND (arg0, 0),
3234 build_real (type, r0));
3237 return NULL_TREE;
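/* Illustrative folds (added example; unsafe since rounding can
   differ, so callers are expected to guard it, per the comment
   above):

     a/c + b/c      ->  (a + b) / c
     a/2.0 - a/8.0  ->  a * (1/2.0 - 1/8.0)  ->  a * 0.375  */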
3240 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3241 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3243 static tree
3244 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3245 int unsignedp)
3247 tree result;
3249 if (bitpos == 0)
3251 tree size = TYPE_SIZE (TREE_TYPE (inner));
3252 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3253 || POINTER_TYPE_P (TREE_TYPE (inner)))
3254 && host_integerp (size, 0)
3255 && tree_low_cst (size, 0) == bitsize)
3256 return fold_convert (type, inner);
3259 result = build3 (BIT_FIELD_REF, type, inner,
3260 size_int (bitsize), bitsize_int (bitpos));
3262 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3264 return result;
3267 /* Optimize a bit-field compare.
3269 There are two cases: First is a compare against a constant and the
3270 second is a comparison of two items where the fields are at the same
3271 bit position relative to the start of a chunk (byte, halfword, word)
3272 large enough to contain it. In these cases we can avoid the shift
3273 implicit in bitfield extractions.
3275 For constants, we emit a compare of the shifted constant with the
3276 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3277 compared. For two fields at the same position, we do the ANDs with the
3278 similar mask and compare the result of the ANDs.
3280 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3281 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3282 are the left and right operands of the comparison, respectively.
3284 If the optimization described above can be done, we return the resulting
3285 tree. Otherwise we return zero. */
3287 static tree
3288 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3289 tree lhs, tree rhs)
3291 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3292 tree type = TREE_TYPE (lhs);
3293 tree signed_type, unsigned_type;
3294 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3295 enum machine_mode lmode, rmode, nmode;
3296 int lunsignedp, runsignedp;
3297 int lvolatilep = 0, rvolatilep = 0;
3298 tree linner, rinner = NULL_TREE;
3299 tree mask;
3300 tree offset;
3302 /* Get all the information about the extractions being done. If the bit size
3303 is the same as the size of the underlying object, we aren't doing an
3304 extraction at all and so can do nothing. We also don't want to
3305 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3306 then will no longer be able to replace it. */
3307 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3308 &lunsignedp, &lvolatilep, false);
3309 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3310 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3311 return 0;
3313 if (!const_p)
3315 /* If this is not a constant, we can only do something if bit positions,
3316 sizes, and signedness are the same. */
3317 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3318 &runsignedp, &rvolatilep, false);
3320 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3321 || lunsignedp != runsignedp || offset != 0
3322 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3323 return 0;
3326 /* See if we can find a mode to refer to this field. We should be able to,
3327 but fail if we can't. */
3328 nmode = get_best_mode (lbitsize, lbitpos,
3329 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3330 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3331 TYPE_ALIGN (TREE_TYPE (rinner))),
3332 word_mode, lvolatilep || rvolatilep);
3333 if (nmode == VOIDmode)
3334 return 0;
3336 /* Set signed and unsigned types of the precision of this mode for the
3337 shifts below. */
3338 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3339 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3341 /* Compute the bit position and size for the new reference and our offset
3342 within it. If the new reference is the same size as the original, we
3343 won't optimize anything, so return zero. */
3344 nbitsize = GET_MODE_BITSIZE (nmode);
3345 nbitpos = lbitpos & ~ (nbitsize - 1);
3346 lbitpos -= nbitpos;
3347 if (nbitsize == lbitsize)
3348 return 0;
3350 if (BYTES_BIG_ENDIAN)
3351 lbitpos = nbitsize - lbitsize - lbitpos;
3353 /* Make the mask to be used against the extracted field. */
3354 mask = build_int_cst (unsigned_type, -1);
3355 mask = force_fit_type (mask, 0, false, false);
3356 mask = fold_convert (unsigned_type, mask);
3357 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3358 mask = const_binop (RSHIFT_EXPR, mask,
3359 size_int (nbitsize - lbitsize - lbitpos), 0);
3361 if (! const_p)
3362 /* If not comparing with constant, just rework the comparison
3363 and return. */
3364 return build2 (code, compare_type,
3365 build2 (BIT_AND_EXPR, unsigned_type,
3366 make_bit_field_ref (linner, unsigned_type,
3367 nbitsize, nbitpos, 1),
3368 mask),
3369 build2 (BIT_AND_EXPR, unsigned_type,
3370 make_bit_field_ref (rinner, unsigned_type,
3371 nbitsize, nbitpos, 1),
3372 mask));
3374 /* Otherwise, we are handling the constant case. See if the constant is too
3375 big for the field. Warn and return a tree for 0 (false) if so. We do
3376 this not only for its own sake, but to avoid having to test for this
3377 error case below. If we didn't, we might generate wrong code.
3379 For unsigned fields, the constant shifted right by the field length should
3380 be all zero. For signed fields, the high-order bits should agree with
3381 the sign bit. */
3383 if (lunsignedp)
3385 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3386 fold_convert (unsigned_type, rhs),
3387 size_int (lbitsize), 0)))
3389 warning (0, "comparison is always %d due to width of bit-field",
3390 code == NE_EXPR);
3391 return constant_boolean_node (code == NE_EXPR, compare_type);
3394 else
3396 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3397 size_int (lbitsize - 1), 0);
3398 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3400 warning (0, "comparison is always %d due to width of bit-field",
3401 code == NE_EXPR);
3402 return constant_boolean_node (code == NE_EXPR, compare_type);
3406 /* Single-bit compares should always be against zero. */
3407 if (lbitsize == 1 && ! integer_zerop (rhs))
3409 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3410 rhs = build_int_cst (type, 0);
3413 /* Make a new bitfield reference, shift the constant over the
3414 appropriate number of bits and mask it with the computed mask
3415 (in case this was a signed field). If we changed it, make a new one. */
3416 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3417 if (lvolatilep)
3419 TREE_SIDE_EFFECTS (lhs) = 1;
3420 TREE_THIS_VOLATILE (lhs) = 1;
3423 rhs = const_binop (BIT_AND_EXPR,
3424 const_binop (LSHIFT_EXPR,
3425 fold_convert (unsigned_type, rhs),
3426 size_int (lbitpos), 0),
3427 mask, 0);
3429 return build2 (code, compare_type,
3430 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3431 rhs);
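/* A sketch of the effect (added illustration), for a hypothetical

     struct s { unsigned f : 3; } x;

   On a typical little-endian target the test x.f == 5 becomes,
   roughly,

     (CHUNK & MASK) == (5 << SHIFT)

   where CHUNK is the byte/halfword/word containing the field, and
   MASK and SHIFT are stand-ins for the mask and lbitpos computed
   above: a single masked compare instead of extract-then-compare.  */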
3434 /* Subroutine for fold_truthop: decode a field reference.
3436 If EXP is a comparison reference, we return the innermost reference.
3438 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3439 set to the starting bit number.
3441 If the innermost field can be completely contained in a mode-sized
3442 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3444 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3445 otherwise it is not changed.
3447 *PUNSIGNEDP is set to the signedness of the field.
3449 *PMASK is set to the mask used. This is either contained in a
3450 BIT_AND_EXPR or derived from the width of the field.
3452 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3454 Return 0 if this is not a component reference or is one that we can't
3455 do anything with. */
3457 static tree
3458 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3459 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3460 int *punsignedp, int *pvolatilep,
3461 tree *pmask, tree *pand_mask)
3463 tree outer_type = 0;
3464 tree and_mask = 0;
3465 tree mask, inner, offset;
3466 tree unsigned_type;
3467 unsigned int precision;
3469 /* All the optimizations using this function assume integer fields.
3470 There are problems with FP fields since the type_for_size call
3471 below can fail for, e.g., XFmode. */
3472 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3473 return 0;
3475 /* We are interested in the bare arrangement of bits, so strip everything
3476 that doesn't affect the machine mode. However, record the type of the
3477 outermost expression if it may matter below. */
3478 if (TREE_CODE (exp) == NOP_EXPR
3479 || TREE_CODE (exp) == CONVERT_EXPR
3480 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3481 outer_type = TREE_TYPE (exp);
3482 STRIP_NOPS (exp);
3484 if (TREE_CODE (exp) == BIT_AND_EXPR)
3486 and_mask = TREE_OPERAND (exp, 1);
3487 exp = TREE_OPERAND (exp, 0);
3488 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3489 if (TREE_CODE (and_mask) != INTEGER_CST)
3490 return 0;
3493 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3494 punsignedp, pvolatilep, false);
3495 if ((inner == exp && and_mask == 0)
3496 || *pbitsize < 0 || offset != 0
3497 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3498 return 0;
3500 /* If the number of bits in the reference is the same as the bitsize of
3501 the outer type, then the outer type gives the signedness. Otherwise
3502 (in case of a small bitfield) the signedness is unchanged. */
3503 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3504 *punsignedp = TYPE_UNSIGNED (outer_type);
3506 /* Compute the mask to access the bitfield. */
3507 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3508 precision = TYPE_PRECISION (unsigned_type);
3510 mask = build_int_cst (unsigned_type, -1);
3511 mask = force_fit_type (mask, 0, false, false);
3513 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3514 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3516 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3517 if (and_mask != 0)
3518 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3519 fold_convert (unsigned_type, and_mask), mask);
3521 *pmask = mask;
3522 *pand_mask = and_mask;
3523 return inner;
3526 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3527 bit positions. */
3529 static int
3530 all_ones_mask_p (tree mask, int size)
3532 tree type = TREE_TYPE (mask);
3533 unsigned int precision = TYPE_PRECISION (type);
3534 tree tmask;
3536 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3537 tmask = force_fit_type (tmask, 0, false, false);
3539 return
3540 tree_int_cst_equal (mask,
3541 const_binop (RSHIFT_EXPR,
3542 const_binop (LSHIFT_EXPR, tmask,
3543 size_int (precision - size),
3544 0),
3545 size_int (precision - size), 0));
3548 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3549 represents the sign bit of EXP's type. If EXP represents a sign
3550 or zero extension, also test VAL against the unextended type.
3551 The return value is the (sub)expression whose sign bit is VAL,
3552 or NULL_TREE otherwise. */
3554 static tree
3555 sign_bit_p (tree exp, tree val)
3557 unsigned HOST_WIDE_INT mask_lo, lo;
3558 HOST_WIDE_INT mask_hi, hi;
3559 int width;
3560 tree t;
3562 /* Tree EXP must have an integral type. */
3563 t = TREE_TYPE (exp);
3564 if (! INTEGRAL_TYPE_P (t))
3565 return NULL_TREE;
3567 /* Tree VAL must be an integer constant. */
3568 if (TREE_CODE (val) != INTEGER_CST
3569 || TREE_CONSTANT_OVERFLOW (val))
3570 return NULL_TREE;
3572 width = TYPE_PRECISION (t);
3573 if (width > HOST_BITS_PER_WIDE_INT)
3575 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3576 lo = 0;
3578 mask_hi = ((unsigned HOST_WIDE_INT) -1
3579 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3580 mask_lo = -1;
3582 else
3584 hi = 0;
3585 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3587 mask_hi = 0;
3588 mask_lo = ((unsigned HOST_WIDE_INT) -1
3589 >> (HOST_BITS_PER_WIDE_INT - width));
3592 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3593 treat VAL as if it were unsigned. */
3594 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3595 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3596 return exp;
3598 /* Handle extension from a narrower type. */
3599 if (TREE_CODE (exp) == NOP_EXPR
3600 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3601 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3603 return NULL_TREE;
3606 /* Subroutine for fold_truthop: determine if an operand is simple enough
3607 to be evaluated unconditionally. */
3609 static int
3610 simple_operand_p (tree exp)
3612 /* Strip any conversions that don't change the machine mode. */
3613 STRIP_NOPS (exp);
3615 return (CONSTANT_CLASS_P (exp)
3616 || TREE_CODE (exp) == SSA_NAME
3617 || (DECL_P (exp)
3618 && ! TREE_ADDRESSABLE (exp)
3619 && ! TREE_THIS_VOLATILE (exp)
3620 && ! DECL_NONLOCAL (exp)
3621 /* Don't regard global variables as simple. They may be
3622 allocated in ways unknown to the compiler (shared memory,
3623 #pragma weak, etc). */
3624 && ! TREE_PUBLIC (exp)
3625 && ! DECL_EXTERNAL (exp)
3626 /* Loading a static variable is unduly expensive, but global
3627 registers aren't expensive. */
3628 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3631 /* The following functions are subroutines to fold_range_test and allow it to
3632 try to change a logical combination of comparisons into a range test.
3634 For example, both
3635 X == 2 || X == 3 || X == 4 || X == 5
3636 and
3637 X >= 2 && X <= 5
3638 are converted to
3639 (unsigned) (X - 2) <= 3
3641 We describe each set of comparisons as being either inside or outside
3642 a range, using a variable named like IN_P, and then describe the
3643 range with a lower and upper bound. If one of the bounds is omitted,
3644 it represents either the highest or lowest value of the type.
3646 In the comments below, we represent a range by two numbers in brackets
3647 preceded by a "+" to designate being inside that range, or a "-" to
3648 designate being outside that range, so the condition can be inverted by
3649 flipping the prefix. An omitted bound is represented by a "-". For
3650 example, "- [-, 10]" means being outside the range starting at the lowest
3651 possible value and ending at 10, in other words, being greater than 10.
3652 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3653 always false.
3655 We set up things so that the missing bounds are handled in a consistent
3656 manner so neither a missing bound nor "true" and "false" need to be
3657 handled using a special case. */
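/* Worked example (added illustration): X == 2 || X == 3 || X == 4
   || X == 5 yields the range "+ [2, 5]"; shifting it down by the low
   bound gives

     (unsigned) (X - 2) <= 3

   Any X below 2 wraps around to a very large unsigned value and so
   also fails the comparison; one compare replaces four.  */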
3659 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3660 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3661 and UPPER1_P are nonzero if the respective argument is an upper bound
3662 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3663 must be specified for a comparison. ARG1 will be converted to ARG0's
3664 type if both are specified. */
3666 static tree
3667 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3668 tree arg1, int upper1_p)
3670 tree tem;
3671 int result;
3672 int sgn0, sgn1;
3674 /* If neither arg represents infinity, do the normal operation.
3675 Else, if not a comparison, return infinity. Else handle the special
3676 comparison rules. Note that most of the cases below won't occur, but
3677 are handled for consistency. */
3679 if (arg0 != 0 && arg1 != 0)
3681 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3682 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3683 STRIP_NOPS (tem);
3684 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3687 if (TREE_CODE_CLASS (code) != tcc_comparison)
3688 return 0;
3690 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3691 for neither. In real maths, we cannot assume open ended ranges are
3692 the same. But, this is computer arithmetic, where numbers are finite.
3693 We can therefore model a missing bound as a value Z beyond any
3694 representable number: -Z for a lower bound, +Z for an upper bound.
3695 This permits us to treat unbounded ranges as equal.
3696 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3697 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3698 switch (code)
3700 case EQ_EXPR:
3701 result = sgn0 == sgn1;
3702 break;
3703 case NE_EXPR:
3704 result = sgn0 != sgn1;
3705 break;
3706 case LT_EXPR:
3707 result = sgn0 < sgn1;
3708 break;
3709 case LE_EXPR:
3710 result = sgn0 <= sgn1;
3711 break;
3712 case GT_EXPR:
3713 result = sgn0 > sgn1;
3714 break;
3715 case GE_EXPR:
3716 result = sgn0 >= sgn1;
3717 break;
3718 default:
3719 gcc_unreachable ();
3722 return constant_boolean_node (result, type);
3725 /* Given EXP, a logical expression, set the range it is testing into
3726 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3727 actually being tested. *PLOW and *PHIGH will be made of the same type
3728 as the returned expression. If EXP is not a comparison, we will most
3729 likely not be returning a useful value and range. */
3731 static tree
3732 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3734 enum tree_code code;
3735 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3736 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3737 int in_p, n_in_p;
3738 tree low, high, n_low, n_high;
3740 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3741 and see if we can refine the range. Some of the cases below may not
3742 happen, but it doesn't seem worth worrying about this. We "continue"
3743 the outer loop when we've changed something; otherwise we "break"
3744 the switch, which will "break" the while. */
3746 in_p = 0;
3747 low = high = build_int_cst (TREE_TYPE (exp), 0);
3749 while (1)
3751 code = TREE_CODE (exp);
3752 exp_type = TREE_TYPE (exp);
3754 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3756 if (TREE_CODE_LENGTH (code) > 0)
3757 arg0 = TREE_OPERAND (exp, 0);
3758 if (TREE_CODE_CLASS (code) == tcc_comparison
3759 || TREE_CODE_CLASS (code) == tcc_unary
3760 || TREE_CODE_CLASS (code) == tcc_binary)
3761 arg0_type = TREE_TYPE (arg0);
3762 if (TREE_CODE_CLASS (code) == tcc_binary
3763 || TREE_CODE_CLASS (code) == tcc_comparison
3764 || (TREE_CODE_CLASS (code) == tcc_expression
3765 && TREE_CODE_LENGTH (code) > 1))
3766 arg1 = TREE_OPERAND (exp, 1);
3769 switch (code)
3771 case TRUTH_NOT_EXPR:
3772 in_p = ! in_p, exp = arg0;
3773 continue;
3775 case EQ_EXPR: case NE_EXPR:
3776 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3777 /* We can only do something if the range is testing for zero
3778 and if the second operand is an integer constant. Note that
3779 saying something is "in" the range we make is done by
3780 complementing IN_P since it will set in the initial case of
3781 being not equal to zero; "out" is leaving it alone. */
3782 if (low == 0 || high == 0
3783 || ! integer_zerop (low) || ! integer_zerop (high)
3784 || TREE_CODE (arg1) != INTEGER_CST)
3785 break;
3787 switch (code)
3789 case NE_EXPR: /* - [c, c] */
3790 low = high = arg1;
3791 break;
3792 case EQ_EXPR: /* + [c, c] */
3793 in_p = ! in_p, low = high = arg1;
3794 break;
3795 case GT_EXPR: /* - [-, c] */
3796 low = 0, high = arg1;
3797 break;
3798 case GE_EXPR: /* + [c, -] */
3799 in_p = ! in_p, low = arg1, high = 0;
3800 break;
3801 case LT_EXPR: /* - [c, -] */
3802 low = arg1, high = 0;
3803 break;
3804 case LE_EXPR: /* + [-, c] */
3805 in_p = ! in_p, low = 0, high = arg1;
3806 break;
3807 default:
3808 gcc_unreachable ();
3811 /* If this is an unsigned comparison, we also know that EXP is
3812 greater than or equal to zero. We base the range tests we make
3813 on that fact, and record it here so that we can parse existing
3814 range tests. We test arg0_type since often the return type of,
3815 e.g., EQ_EXPR, is boolean. */
3816 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3818 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3819 in_p, low, high, 1,
3820 build_int_cst (arg0_type, 0),
3821 NULL_TREE))
3822 break;
3824 in_p = n_in_p, low = n_low, high = n_high;
3826 /* If the high bound is missing, but we have a nonzero low
3827 bound, reverse the range so it goes from zero to the low bound
3828 minus 1. */
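/* E.g. an unsigned "+ [5, -]" becomes "- [0, 4]". */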
3829 if (high == 0 && low && ! integer_zerop (low))
3831 in_p = ! in_p;
3832 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3833 integer_one_node, 0);
3834 low = build_int_cst (arg0_type, 0);
3838 exp = arg0;
3839 continue;
3841 case NEGATE_EXPR:
3842 /* (-x) IN [a,b] -> x in [-b, -a] */
3843 n_low = range_binop (MINUS_EXPR, exp_type,
3844 build_int_cst (exp_type, 0),
3845 0, high, 1);
3846 n_high = range_binop (MINUS_EXPR, exp_type,
3847 build_int_cst (exp_type, 0),
3848 0, low, 0);
3849 low = n_low, high = n_high;
3850 exp = arg0;
3851 continue;
3853 case BIT_NOT_EXPR:
3854 /* ~ X -> -X - 1 */
3855 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3856 build_int_cst (exp_type, 1));
3857 continue;
3859 case PLUS_EXPR: case MINUS_EXPR:
3860 if (TREE_CODE (arg1) != INTEGER_CST)
3861 break;
3863 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3864 move a constant to the other side. */
3865 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3866 break;
3868 /* If EXP is signed, any overflow in the computation is undefined,
3869 so we don't worry about it so long as our computations on
3870 the bounds don't overflow. For unsigned, overflow is defined
3871 and this is exactly the right thing. */
3872 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3873 arg0_type, low, 0, arg1, 0);
3874 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3875 arg0_type, high, 1, arg1, 0);
3876 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3877 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3878 break;
3880 /* Check for an unsigned range which has wrapped around the maximum
3881 value thus making n_high < n_low, and normalize it. */
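/* E.g. for unsigned char, a wrapped + [250, 5] becomes - [6, 249]. */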
3882 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3884 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3885 integer_one_node, 0);
3886 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3887 integer_one_node, 0);
3889 /* If the range is of the form +/- [ x+1, x ], we won't
3890 be able to normalize it. But then, it represents the
3891 whole range or the empty set, so make it
3892 +/- [ -, - ]. */
3893 if (tree_int_cst_equal (n_low, low)
3894 && tree_int_cst_equal (n_high, high))
3895 low = high = 0;
3896 else
3897 in_p = ! in_p;
3899 else
3900 low = n_low, high = n_high;
3902 exp = arg0;
3903 continue;
3905 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3906 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3907 break;
3909 if (! INTEGRAL_TYPE_P (arg0_type)
3910 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3911 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3912 break;
3914 n_low = low, n_high = high;
3916 if (n_low != 0)
3917 n_low = fold_convert (arg0_type, n_low);
3919 if (n_high != 0)
3920 n_high = fold_convert (arg0_type, n_high);
3923 /* If we're converting ARG0 from an unsigned type to the signed type
3924 of EXP, we will be doing the comparison as unsigned. The tests
3925 above have already verified that LOW and HIGH are both positive.
3928 So we have to ensure that we will handle large unsigned values the
3929 same way that the current signed bounds treat negative values. */
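/* Concretely, assuming 8-bit chars: a test of EXP in [1, 10] must
   exclude the unsigned values 128..255, which a signed comparison
   would treat as negative, so the range is "and"ed with [0, 127]
   below. */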
3932 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3934 tree high_positive;
3935 tree equiv_type = lang_hooks.types.type_for_mode
3936 (TYPE_MODE (arg0_type), 1);
3938 /* A range without an upper bound is, naturally, unbounded.
3939 Since convert would have cropped a very large value, use
3940 the max value for the destination type. */
3941 high_positive
3942 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3943 : TYPE_MAX_VALUE (arg0_type);
3945 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3946 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3947 fold_convert (arg0_type,
3948 high_positive),
3949 fold_convert (arg0_type,
3950 integer_one_node));
3952 /* If the low bound is specified, "and" the range with the
3953 range for which the original unsigned value will be
3954 positive. */
3955 if (low != 0)
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 1, n_low, n_high, 1,
3959 fold_convert (arg0_type,
3960 integer_zero_node),
3961 high_positive))
3962 break;
3964 in_p = (n_in_p == in_p);
3966 else
3968 /* Otherwise, "or" the range with the range of the input
3969 that will be interpreted as negative. */
3970 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3971 0, n_low, n_high, 1,
3972 fold_convert (arg0_type,
3973 integer_zero_node),
3974 high_positive))
3975 break;
3977 in_p = (in_p != n_in_p);
3981 exp = arg0;
3982 low = n_low, high = n_high;
3983 continue;
3985 default:
3986 break;
3989 break;
3992 /* If EXP is a constant, we can evaluate whether this is true or false. */
3993 if (TREE_CODE (exp) == INTEGER_CST)
3995 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3996 exp, 0, low, 0))
3997 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3998 exp, 1, high, 1)));
3999 low = high = 0;
4000 exp = 0;
4003 *pin_p = in_p, *plow = low, *phigh = high;
4004 return exp;
4007 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4008 type, TYPE, return an expression to test if EXP is in (or out of, depending
4009 on IN_P) the range. Return 0 if the test couldn't be created. */
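/* E.g. build_range_check (type, x, 1, 1, 127) on an 8-bit X is
   expected to produce "(signed char) x > 0" via the special case
   below; with IN_P == 0 the result is simply inverted. */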
4011 static tree
4012 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4014 tree etype = TREE_TYPE (exp);
4015 tree value;
4017 #ifdef HAVE_canonicalize_funcptr_for_compare
4018 /* Disable this optimization for function pointer expressions
4019 on targets that require function pointer canonicalization. */
4020 if (HAVE_canonicalize_funcptr_for_compare
4021 && TREE_CODE (etype) == POINTER_TYPE
4022 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4023 return NULL_TREE;
4024 #endif
4026 if (! in_p)
4028 value = build_range_check (type, exp, 1, low, high);
4029 if (value != 0)
4030 return invert_truthvalue (value);
4032 return 0;
4035 if (low == 0 && high == 0)
4036 return build_int_cst (type, 1);
4038 if (low == 0)
4039 return fold_build2 (LE_EXPR, type, exp,
4040 fold_convert (etype, high));
4042 if (high == 0)
4043 return fold_build2 (GE_EXPR, type, exp,
4044 fold_convert (etype, low));
4046 if (operand_equal_p (low, high, 0))
4047 return fold_build2 (EQ_EXPR, type, exp,
4048 fold_convert (etype, low));
4050 if (integer_zerop (low))
4052 if (! TYPE_UNSIGNED (etype))
4054 etype = lang_hooks.types.unsigned_type (etype);
4055 high = fold_convert (etype, high);
4056 exp = fold_convert (etype, exp);
4058 return build_range_check (type, exp, 1, 0, high);
4061 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4062 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4064 unsigned HOST_WIDE_INT lo;
4065 HOST_WIDE_INT hi;
4066 int prec;
4068 prec = TYPE_PRECISION (etype);
4069 if (prec <= HOST_BITS_PER_WIDE_INT)
4071 hi = 0;
4072 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4074 else
4076 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4077 lo = (unsigned HOST_WIDE_INT) -1;
4080 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4082 if (TYPE_UNSIGNED (etype))
4084 etype = lang_hooks.types.signed_type (etype);
4085 exp = fold_convert (etype, exp);
4087 return fold_build2 (GT_EXPR, type, exp,
4088 build_int_cst (etype, 0));
4092 value = const_binop (MINUS_EXPR, high, low, 0);
4093 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4094 && ! TYPE_UNSIGNED (etype))
4096 tree utype, minv, maxv;
4098 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4099 for the type in question, as we rely on this here. */
4100 switch (TREE_CODE (etype))
4102 case INTEGER_TYPE:
4103 case ENUMERAL_TYPE:
4104 case CHAR_TYPE:
4105 /* There is no requirement that LOW be within the range of ETYPE
4106 if the latter is a subtype. It must, however, be within the base
4107 type of ETYPE. So be sure we do the subtraction in that type. */
4108 if (TREE_TYPE (etype))
4109 etype = TREE_TYPE (etype);
4110 utype = lang_hooks.types.unsigned_type (etype);
4111 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4112 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4113 integer_one_node, 1);
4114 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4115 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4116 minv, 1, maxv, 1)))
4118 etype = utype;
4119 high = fold_convert (etype, high);
4120 low = fold_convert (etype, low);
4121 exp = fold_convert (etype, exp);
4122 value = const_binop (MINUS_EXPR, high, low, 0);
4124 break;
4125 default:
4126 break;
4130 if (value != 0 && ! TREE_OVERFLOW (value))
4132 /* There is no requirement that LOW be within the range of ETYPE
4133 if the latter is a subtype. It must, however, be within the base
4134 type of ETYPE. So be sure we do the subtraction in that type. */
4135 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4137 etype = TREE_TYPE (etype);
4138 exp = fold_convert (etype, exp);
4139 low = fold_convert (etype, low);
4140 value = fold_convert (etype, value);
4143 return build_range_check (type,
4144 fold_build2 (MINUS_EXPR, etype, exp, low),
4145 1, build_int_cst (etype, 0), value);
4148 return 0;
4151 /* Given two ranges, see if we can merge them into one. Return 1 if we
4152 can, 0 if we can't. Set the output range into the specified parameters. */
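/* Worked examples: merging + [0, 4] with + [3, 9] (both "in") yields
   the intersection + [3, 4]; merging - [0, 4] with - [5, 9] (both
   "out") yields - [0, 9], since the two excluded ranges are
   adjacent. */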
4154 static int
4155 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4156 tree high0, int in1_p, tree low1, tree high1)
4158 int no_overlap;
4159 int subset;
4160 int temp;
4161 tree tem;
4162 int in_p;
4163 tree low, high;
4164 int lowequal = ((low0 == 0 && low1 == 0)
4165 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4166 low0, 0, low1, 0)));
4167 int highequal = ((high0 == 0 && high1 == 0)
4168 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4169 high0, 1, high1, 1)));
4171 /* Make range 0 be the range that starts first, or ends last if they
4172 start at the same value. Swap them if that is not already the case. */
4173 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4174 low0, 0, low1, 0))
4175 || (lowequal
4176 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4177 high1, 1, high0, 1))))
4179 temp = in0_p, in0_p = in1_p, in1_p = temp;
4180 tem = low0, low0 = low1, low1 = tem;
4181 tem = high0, high0 = high1, high1 = tem;
4184 /* Now flag two cases, whether the ranges are disjoint or whether the
4185 second range is totally subsumed in the first. Note that the tests
4186 below are simplified by the ones above. */
4187 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4188 high0, 1, low1, 0));
4189 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4190 high1, 1, high0, 1));
4192 /* We now have four cases, depending on whether we are including or
4193 excluding the two ranges. */
4194 if (in0_p && in1_p)
4196 /* If they don't overlap, the result is false. If the second range
4197 is a subset it is the result. Otherwise, the range is from the start
4198 of the second to the end of the first. */
4199 if (no_overlap)
4200 in_p = 0, low = high = 0;
4201 else if (subset)
4202 in_p = 1, low = low1, high = high1;
4203 else
4204 in_p = 1, low = low1, high = high0;
4207 else if (in0_p && ! in1_p)
4209 /* If they don't overlap, the result is the first range. If they are
4210 equal, the result is false. If the second range is a subset of the
4211 first, and the ranges begin at the same place, we go from just after
4212 the end of the first range to the end of the second. If the second
4213 range is not a subset of the first, or if it is a subset and both
4214 ranges end at the same place, the range starts at the start of the
4215 first range and ends just before the second range.
4216 Otherwise, we can't describe this as a single range. */
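/* E.g. + [0, 9] with - [0, 3] gives + [4, 9] (subset, equal lows),
   while + [0, 9] with - [5, 9] gives + [0, 4] (equal highs). */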
4217 if (no_overlap)
4218 in_p = 1, low = low0, high = high0;
4219 else if (lowequal && highequal)
4220 in_p = 0, low = high = 0;
4221 else if (subset && lowequal)
4223 in_p = 1, high = high0;
4224 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4225 integer_one_node, 0);
4227 else if (! subset || highequal)
4229 in_p = 1, low = low0;
4230 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4231 integer_one_node, 0);
4233 else
4234 return 0;
4237 else if (! in0_p && in1_p)
4239 /* If they don't overlap, the result is the second range. If the second
4240 is a subset of the first, the result is false. Otherwise,
4241 the range starts just after the first range and ends at the
4242 end of the second. */
4243 if (no_overlap)
4244 in_p = 1, low = low1, high = high1;
4245 else if (subset || highequal)
4246 in_p = 0, low = high = 0;
4247 else
4249 in_p = 1, high = high1;
4250 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4251 integer_one_node, 0);
4255 else
4257 /* The case where we are excluding both ranges. Here the complex case
4258 is if they don't overlap. In that case, the only time we have a
4259 range is if they are adjacent. If the second is a subset of the
4260 first, the result is the first. Otherwise, the range to exclude
4261 starts at the beginning of the first range and ends at the end of the
4262 second. */
4263 if (no_overlap)
4265 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4266 range_binop (PLUS_EXPR, NULL_TREE,
4267 high0, 1,
4268 integer_one_node, 1),
4269 1, low1, 0)))
4270 in_p = 0, low = low0, high = high1;
4271 else
4273 /* Canonicalize - [min, x] into - [-, x]. */
4274 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4275 switch (TREE_CODE (TREE_TYPE (low0)))
4277 case ENUMERAL_TYPE:
4278 if (TYPE_PRECISION (TREE_TYPE (low0))
4279 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4280 break;
4281 /* FALLTHROUGH */
4282 case INTEGER_TYPE:
4283 case CHAR_TYPE:
4284 if (tree_int_cst_equal (low0,
4285 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4286 low0 = 0;
4287 break;
4288 case POINTER_TYPE:
4289 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4290 && integer_zerop (low0))
4291 low0 = 0;
4292 break;
4293 default:
4294 break;
4297 /* Canonicalize - [x, max] into - [x, -]. */
4298 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4299 switch (TREE_CODE (TREE_TYPE (high1)))
4301 case ENUMERAL_TYPE:
4302 if (TYPE_PRECISION (TREE_TYPE (high1))
4303 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4304 break;
4305 /* FALLTHROUGH */
4306 case INTEGER_TYPE:
4307 case CHAR_TYPE:
4308 if (tree_int_cst_equal (high1,
4309 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4310 high1 = 0;
4311 break;
4312 case POINTER_TYPE:
4313 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4314 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4315 high1, 1,
4316 integer_one_node, 1)))
4317 high1 = 0;
4318 break;
4319 default:
4320 break;
4323 /* The ranges might also be adjacent between the maximum and
4324 minimum values of the given type. For
4325 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y,
4326 return + [x + 1, y - 1]. */
4327 if (low0 == 0 && high1 == 0)
4329 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4330 integer_one_node, 1);
4331 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4332 integer_one_node, 0);
4333 if (low == 0 || high == 0)
4334 return 0;
4336 in_p = 1;
4338 else
4339 return 0;
4342 else if (subset)
4343 in_p = 0, low = low0, high = high0;
4344 else
4345 in_p = 0, low = low0, high = high1;
4348 *pin_p = in_p, *plow = low, *phigh = high;
4349 return 1;
4353 /* Subroutine of fold, looking inside expressions of the form
4354 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4355 of the COND_EXPR. This function is also used to optimize
4356 A op B ? C : A, by reversing the comparison first.
4358 Return a folded expression whose code is not a COND_EXPR
4359 anymore, or NULL_TREE if no folding opportunity is found. */
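/* For example, "a > b ? a : b" becomes max (b, a), and
   "x < 0 ? x : -x" becomes -abs (x), subject to the signed-zero
   and NaN caveats spelled out below. */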
4361 static tree
4362 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4364 enum tree_code comp_code = TREE_CODE (arg0);
4365 tree arg00 = TREE_OPERAND (arg0, 0);
4366 tree arg01 = TREE_OPERAND (arg0, 1);
4367 tree arg1_type = TREE_TYPE (arg1);
4368 tree tem;
4370 STRIP_NOPS (arg1);
4371 STRIP_NOPS (arg2);
4373 /* If we have A op 0 ? A : -A, consider applying the following
4374 transformations:
4376 A == 0? A : -A same as -A
4377 A != 0? A : -A same as A
4378 A >= 0? A : -A same as abs (A)
4379 A > 0? A : -A same as abs (A)
4380 A <= 0? A : -A same as -abs (A)
4381 A < 0? A : -A same as -abs (A)
4383 None of these transformations work for modes with signed
4384 zeros. If A is +/-0, the first two transformations will
4385 change the sign of the result (from +0 to -0, or vice
4386 versa). The last four will fix the sign of the result,
4387 even though the original expressions could be positive or
4388 negative, depending on the sign of A.
4390 Note that all these transformations are correct if A is
4391 NaN, since the two alternatives (A and -A) are also NaNs. */
4392 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4393 ? real_zerop (arg01)
4394 : integer_zerop (arg01))
4395 && ((TREE_CODE (arg2) == NEGATE_EXPR
4396 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4397 /* In the case that A is of the form X-Y, '-A' (arg2) may
4398 have already been folded to Y-X, check for that. */
4399 || (TREE_CODE (arg1) == MINUS_EXPR
4400 && TREE_CODE (arg2) == MINUS_EXPR
4401 && operand_equal_p (TREE_OPERAND (arg1, 0),
4402 TREE_OPERAND (arg2, 1), 0)
4403 && operand_equal_p (TREE_OPERAND (arg1, 1),
4404 TREE_OPERAND (arg2, 0), 0))))
4405 switch (comp_code)
4407 case EQ_EXPR:
4408 case UNEQ_EXPR:
4409 tem = fold_convert (arg1_type, arg1);
4410 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4411 case NE_EXPR:
4412 case LTGT_EXPR:
4413 return pedantic_non_lvalue (fold_convert (type, arg1));
4414 case UNGE_EXPR:
4415 case UNGT_EXPR:
4416 if (flag_trapping_math)
4417 break;
4418 /* Fall through. */
4419 case GE_EXPR:
4420 case GT_EXPR:
4421 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4422 arg1 = fold_convert (lang_hooks.types.signed_type
4423 (TREE_TYPE (arg1)), arg1);
4424 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4425 return pedantic_non_lvalue (fold_convert (type, tem));
4426 case UNLE_EXPR:
4427 case UNLT_EXPR:
4428 if (flag_trapping_math)
4429 break;
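/* Fall through. */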
4430 case LE_EXPR:
4431 case LT_EXPR:
4432 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4433 arg1 = fold_convert (lang_hooks.types.signed_type
4434 (TREE_TYPE (arg1)), arg1);
4435 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4436 return negate_expr (fold_convert (type, tem));
4437 default:
4438 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4439 break;
4442 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4443 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4444 both transformations are correct when A is NaN: A != 0
4445 is then true, and A == 0 is false. */
4447 if (integer_zerop (arg01) && integer_zerop (arg2))
4449 if (comp_code == NE_EXPR)
4450 return pedantic_non_lvalue (fold_convert (type, arg1));
4451 else if (comp_code == EQ_EXPR)
4452 return build_int_cst (type, 0);
4455 /* Try some transformations of A op B ? A : B.
4457 A == B? A : B same as B
4458 A != B? A : B same as A
4459 A >= B? A : B same as max (A, B)
4460 A > B? A : B same as max (B, A)
4461 A <= B? A : B same as min (A, B)
4462 A < B? A : B same as min (B, A)
4464 As above, these transformations don't work in the presence
4465 of signed zeros. For example, if A and B are zeros of
4466 opposite sign, the first two transformations will change
4467 the sign of the result. In the last four, the original
4468 expressions give different results for (A=+0, B=-0) and
4469 (A=-0, B=+0), but the transformed expressions do not.
4471 The first two transformations are correct if either A or B
4472 is a NaN. In the first transformation, the condition will
4473 be false, and B will indeed be chosen. In the case of the
4474 second transformation, the condition A != B will be true,
4475 and A will be chosen.
4477 The conversions to max() and min() are not correct if B is
4478 a number and A is not. The conditions in the original
4479 expressions will be false, so all four give B. The min()
4480 and max() versions would give a NaN instead. */
4481 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4482 /* Avoid these transformations if the COND_EXPR may be used
4483 as an lvalue in the C++ front-end. PR c++/19199. */
4484 && (in_gimple_form
4485 || strcmp (lang_hooks.name, "GNU C++") != 0
4486 || ! maybe_lvalue_p (arg1)
4487 || ! maybe_lvalue_p (arg2)))
4489 tree comp_op0 = arg00;
4490 tree comp_op1 = arg01;
4491 tree comp_type = TREE_TYPE (comp_op0);
4493 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4494 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4496 comp_type = type;
4497 comp_op0 = arg1;
4498 comp_op1 = arg2;
4501 switch (comp_code)
4503 case EQ_EXPR:
4504 return pedantic_non_lvalue (fold_convert (type, arg2));
4505 case NE_EXPR:
4506 return pedantic_non_lvalue (fold_convert (type, arg1));
4507 case LE_EXPR:
4508 case LT_EXPR:
4509 case UNLE_EXPR:
4510 case UNLT_EXPR:
4511 /* In C++ a ?: expression can be an lvalue, so put the
4512 operand which will be used if they are equal first
4513 so that we can convert this back to the
4514 corresponding COND_EXPR. */
4515 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4517 comp_op0 = fold_convert (comp_type, comp_op0);
4518 comp_op1 = fold_convert (comp_type, comp_op1);
4519 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4520 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4521 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4522 return pedantic_non_lvalue (fold_convert (type, tem));
4524 break;
4525 case GE_EXPR:
4526 case GT_EXPR:
4527 case UNGE_EXPR:
4528 case UNGT_EXPR:
4529 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4531 comp_op0 = fold_convert (comp_type, comp_op0);
4532 comp_op1 = fold_convert (comp_type, comp_op1);
4533 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4534 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4535 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4536 return pedantic_non_lvalue (fold_convert (type, tem));
4538 break;
4539 case UNEQ_EXPR:
4540 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4541 return pedantic_non_lvalue (fold_convert (type, arg2));
4542 break;
4543 case LTGT_EXPR:
4544 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4545 return pedantic_non_lvalue (fold_convert (type, arg1));
4546 break;
4547 default:
4548 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4549 break;
4553 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4554 we might still be able to simplify this. For example,
4555 if C1 is one less or one more than C2, this might have started
4556 out as a MIN or MAX and been transformed by this function.
4557 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
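/* E.g. "x < 6 ? x : 5" satisfies C1 == C2 + 1 and is rebuilt as
   MIN_EXPR <x, 5>, provided 5 is not TYPE_MAX_VALUE of the type. */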
4559 if (INTEGRAL_TYPE_P (type)
4560 && TREE_CODE (arg01) == INTEGER_CST
4561 && TREE_CODE (arg2) == INTEGER_CST)
4562 switch (comp_code)
4564 case EQ_EXPR:
4565 /* We can replace A with C1 in this case. */
4566 arg1 = fold_convert (type, arg01);
4567 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4569 case LT_EXPR:
4570 /* If C1 is C2 + 1, this is min(A, C2). */
4571 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4572 OEP_ONLY_CONST)
4573 && operand_equal_p (arg01,
4574 const_binop (PLUS_EXPR, arg2,
4575 integer_one_node, 0),
4576 OEP_ONLY_CONST))
4577 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4578 type, arg1, arg2));
4579 break;
4581 case LE_EXPR:
4582 /* If C1 is C2 - 1, this is min(A, C2). */
4583 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4584 OEP_ONLY_CONST)
4585 && operand_equal_p (arg01,
4586 const_binop (MINUS_EXPR, arg2,
4587 integer_one_node, 0),
4588 OEP_ONLY_CONST))
4589 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4590 type, arg1, arg2));
4591 break;
4593 case GT_EXPR:
4594 /* If C1 is C2 - 1, this is max(A, C2). */
4595 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4596 OEP_ONLY_CONST)
4597 && operand_equal_p (arg01,
4598 const_binop (MINUS_EXPR, arg2,
4599 integer_one_node, 0),
4600 OEP_ONLY_CONST))
4601 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4602 type, arg1, arg2));
4603 break;
4605 case GE_EXPR:
4606 /* If C1 is C2 + 1, this is max(A, C2). */
4607 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4608 OEP_ONLY_CONST)
4609 && operand_equal_p (arg01,
4610 const_binop (PLUS_EXPR, arg2,
4611 integer_one_node, 0),
4612 OEP_ONLY_CONST))
4613 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4614 type, arg1, arg2));
4615 break;
4616 case NE_EXPR:
4617 break;
4618 default:
4619 gcc_unreachable ();
4622 return NULL_TREE;
4627 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4628 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4629 #endif
4631 /* EXP is some logical combination of boolean tests. See if we can
4632 merge it into some range test. Return the new tree if so. */
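/* E.g. "c == 4 || c == 5" is merged into a single range check,
   roughly "(unsigned) (c - 4) <= 1", assuming C has an integer
   type. */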
4634 static tree
4635 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4637 int or_op = (code == TRUTH_ORIF_EXPR
4638 || code == TRUTH_OR_EXPR);
4639 int in0_p, in1_p, in_p;
4640 tree low0, low1, low, high0, high1, high;
4641 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4642 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4643 tree tem;
4645 /* If this is an OR operation, invert both sides; we will invert
4646 again at the end. */
4647 if (or_op)
4648 in0_p = ! in0_p, in1_p = ! in1_p;
4650 /* If both expressions are the same, if we can merge the ranges, and we
4651 can build the range test, return it or its inversion. If one of the
4652 ranges is always true or always false, consider it to be the same
4653 expression as the other. */
4654 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4655 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4656 in1_p, low1, high1)
4657 && 0 != (tem = (build_range_check (type,
4658 lhs != 0 ? lhs
4659 : rhs != 0 ? rhs : integer_zero_node,
4660 in_p, low, high))))
4661 return or_op ? invert_truthvalue (tem) : tem;
4663 /* On machines where branches are expensive, if this is a
4664 short-circuited branch and the underlying object on both sides
4665 is the same, make a non-short-circuit operation. */
4666 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4667 && lhs != 0 && rhs != 0
4668 && (code == TRUTH_ANDIF_EXPR
4669 || code == TRUTH_ORIF_EXPR)
4670 && operand_equal_p (lhs, rhs, 0))
4672 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4673 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4674 which cases we can't do this. */
4675 if (simple_operand_p (lhs))
4676 return build2 (code == TRUTH_ANDIF_EXPR
4677 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4678 type, op0, op1);
4680 else if (lang_hooks.decls.global_bindings_p () == 0
4681 && ! CONTAINS_PLACEHOLDER_P (lhs))
4683 tree common = save_expr (lhs);
4685 if (0 != (lhs = build_range_check (type, common,
4686 or_op ? ! in0_p : in0_p,
4687 low0, high0))
4688 && (0 != (rhs = build_range_check (type, common,
4689 or_op ? ! in1_p : in1_p,
4690 low1, high1))))
4691 return build2 (code == TRUTH_ANDIF_EXPR
4692 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4693 type, lhs, rhs);
4697 return 0;
4700 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4701 bit value. Arrange things so the extra bits will be set to zero if and
4702 only if C is sign-extended to its full width. If MASK is nonzero,
4703 it is an INTEGER_CST that should be AND'ed with the extra bits. */
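/* A worked example, assuming a 32-bit mode and P == 8: for
   C == 0xffffff80 (already sign-extended) the result is 0x80 with
   the extra bits clear, while for C == 0x80 it is 0xffffff80 with
   the extra bits set. */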
4705 static tree
4706 unextend (tree c, int p, int unsignedp, tree mask)
4708 tree type = TREE_TYPE (c);
4709 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4710 tree temp;
4712 if (p == modesize || unsignedp)
4713 return c;
4715 /* We work by getting just the sign bit into the low-order bit, then
4716 into the high-order bit, then sign-extend. We then XOR that value
4717 with C. */
4718 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4719 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4721 /* We must use a signed type in order to get an arithmetic right shift.
4722 However, we must also avoid introducing accidental overflows, so that
4723 a subsequent call to integer_zerop will work. Hence we must
4724 do the type conversion here. At this point, the constant is either
4725 zero or one, and the conversion to a signed type can never overflow.
4726 We could get an overflow if this conversion is done anywhere else. */
4727 if (TYPE_UNSIGNED (type))
4728 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4730 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4731 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4732 if (mask != 0)
4733 temp = const_binop (BIT_AND_EXPR, temp,
4734 fold_convert (TREE_TYPE (c), mask), 0);
4735 /* If necessary, convert the type back to match the type of C. */
4736 if (TYPE_UNSIGNED (type))
4737 temp = fold_convert (type, temp);
4739 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4742 /* Find ways of folding logical expressions of LHS and RHS:
4743 Try to merge two comparisons to the same innermost item.
4744 Look for range tests like "ch >= '0' && ch <= '9'".
4745 Look for combinations of simple terms on machines with expensive branches
4746 and evaluate the RHS unconditionally.
4748 For example, if we have p->a == 2 && p->b == 4 and we can make an
4749 object large enough to span both A and B, we can do this with a comparison
4750 against the object ANDed with a mask.
4752 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4753 operations to do this with one comparison.
4755 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4756 function and the one above.
4758 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4759 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4761 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4762 two operands.
4764 We return the simplified tree or 0 if no optimization is possible. */
4766 static tree
4767 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4769 /* If this is the "or" of two comparisons, we can do something if
4770 the comparisons are NE_EXPR. If this is the "and", we can do something
4771 if the comparisons are EQ_EXPR. I.e.,
4772 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4774 WANTED_CODE is this operation code. For single bit fields, we can
4775 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4776 comparison for one-bit fields. */
4778 enum tree_code wanted_code;
4779 enum tree_code lcode, rcode;
4780 tree ll_arg, lr_arg, rl_arg, rr_arg;
4781 tree ll_inner, lr_inner, rl_inner, rr_inner;
4782 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4783 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4784 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4785 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4786 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4787 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4788 enum machine_mode lnmode, rnmode;
4789 tree ll_mask, lr_mask, rl_mask, rr_mask;
4790 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4791 tree l_const, r_const;
4792 tree lntype, rntype, result;
4793 int first_bit, end_bit;
4794 int volatilep;
4796 /* Start by getting the comparison codes. Fail if anything is volatile.
4797 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4798 it were surrounded with a NE_EXPR. */
4800 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4801 return 0;
4803 lcode = TREE_CODE (lhs);
4804 rcode = TREE_CODE (rhs);
4806 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4808 lhs = build2 (NE_EXPR, truth_type, lhs,
4809 build_int_cst (TREE_TYPE (lhs), 0));
4810 lcode = NE_EXPR;
4813 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4815 rhs = build2 (NE_EXPR, truth_type, rhs,
4816 build_int_cst (TREE_TYPE (rhs), 0));
4817 rcode = NE_EXPR;
4820 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4821 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4822 return 0;
4824 ll_arg = TREE_OPERAND (lhs, 0);
4825 lr_arg = TREE_OPERAND (lhs, 1);
4826 rl_arg = TREE_OPERAND (rhs, 0);
4827 rr_arg = TREE_OPERAND (rhs, 1);
4829 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4830 if (simple_operand_p (ll_arg)
4831 && simple_operand_p (lr_arg))
4833 tree result;
4834 if (operand_equal_p (ll_arg, rl_arg, 0)
4835 && operand_equal_p (lr_arg, rr_arg, 0))
4837 result = combine_comparisons (code, lcode, rcode,
4838 truth_type, ll_arg, lr_arg);
4839 if (result)
4840 return result;
4842 else if (operand_equal_p (ll_arg, rr_arg, 0)
4843 && operand_equal_p (lr_arg, rl_arg, 0))
4845 result = combine_comparisons (code, lcode,
4846 swap_tree_comparison (rcode),
4847 truth_type, ll_arg, lr_arg);
4848 if (result)
4849 return result;
4853 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4854 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4856 /* If the RHS can be evaluated unconditionally and its operands are
4857 simple, it wins to evaluate the RHS unconditionally on machines
4858 with expensive branches. In this case, this isn't a comparison
4859 that can be merged. Avoid doing this if the RHS is a floating-point
4860 comparison since those can trap. */
4862 if (BRANCH_COST >= 2
4863 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4864 && simple_operand_p (rl_arg)
4865 && simple_operand_p (rr_arg))
4867 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4868 if (code == TRUTH_OR_EXPR
4869 && lcode == NE_EXPR && integer_zerop (lr_arg)
4870 && rcode == NE_EXPR && integer_zerop (rr_arg)
4871 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4872 return build2 (NE_EXPR, truth_type,
4873 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4874 ll_arg, rl_arg),
4875 build_int_cst (TREE_TYPE (ll_arg), 0));
4877 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4878 if (code == TRUTH_AND_EXPR
4879 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4880 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4881 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4882 return build2 (EQ_EXPR, truth_type,
4883 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4884 ll_arg, rl_arg),
4885 build_int_cst (TREE_TYPE (ll_arg), 0));
4887 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4888 return build2 (code, truth_type, lhs, rhs);
4891 /* See if the comparisons can be merged. Then get all the parameters for
4892 each side. */
4894 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4895 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4896 return 0;
4898 volatilep = 0;
4899 ll_inner = decode_field_reference (ll_arg,
4900 &ll_bitsize, &ll_bitpos, &ll_mode,
4901 &ll_unsignedp, &volatilep, &ll_mask,
4902 &ll_and_mask);
4903 lr_inner = decode_field_reference (lr_arg,
4904 &lr_bitsize, &lr_bitpos, &lr_mode,
4905 &lr_unsignedp, &volatilep, &lr_mask,
4906 &lr_and_mask);
4907 rl_inner = decode_field_reference (rl_arg,
4908 &rl_bitsize, &rl_bitpos, &rl_mode,
4909 &rl_unsignedp, &volatilep, &rl_mask,
4910 &rl_and_mask);
4911 rr_inner = decode_field_reference (rr_arg,
4912 &rr_bitsize, &rr_bitpos, &rr_mode,
4913 &rr_unsignedp, &volatilep, &rr_mask,
4914 &rr_and_mask);
4916 /* The inner operation on the lhs of each comparison must be the same
4917 if we are to be able to do anything. Then see if we have constants.
4918 If not, the same must be true for the rhs's. */
4920 if (volatilep || ll_inner == 0 || rl_inner == 0
4921 || ! operand_equal_p (ll_inner, rl_inner, 0))
4922 return 0;
4924 if (TREE_CODE (lr_arg) == INTEGER_CST
4925 && TREE_CODE (rr_arg) == INTEGER_CST)
4926 l_const = lr_arg, r_const = rr_arg;
4927 else if (lr_inner == 0 || rr_inner == 0
4928 || ! operand_equal_p (lr_inner, rr_inner, 0))
4929 return 0;
4930 else
4931 l_const = r_const = 0;
4933 /* If either comparison code is not correct for our logical operation,
4934 fail. However, we can convert a one-bit comparison against zero into
4935 the opposite comparison against that bit being set in the field. */
4937 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4938 if (lcode != wanted_code)
4940 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4942 /* Make the left operand unsigned, since we are only interested
4943 in the value of one bit. Otherwise we are doing the wrong
4944 thing below. */
4945 ll_unsignedp = 1;
4946 l_const = ll_mask;
4948 else
4949 return 0;
4952 /* This is analogous to the code for l_const above. */
4953 if (rcode != wanted_code)
4955 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4957 rl_unsignedp = 1;
4958 r_const = rl_mask;
4960 else
4961 return 0;
4964 /* After this point all optimizations will generate bit-field
4965 references, which we might not want. */
4966 if (! lang_hooks.can_use_bit_fields_p ())
4967 return 0;
4969 /* See if we can find a mode that contains both fields being compared on
4970 the left. If we can't, fail. Otherwise, update all constants and masks
4971 to be relative to a field of that size. */
4972 first_bit = MIN (ll_bitpos, rl_bitpos);
4973 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4974 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4975 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4976 volatilep);
4977 if (lnmode == VOIDmode)
4978 return 0;
4980 lnbitsize = GET_MODE_BITSIZE (lnmode);
4981 lnbitpos = first_bit & ~ (lnbitsize - 1);
4982 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4983 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4985 if (BYTES_BIG_ENDIAN)
4987 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4988 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4991 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4992 size_int (xll_bitpos), 0);
4993 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4994 size_int (xrl_bitpos), 0);
4996 if (l_const)
4998 l_const = fold_convert (lntype, l_const);
4999 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5000 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5001 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5002 fold_build1 (BIT_NOT_EXPR,
5003 lntype, ll_mask),
5004 0)))
5006 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5008 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5011 if (r_const)
5013 r_const = fold_convert (lntype, r_const);
5014 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5015 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5016 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5017 fold_build1 (BIT_NOT_EXPR,
5018 lntype, rl_mask),
5019 0)))
5021 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5023 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5027 /* If the right sides are not constant, do the same for them. Also,
5028 disallow this optimization if a size or signedness mismatch occurs
5029 between the left and right sides. */
5030 if (l_const == 0)
5032 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5033 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5034 /* Make sure the two fields on the right
5035 correspond to the left without being swapped. */
5036 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5037 return 0;
5039 first_bit = MIN (lr_bitpos, rr_bitpos);
5040 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5041 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5042 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5043 volatilep);
5044 if (rnmode == VOIDmode)
5045 return 0;
5047 rnbitsize = GET_MODE_BITSIZE (rnmode);
5048 rnbitpos = first_bit & ~ (rnbitsize - 1);
5049 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5050 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5052 if (BYTES_BIG_ENDIAN)
5054 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5055 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5058 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5059 size_int (xlr_bitpos), 0);
5060 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5061 size_int (xrr_bitpos), 0);
5063 /* Make a mask that corresponds to both fields being compared.
5064 Do this for both items being compared. If the operands are the
5065 same size and the bits being compared are in the same position
5066 then we can do this by masking both and comparing the masked
5067 results. */
5068 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5069 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5070 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5072 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5073 ll_unsignedp || rl_unsignedp);
5074 if (! all_ones_mask_p (ll_mask, lnbitsize))
5075 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5077 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5078 lr_unsignedp || rr_unsignedp);
5079 if (! all_ones_mask_p (lr_mask, rnbitsize))
5080 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5082 return build2 (wanted_code, truth_type, lhs, rhs);
5085 /* There is still another way we can do something: If both pairs of
5086 fields being compared are adjacent, we may be able to make a wider
5087 field containing them both.
5089 Note that we still must mask the lhs/rhs expressions. Furthermore,
5090 the mask must be shifted to account for the shift done by
5091 make_bit_field_ref. */
5092 if ((ll_bitsize + ll_bitpos == rl_bitpos
5093 && lr_bitsize + lr_bitpos == rr_bitpos)
5094 || (ll_bitpos == rl_bitpos + rl_bitsize
5095 && lr_bitpos == rr_bitpos + rr_bitsize))
5097 tree type;
5099 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5100 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5101 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5102 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5104 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5105 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5106 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5107 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5109 /* Convert to the smaller type before masking out unwanted bits. */
5110 type = lntype;
5111 if (lntype != rntype)
5113 if (lnbitsize > rnbitsize)
5115 lhs = fold_convert (rntype, lhs);
5116 ll_mask = fold_convert (rntype, ll_mask);
5117 type = rntype;
5119 else if (lnbitsize < rnbitsize)
5121 rhs = fold_convert (lntype, rhs);
5122 lr_mask = fold_convert (lntype, lr_mask);
5123 type = lntype;
5127 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5128 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5130 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5131 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5133 return build2 (wanted_code, truth_type, lhs, rhs);
5136 return 0;
5139 /* Handle the case of comparisons with constants. If there is something in
5140 common between the masks, those bits of the constants must be the same.
5141 If not, the condition is always false. Test for this to avoid generating
5142 incorrect code below. */
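/* For instance, (x & 3) == 1 && (x & 5) == 4 would require bit 0 of
   X to be both 1 and 0, so the "and" is always false. */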
5143 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5144 if (! integer_zerop (result)
5145 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5146 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5148 if (wanted_code == NE_EXPR)
5150 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5151 return constant_boolean_node (true, truth_type);
5153 else
5155 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5156 return constant_boolean_node (false, truth_type);
5160 /* Construct the expression we will return. First get the component
5161 reference we will make. Unless the mask is all ones the width of
5162 that field, perform the mask operation. Then compare with the
5163 merged constant. */
5164 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5165 ll_unsignedp || rl_unsignedp);
5167 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5168 if (! all_ones_mask_p (ll_mask, lnbitsize))
5169 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5171 return build2 (wanted_code, truth_type, result,
5172 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5175 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5176 constant. */
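/* E.g. "MIN (x, 0) <= 5" is rewritten via invert_truthvalue as
   "MIN (x, 0) > 5", folded to 0 by the GT_EXPR case below, and then
   inverted back to 1. */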
5178 static tree
5179 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5181 tree arg0 = op0;
5182 enum tree_code op_code;
5183 tree comp_const = op1;
5184 tree minmax_const;
5185 int consts_equal, consts_lt;
5186 tree inner;
5188 STRIP_SIGN_NOPS (arg0);
5190 op_code = TREE_CODE (arg0);
5191 minmax_const = TREE_OPERAND (arg0, 1);
5192 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5193 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5194 inner = TREE_OPERAND (arg0, 0);
5196 /* If something does not permit us to optimize, return the original tree. */
5197 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5198 || TREE_CODE (comp_const) != INTEGER_CST
5199 || TREE_CONSTANT_OVERFLOW (comp_const)
5200 || TREE_CODE (minmax_const) != INTEGER_CST
5201 || TREE_CONSTANT_OVERFLOW (minmax_const))
5202 return NULL_TREE;
5204 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5205 and GT_EXPR, doing the rest with recursive calls using logical
5206 simplifications. */
5207 switch (code)
5209 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5211 /* FIXME: We should be able to invert code without building a
5212 scratch tree node, but doing so would require us to
5213 duplicate a part of invert_truthvalue here. */
5214 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5215 tem = optimize_minmax_comparison (TREE_CODE (tem),
5216 TREE_TYPE (tem),
5217 TREE_OPERAND (tem, 0),
5218 TREE_OPERAND (tem, 1));
5219 return invert_truthvalue (tem);
5222 case GE_EXPR:
5223 return
5224 fold_build2 (TRUTH_ORIF_EXPR, type,
5225 optimize_minmax_comparison
5226 (EQ_EXPR, type, arg0, comp_const),
5227 optimize_minmax_comparison
5228 (GT_EXPR, type, arg0, comp_const));
5230 case EQ_EXPR:
5231 if (op_code == MAX_EXPR && consts_equal)
5232 /* MAX (X, 0) == 0 -> X <= 0 */
5233 return fold_build2 (LE_EXPR, type, inner, comp_const);
5235 else if (op_code == MAX_EXPR && consts_lt)
5236 /* MAX (X, 0) == 5 -> X == 5 */
5237 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5239 else if (op_code == MAX_EXPR)
5240 /* MAX (X, 0) == -1 -> false */
5241 return omit_one_operand (type, integer_zero_node, inner);
5243 else if (consts_equal)
5244 /* MIN (X, 0) == 0 -> X >= 0 */
5245 return fold_build2 (GE_EXPR, type, inner, comp_const);
5247 else if (consts_lt)
5248 /* MIN (X, 0) == 5 -> false */
5249 return omit_one_operand (type, integer_zero_node, inner);
5251 else
5252 /* MIN (X, 0) == -1 -> X == -1 */
5253 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5255 case GT_EXPR:
5256 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5257 /* MAX (X, 0) > 0 -> X > 0
5258 MAX (X, 0) > 5 -> X > 5 */
5259 return fold_build2 (GT_EXPR, type, inner, comp_const);
5261 else if (op_code == MAX_EXPR)
5262 /* MAX (X, 0) > -1 -> true */
5263 return omit_one_operand (type, integer_one_node, inner);
5265 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5266 /* MIN (X, 0) > 0 -> false
5267 MIN (X, 0) > 5 -> false */
5268 return omit_one_operand (type, integer_zero_node, inner);
5270 else
5271 /* MIN (X, 0) > -1 -> X > -1 */
5272 return fold_build2 (GT_EXPR, type, inner, comp_const);
5274 default:
5275 return NULL_TREE;
5279 /* T is an integer expression that is being multiplied or divided by, or
5280 reduced modulo, a constant C (CODE says which operation and what kind
5281 of divide or modulus). See if we can eliminate that operation by
5282 folding it with other operations already in T. WIDE_TYPE, if non-null,
5283 is a type that should be used for the computation if wider than our type.
5285 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5286 (X * 2) + (Y * 4). We must, however, be assured that either the original
5287 expression would not overflow or that overflow is undefined for the type
5288 in the language in question.
5290 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5291 the machine has a multiply-accumulate insn or that this is part of an
5292 addressing calculation.
5294 If we return a non-null expression, it is an equivalent form of the
5295 original computation, but need not be in the original type. */
5297 static tree
5298 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5300 /* To avoid exponential search depth, refuse to allow recursion past
5301 three levels. Beyond that (1) it's highly unlikely that we'll find
5302 something interesting and (2) we've probably processed it before
5303 when we built the inner expression. */
5305 static int depth;
5306 tree ret;
5308 if (depth > 3)
5309 return NULL;
5311 depth++;
5312 ret = extract_muldiv_1 (t, c, code, wide_type);
5313 depth--;
5315 return ret;
5318 static tree
5319 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5321 tree type = TREE_TYPE (t);
5322 enum tree_code tcode = TREE_CODE (t);
5323 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5324 > GET_MODE_SIZE (TYPE_MODE (type)))
5325 ? wide_type : type);
5326 tree t1, t2;
5327 int same_p = tcode == code;
5328 tree op0 = NULL_TREE, op1 = NULL_TREE;
5330 /* Don't deal with constants of zero here; they confuse the code below. */
5331 if (integer_zerop (c))
5332 return NULL_TREE;
5334 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5335 op0 = TREE_OPERAND (t, 0);
5337 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5338 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5340 /* Note that we need not handle conditional operations here since fold
5341 already handles those cases. So just do arithmetic here. */
5342 switch (tcode)
5344 case INTEGER_CST:
5345 /* For a constant, we can always simplify if we are a multiply
5346 or (for divide and modulus) if it is a multiple of our constant. */
5347 if (code == MULT_EXPR
5348 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5349 return const_binop (code, fold_convert (ctype, t),
5350 fold_convert (ctype, c), 0);
5351 break;
5353 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5354 /* If op0 is an expression ... */
5355 if ((COMPARISON_CLASS_P (op0)
5356 || UNARY_CLASS_P (op0)
5357 || BINARY_CLASS_P (op0)
5358 || EXPRESSION_CLASS_P (op0))
5359 /* ... and is unsigned, and its type is smaller than ctype,
5360 then we cannot pass through as widening. */
5361 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5362 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5363 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5364 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5365 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5366 /* ... or this is a truncation (t is narrower than op0),
5367 then we cannot pass through this narrowing. */
5368 || (GET_MODE_SIZE (TYPE_MODE (type))
5369 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5370 /* ... or signedness changes for division or modulus,
5371 then we cannot pass through this conversion. */
5372 || (code != MULT_EXPR
5373 && (TYPE_UNSIGNED (ctype)
5374 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5375 break;
5377 /* Pass the constant down and see if we can make a simplification. If
5378 we can, replace this expression with the inner simplification for
5379 possible later conversion to our or some other type. */
5380 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5381 && TREE_CODE (t2) == INTEGER_CST
5382 && ! TREE_CONSTANT_OVERFLOW (t2)
5383 && (0 != (t1 = extract_muldiv (op0, t2, code,
5384 code == MULT_EXPR
5385 ? ctype : NULL_TREE))))
5386 return t1;
5387 break;
5389 case ABS_EXPR:
5390 /* If widening the type changes it from signed to unsigned, then we
5391 must avoid building ABS_EXPR itself as unsigned. */
5392 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5394 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5395 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5397 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5398 return fold_convert (ctype, t1);
5400 break;
5402 /* FALLTHROUGH */
5403 case NEGATE_EXPR:
5404 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5405 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5406 break;
5408 case MIN_EXPR: case MAX_EXPR:
5409 /* If widening the type changes the signedness, then we can't perform
5410 this optimization as that changes the result. */
5411 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5412 break;
5414 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5415 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5416 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5418 if (tree_int_cst_sgn (c) < 0)
5419 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5421 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5422 fold_convert (ctype, t2));
5424 break;
5426 case LSHIFT_EXPR: case RSHIFT_EXPR:
5427 /* If the second operand is constant, this is a multiplication
5428 or floor division, by a power of two, so we can treat it that
5429 way unless the multiplier or divisor overflows. Signed
5430 left-shift overflow is implementation-defined rather than
5431 undefined in C90, so do not convert signed left shift into
5432 multiplication. */
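/* E.g. "X >> 2" is handled here as X / 4 (a floor division), and an
   unsigned "X << 3" as X * 8. */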
5433 if (TREE_CODE (op1) == INTEGER_CST
5434 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5435 /* const_binop may not detect overflow correctly,
5436 so check for it explicitly here. */
5437 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5438 && TREE_INT_CST_HIGH (op1) == 0
5439 && 0 != (t1 = fold_convert (ctype,
5440 const_binop (LSHIFT_EXPR,
5441 size_one_node,
5442 op1, 0)))
5443 && ! TREE_OVERFLOW (t1))
5444 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5445 ? MULT_EXPR : FLOOR_DIV_EXPR,
5446 ctype, fold_convert (ctype, op0), t1),
5447 c, code, wide_type);
5448 break;
5450 case PLUS_EXPR: case MINUS_EXPR:
5451 /* See if we can eliminate the operation on both sides. If we can, we
5452 can return a new PLUS or MINUS. If we can't, the only remaining
5453 cases where we can do anything are if the second operand is a
5454 constant. */
5455 t1 = extract_muldiv (op0, c, code, wide_type);
5456 t2 = extract_muldiv (op1, c, code, wide_type);
5457 if (t1 != 0 && t2 != 0
5458 && (code == MULT_EXPR
5459 /* If not multiplication, we can only do this if both operands
5460 are divisible by c. */
5461 || (multiple_of_p (ctype, op0, c)
5462 && multiple_of_p (ctype, op1, c))))
5463 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5464 fold_convert (ctype, t2));
5466 /* If this was a subtraction, negate OP1 and turn the operation into
5467 an addition. This simplifies the logic below. */
5468 if (tcode == MINUS_EXPR)
5469 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5471 if (TREE_CODE (op1) != INTEGER_CST)
5472 break;
5474 /* If either OP1 or C is negative, this optimization is not safe for
5475 some of the division and remainder types, while for others we need
5476 to change the code. */
5477 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5479 if (code == CEIL_DIV_EXPR)
5480 code = FLOOR_DIV_EXPR;
5481 else if (code == FLOOR_DIV_EXPR)
5482 code = CEIL_DIV_EXPR;
5483 else if (code != MULT_EXPR
5484 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5485 break;
5488 /* If it's a multiply or a division/modulus operation of a multiple
5489 of our constant, do the operation and verify it doesn't overflow. */
5490 if (code == MULT_EXPR
5491 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5493 op1 = const_binop (code, fold_convert (ctype, op1),
5494 fold_convert (ctype, c), 0);
5495 /* We allow the constant to overflow with wrapping semantics. */
5496 if (op1 == 0
5497 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5498 break;
5500 else
5501 break;
5503 /* If we have an unsigned type that is not a sizetype, we cannot widen
5504 the operation since it will change the result if the original
5505 computation overflowed. */
5506 if (TYPE_UNSIGNED (ctype)
5507 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5508 && ctype != type)
5509 break;
5511 /* If we were able to eliminate our operation from the first side,
5512 apply our operation to the second side and reform the PLUS. */
5513 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5514 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5516 /* The last case is if this is a multiplication.  In that case, we can
5517 apply the distributive law to commute the multiply and addition
5518 if the multiplication of the constants doesn't overflow. */
5519 if (code == MULT_EXPR)
5520 return fold_build2 (tcode, ctype,
5521 fold_build2 (code, ctype,
5522 fold_convert (ctype, op0),
5523 fold_convert (ctype, c)),
5524 op1);
5526 break;
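/* A small worked example of the distributive step above (constants
   chosen for illustration): (a + 2) * 3 becomes a * 3 + 6; the
   constant 6 is computed by const_binop and checked for overflow
   before the PLUS is rebuilt.  */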
5528 case MULT_EXPR:
5529 /* We have a special case here if we are doing something like
5530 (C * 8) % 4 since we know that's zero. */
5531 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5532 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5533 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5534 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5535 return omit_one_operand (type, integer_zero_node, op0);
5537 /* ... fall through ... */
5539 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5540 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5541 /* If we can extract our operation from the LHS, do so and return a
5542 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5543 do something only if the second operand is a constant. */
5544 if (same_p
5545 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5546 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5547 fold_convert (ctype, op1));
5548 else if (tcode == MULT_EXPR && code == MULT_EXPR
5549 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5550 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5551 fold_convert (ctype, t1));
5552 else if (TREE_CODE (op1) != INTEGER_CST)
5553 return 0;
5555 /* If these are the same operation types, we can associate them
5556 assuming no overflow. */
5557 if (tcode == code
5558 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5559 fold_convert (ctype, c), 0))
5560 && ! TREE_OVERFLOW (t1))
5561 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5563 /* If these operations "cancel" each other, we have the main
5564 optimizations of this pass, which occur when either constant is a
5565 multiple of the other, in which case we replace this with either an
5566 operation of either CODE or TCODE.
5568 If we have an unsigned type that is not a sizetype, we cannot do
5569 this since it will change the result if the original computation
5570 overflowed. */
5571 if ((! TYPE_UNSIGNED (ctype)
5572 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5573 && ! flag_wrapv
5574 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5575 || (tcode == MULT_EXPR
5576 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5577 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5579 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5580 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5581 fold_convert (ctype,
5582 const_binop (TRUNC_DIV_EXPR,
5583 op1, c, 0)));
5584 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5585 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5586 fold_convert (ctype,
5587 const_binop (TRUNC_DIV_EXPR,
5588 c, op1, 0)));
5590 break;
5592 default:
5593 break;
5596 return 0;
5599 /* Return a node which has the indicated constant VALUE (either 0 or
5600 1), and is of the indicated TYPE. */
5602 tree
5603 constant_boolean_node (int value, tree type)
5605 if (type == integer_type_node)
5606 return value ? integer_one_node : integer_zero_node;
5607 else if (type == boolean_type_node)
5608 return value ? boolean_true_node : boolean_false_node;
5609 else
5610 return build_int_cst (type, value);
5614 /* Return true if EXPR looks like an ARRAY_REF and set BASE and
5615 OFFSET to the appropriate trees. If there is no offset,
5616 OFFSET is set to NULL_TREE. BASE will be canonicalized to
5617 something you can get the element type from using
5618 TREE_TYPE (TREE_TYPE (BASE)). OFFSET will be the offset
5619 in bytes relative to BASE. */
5621 static bool
5622 extract_array_ref (tree expr, tree *base, tree *offset)
5624 /* One canonical form is a PLUS_EXPR with the first
5625 argument being an ADDR_EXPR with a possible NOP_EXPR
5626 attached. */
5627 if (TREE_CODE (expr) == PLUS_EXPR)
5629 tree op0 = TREE_OPERAND (expr, 0);
5630 tree inner_base, dummy1;
5631 /* Strip NOP_EXPRs here because the C frontends and/or
5632 folders may present us with (int *)&x.a + 4B. */
5633 STRIP_NOPS (op0);
5634 if (extract_array_ref (op0, &inner_base, &dummy1))
5636 *base = inner_base;
5637 if (dummy1 == NULL_TREE)
5638 *offset = TREE_OPERAND (expr, 1);
5639 else
5640 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5641 dummy1, TREE_OPERAND (expr, 1));
5642 return true;
5645 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5646 which we transform into an ADDR_EXPR with appropriate
5647 offset. For other arguments to the ADDR_EXPR we assume
5648 zero offset and as such do not care about the ADDR_EXPR
5649 type and strip possible nops from it. */
5650 else if (TREE_CODE (expr) == ADDR_EXPR)
5652 tree op0 = TREE_OPERAND (expr, 0);
5653 if (TREE_CODE (op0) == ARRAY_REF)
5655 tree idx = TREE_OPERAND (op0, 1);
5656 *base = TREE_OPERAND (op0, 0);
5657 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5658 array_ref_element_size (op0));
5660 else
5662 /* Handle array-to-pointer decay as &a. */
5663 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5664 *base = TREE_OPERAND (expr, 0);
5665 else
5666 *base = expr;
5667 *offset = NULL_TREE;
5669 return true;
5671 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5672 else if (SSA_VAR_P (expr)
5673 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5675 *base = expr;
5676 *offset = NULL_TREE;
5677 return true;
5680 return false;
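/* Worked example (4-byte int elements assumed): for the expression
   &a[i], taken apart as an ADDR_EXPR of an ARRAY_REF, *base becomes
   the array `a' and *offset becomes i * 4, i.e. the byte offset of
   the referenced element.  */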
5684 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5685 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5686 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5687 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5688 COND is the first argument to CODE; otherwise (as in the example
5689 given here), it is the second argument. TYPE is the type of the
5690 original expression. Return NULL_TREE if no simplification is
5691 possible. */
5693 static tree
5694 fold_binary_op_with_conditional_arg (enum tree_code code,
5695 tree type, tree op0, tree op1,
5696 tree cond, tree arg, int cond_first_p)
5698 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5699 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5700 tree test, true_value, false_value;
5701 tree lhs = NULL_TREE;
5702 tree rhs = NULL_TREE;
5704 /* This transformation is only worthwhile if we don't have to wrap
5705 ARG in a SAVE_EXPR, and the operation can be simplified on at least
5706 one of the branches once it's pushed inside the COND_EXPR. */
5707 if (!TREE_CONSTANT (arg))
5708 return NULL_TREE;
5710 if (TREE_CODE (cond) == COND_EXPR)
5712 test = TREE_OPERAND (cond, 0);
5713 true_value = TREE_OPERAND (cond, 1);
5714 false_value = TREE_OPERAND (cond, 2);
5715 /* If this operand is an expression that throws (and hence has
5716 void type), then it does not make sense to try to perform a
5717 logical or arithmetic operation involving it. */
5718 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5719 lhs = true_value;
5720 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5721 rhs = false_value;
5723 else
5725 tree testtype = TREE_TYPE (cond);
5726 test = cond;
5727 true_value = constant_boolean_node (true, testtype);
5728 false_value = constant_boolean_node (false, testtype);
5731 arg = fold_convert (arg_type, arg);
5732 if (lhs == 0)
5734 true_value = fold_convert (cond_type, true_value);
5735 if (cond_first_p)
5736 lhs = fold_build2 (code, type, true_value, arg);
5737 else
5738 lhs = fold_build2 (code, type, arg, true_value);
5740 if (rhs == 0)
5742 false_value = fold_convert (cond_type, false_value);
5743 if (cond_first_p)
5744 rhs = fold_build2 (code, type, false_value, arg);
5745 else
5746 rhs = fold_build2 (code, type, arg, false_value);
5749 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5750 return fold_convert (type, test);
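/* Illustration of the transformation (example values mine): with
   CODE == PLUS_EXPR and ARG the constant 4, `4 + (b ? x : y)' becomes
   `b ? 4 + x : 4 + y', and `4 + (x < y)' becomes
   `(x < y) ? 5 : 4' once each branch is folded.  */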
5754 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5756 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5757 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5758 ADDEND is the same as X.
5760 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5761 and finite. The problematic cases are when X is zero, and its mode
5762 has signed zeros. In the case of rounding towards -infinity,
5763 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5764 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5766 static bool
5767 fold_real_zero_addition_p (tree type, tree addend, int negate)
5769 if (!real_zerop (addend))
5770 return false;
5772 /* Don't allow the fold with -fsignaling-nans. */
5773 if (HONOR_SNANS (TYPE_MODE (type)))
5774 return false;
5776 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5777 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5778 return true;
5780 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5781 if (TREE_CODE (addend) == REAL_CST
5782 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5783 negate = !negate;
5785 /* The mode has signed zeros, and we have to honor their sign.
5786 In this situation, there is only one case we can return true for.
5787 X - 0 is the same as X unless rounding towards -infinity is
5788 supported. */
5789 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
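/* Concrete cases behind the rule above (illustrative): with signed
   zeros honored, x + 0.0 is not x when x is -0.0, because
   -0.0 + 0.0 is +0.0 under the default rounding mode; x - 0.0 is
   safe except when rounding towards -infinity, where +0.0 - 0.0
   yields -0.0.  */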
5792 /* Subroutine of fold() that checks comparisons of built-in math
5793 functions against real constants.
5795 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5796 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5797 is the type of the result and ARG0 and ARG1 are the operands of the
5798 comparison. ARG1 must be a TREE_REAL_CST.
5800 The function returns the constant folded tree if a simplification
5801 can be made, and NULL_TREE otherwise. */
5803 static tree
5804 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5805 tree type, tree arg0, tree arg1)
5807 REAL_VALUE_TYPE c;
5809 if (BUILTIN_SQRT_P (fcode))
5811 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5812 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5814 c = TREE_REAL_CST (arg1);
5815 if (REAL_VALUE_NEGATIVE (c))
5817 /* sqrt(x) < y is always false, if y is negative. */
5818 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5819 return omit_one_operand (type, integer_zero_node, arg);
5821 /* sqrt(x) > y is always true, if y is negative and we
5822 don't care about NaNs, i.e. negative values of x. */
5823 if (code == NE_EXPR || !HONOR_NANS (mode))
5824 return omit_one_operand (type, integer_one_node, arg);
5826 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5827 return fold_build2 (GE_EXPR, type, arg,
5828 build_real (TREE_TYPE (arg), dconst0));
5830 else if (code == GT_EXPR || code == GE_EXPR)
5832 REAL_VALUE_TYPE c2;
5834 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5835 real_convert (&c2, mode, &c2);
5837 if (REAL_VALUE_ISINF (c2))
5839 /* sqrt(x) > y is x == +Inf, when y is very large. */
5840 if (HONOR_INFINITIES (mode))
5841 return fold_build2 (EQ_EXPR, type, arg,
5842 build_real (TREE_TYPE (arg), c2));
5844 /* sqrt(x) > y is always false, when y is very large
5845 and we don't care about infinities. */
5846 return omit_one_operand (type, integer_zero_node, arg);
5849 /* sqrt(x) > c is the same as x > c*c. */
5850 return fold_build2 (code, type, arg,
5851 build_real (TREE_TYPE (arg), c2));
5853 else if (code == LT_EXPR || code == LE_EXPR)
5855 REAL_VALUE_TYPE c2;
5857 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5858 real_convert (&c2, mode, &c2);
5860 if (REAL_VALUE_ISINF (c2))
5862 /* sqrt(x) < y is always true, when y is a very large
5863 value and we don't care about NaNs or Infinities. */
5864 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5865 return omit_one_operand (type, integer_one_node, arg);
5867 /* sqrt(x) < y is x != +Inf when y is very large and we
5868 don't care about NaNs. */
5869 if (! HONOR_NANS (mode))
5870 return fold_build2 (NE_EXPR, type, arg,
5871 build_real (TREE_TYPE (arg), c2));
5873 /* sqrt(x) < y is x >= 0 when y is very large and we
5874 don't care about Infinities. */
5875 if (! HONOR_INFINITIES (mode))
5876 return fold_build2 (GE_EXPR, type, arg,
5877 build_real (TREE_TYPE (arg), dconst0));
5879 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5880 if (lang_hooks.decls.global_bindings_p () != 0
5881 || CONTAINS_PLACEHOLDER_P (arg))
5882 return NULL_TREE;
5884 arg = save_expr (arg);
5885 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5886 fold_build2 (GE_EXPR, type, arg,
5887 build_real (TREE_TYPE (arg),
5888 dconst0)),
5889 fold_build2 (NE_EXPR, type, arg,
5890 build_real (TREE_TYPE (arg),
5891 c2)));
5894 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5895 if (! HONOR_NANS (mode))
5896 return fold_build2 (code, type, arg,
5897 build_real (TREE_TYPE (arg), c2));
5899 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5900 if (lang_hooks.decls.global_bindings_p () == 0
5901 && ! CONTAINS_PLACEHOLDER_P (arg))
5903 arg = save_expr (arg);
5904 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5905 fold_build2 (GE_EXPR, type, arg,
5906 build_real (TREE_TYPE (arg),
5907 dconst0)),
5908 fold_build2 (code, type, arg,
5909 build_real (TREE_TYPE (arg),
5910 c2)));
5915 return NULL_TREE;
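/* Two sketches of the sqrt folds above (constants chosen for
   illustration): sqrt (x) > 3.0 folds to x > 9.0, and, when NaNs are
   honored, sqrt (x) < 3.0 folds to x >= 0.0 && x < 9.0, since a
   negative x makes sqrt (x) a NaN and both forms false.  */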
5918 /* Subroutine of fold() that optimizes comparisons against Infinities,
5919 either +Inf or -Inf.
5921 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5922 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5923 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5925 The function returns the constant folded tree if a simplification
5926 can be made, and NULL_TREE otherwise. */
5928 static tree
5929 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5931 enum machine_mode mode;
5932 REAL_VALUE_TYPE max;
5933 tree temp;
5934 bool neg;
5936 mode = TYPE_MODE (TREE_TYPE (arg0));
5938 /* For negative infinity swap the sense of the comparison. */
5939 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5940 if (neg)
5941 code = swap_tree_comparison (code);
5943 switch (code)
5945 case GT_EXPR:
5946 /* x > +Inf is always false, if we ignore sNaNs. */
5947 if (HONOR_SNANS (mode))
5948 return NULL_TREE;
5949 return omit_one_operand (type, integer_zero_node, arg0);
5951 case LE_EXPR:
5952 /* x <= +Inf is always true, if we don't care about NaNs. */
5953 if (! HONOR_NANS (mode))
5954 return omit_one_operand (type, integer_one_node, arg0);
5956 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5957 if (lang_hooks.decls.global_bindings_p () == 0
5958 && ! CONTAINS_PLACEHOLDER_P (arg0))
5960 arg0 = save_expr (arg0);
5961 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5963 break;
5965 case EQ_EXPR:
5966 case GE_EXPR:
5967 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5968 real_maxval (&max, neg, mode);
5969 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5970 arg0, build_real (TREE_TYPE (arg0), max));
5972 case LT_EXPR:
5973 /* x < +Inf is always equal to x <= DBL_MAX. */
5974 real_maxval (&max, neg, mode);
5975 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5976 arg0, build_real (TREE_TYPE (arg0), max));
5978 case NE_EXPR:
5979 /* x != +Inf is always equal to !(x > DBL_MAX). */
5980 real_maxval (&max, neg, mode);
5981 if (! HONOR_NANS (mode))
5982 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5983 arg0, build_real (TREE_TYPE (arg0), max));
5985 /* The transformation below creates non-gimple code and thus is
5986 not appropriate if we are in gimple form. */
5987 if (in_gimple_form)
5988 return NULL_TREE;
5990 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5991 arg0, build_real (TREE_TYPE (arg0), max));
5992 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5994 default:
5995 break;
5998 return NULL_TREE;
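/* For instance (double used as an assumed example type): x < +Inf
   folds to x <= DBL_MAX, and x >= +Inf folds to x > DBL_MAX; for
   -Inf the comparison is first swapped, so x > -Inf becomes
   x >= -DBL_MAX.  */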
6001 /* Subroutine of fold() that optimizes comparisons of a division by
6002 a nonzero integer constant against an integer constant, i.e.
6003 X/C1 op C2.
6005 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6006 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6007 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6009 The function returns the constant folded tree if a simplification
6010 can be made, and NULL_TREE otherwise. */
6012 static tree
6013 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6015 tree prod, tmp, hi, lo;
6016 tree arg00 = TREE_OPERAND (arg0, 0);
6017 tree arg01 = TREE_OPERAND (arg0, 1);
6018 unsigned HOST_WIDE_INT lpart;
6019 HOST_WIDE_INT hpart;
6020 int overflow;
6022 /* We have to do this the hard way to detect unsigned overflow.
6023 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6024 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6025 TREE_INT_CST_HIGH (arg01),
6026 TREE_INT_CST_LOW (arg1),
6027 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6028 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6029 prod = force_fit_type (prod, -1, overflow, false);
6031 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6033 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6034 lo = prod;
6036 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6037 overflow = add_double (TREE_INT_CST_LOW (prod),
6038 TREE_INT_CST_HIGH (prod),
6039 TREE_INT_CST_LOW (tmp),
6040 TREE_INT_CST_HIGH (tmp),
6041 &lpart, &hpart);
6042 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6043 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6044 TREE_CONSTANT_OVERFLOW (prod));
6046 else if (tree_int_cst_sgn (arg01) >= 0)
6048 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6049 switch (tree_int_cst_sgn (arg1))
6051 case -1:
6052 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6053 hi = prod;
6054 break;
6056 case 0:
6057 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6058 hi = tmp;
6059 break;
6061 case 1:
6062 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6063 lo = prod;
6064 break;
6066 default:
6067 gcc_unreachable ();
6070 else
6072 /* A negative divisor reverses the relational operators. */
6073 code = swap_tree_comparison (code);
6075 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6076 switch (tree_int_cst_sgn (arg1))
6078 case -1:
6079 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6080 lo = prod;
6081 break;
6083 case 0:
6084 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6085 lo = tmp;
6086 break;
6088 case 1:
6089 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6090 hi = prod;
6091 break;
6093 default:
6094 gcc_unreachable ();
6098 switch (code)
6100 case EQ_EXPR:
6101 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6102 return omit_one_operand (type, integer_zero_node, arg00);
6103 if (TREE_OVERFLOW (hi))
6104 return fold_build2 (GE_EXPR, type, arg00, lo);
6105 if (TREE_OVERFLOW (lo))
6106 return fold_build2 (LE_EXPR, type, arg00, hi);
6107 return build_range_check (type, arg00, 1, lo, hi);
6109 case NE_EXPR:
6110 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6111 return omit_one_operand (type, integer_one_node, arg00);
6112 if (TREE_OVERFLOW (hi))
6113 return fold_build2 (LT_EXPR, type, arg00, lo);
6114 if (TREE_OVERFLOW (lo))
6115 return fold_build2 (GT_EXPR, type, arg00, hi);
6116 return build_range_check (type, arg00, 0, lo, hi);
6118 case LT_EXPR:
6119 if (TREE_OVERFLOW (lo))
6120 return omit_one_operand (type, integer_zero_node, arg00);
6121 return fold_build2 (LT_EXPR, type, arg00, lo);
6123 case LE_EXPR:
6124 if (TREE_OVERFLOW (hi))
6125 return omit_one_operand (type, integer_one_node, arg00);
6126 return fold_build2 (LE_EXPR, type, arg00, hi);
6128 case GT_EXPR:
6129 if (TREE_OVERFLOW (hi))
6130 return omit_one_operand (type, integer_zero_node, arg00);
6131 return fold_build2 (GT_EXPR, type, arg00, hi);
6133 case GE_EXPR:
6134 if (TREE_OVERFLOW (lo))
6135 return omit_one_operand (type, integer_one_node, arg00);
6136 return fold_build2 (GE_EXPR, type, arg00, lo);
6138 default:
6139 break;
6142 return NULL_TREE;
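/* A worked instance of the range construction above (unsigned
   arithmetic assumed): for x / 4 == 3 with unsigned x, prod is 12
   and hi is 12 + 3 = 15, so the comparison folds to the range check
   12 <= x && x <= 15.  */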
6146 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6147 equality/inequality test, then return a simplified form of the test
6148 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6149 result type. */
6151 static tree
6152 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6153 tree result_type)
6155 /* If this is testing a single bit, we can optimize the test. */
6156 if ((code == NE_EXPR || code == EQ_EXPR)
6157 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6158 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6160 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6161 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6162 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6164 if (arg00 != NULL_TREE
6165 /* This is only a win if casting to a signed type is cheap,
6166 i.e. when arg00's type is not a partial mode. */
6167 && TYPE_PRECISION (TREE_TYPE (arg00))
6168 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6170 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6171 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6172 result_type, fold_convert (stype, arg00),
6173 build_int_cst (stype, 0));
6177 return NULL_TREE;
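/* Example of the sign test (assuming a 32-bit int): for
   (x & 0x80000000) != 0, the mask is the sign bit of x, so the test
   folds to (int) x < 0; the == 0 form folds to (int) x >= 0.  */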
6180 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6181 equality/inequality test, then return a simplified form of
6182 the test using shifts and logical operations. Otherwise return
6183 NULL. RESULT_TYPE is the desired result type. */
6185 tree
6186 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6187 tree result_type)
6189 /* If this is testing a single bit, we can optimize the test. */
6190 if ((code == NE_EXPR || code == EQ_EXPR)
6191 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6192 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6194 tree inner = TREE_OPERAND (arg0, 0);
6195 tree type = TREE_TYPE (arg0);
6196 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6197 enum machine_mode operand_mode = TYPE_MODE (type);
6198 int ops_unsigned;
6199 tree signed_type, unsigned_type, intermediate_type;
6200 tree tem;
6202 /* First, see if we can fold the single bit test into a sign-bit
6203 test. */
6204 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6205 result_type);
6206 if (tem)
6207 return tem;
6209 /* Otherwise we have (A & C) != 0 where C is a single bit,
6210 convert that into ((A >> C2) & 1), where C2 = log2 (C).
6211 Similarly for (A & C) == 0. */
6213 /* If INNER is a right shift of a constant and it plus BITNUM does
6214 not overflow, adjust BITNUM and INNER. */
6215 if (TREE_CODE (inner) == RSHIFT_EXPR
6216 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6217 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6218 && bitnum < TYPE_PRECISION (type)
6219 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6220 bitnum - TYPE_PRECISION (type)))
6222 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6223 inner = TREE_OPERAND (inner, 0);
6226 /* If we are going to be able to omit the AND below, we must do our
6227 operations as unsigned. If we must use the AND, we have a choice.
6228 Normally unsigned is faster, but for some machines signed is. */
6229 #ifdef LOAD_EXTEND_OP
6230 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6231 && !flag_syntax_only) ? 0 : 1;
6232 #else
6233 ops_unsigned = 1;
6234 #endif
6236 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6237 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6238 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6239 inner = fold_convert (intermediate_type, inner);
6241 if (bitnum != 0)
6242 inner = build2 (RSHIFT_EXPR, intermediate_type,
6243 inner, size_int (bitnum));
6245 if (code == EQ_EXPR)
6246 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6247 inner, integer_one_node);
6249 /* Put the AND last so it can combine with more things. */
6250 inner = build2 (BIT_AND_EXPR, intermediate_type,
6251 inner, integer_one_node);
6253 /* Make sure to return the proper type. */
6254 inner = fold_convert (result_type, inner);
6256 return inner;
6258 return NULL_TREE;
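/* Example of the shift form (illustrative): when the sign-bit trick
   above does not apply, (x & 8) != 0 folds to (x >> 3) & 1, and
   (x & 8) == 0 folds to ((x >> 3) ^ 1) & 1.  */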
6261 /* Check whether we are allowed to reorder operands arg0 and arg1,
6262 such that the evaluation of arg1 occurs before arg0. */
6264 static bool
6265 reorder_operands_p (tree arg0, tree arg1)
6267 if (! flag_evaluation_order)
6268 return true;
6269 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6270 return true;
6271 return ! TREE_SIDE_EFFECTS (arg0)
6272 && ! TREE_SIDE_EFFECTS (arg1);
6275 /* Test whether it is preferable to swap two operands, ARG0 and
6276 ARG1, for example because ARG0 is an integer constant and ARG1
6277 isn't. If REORDER is true, only recommend swapping if we can
6278 evaluate the operands in reverse order. */
6280 bool
6281 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6283 STRIP_SIGN_NOPS (arg0);
6284 STRIP_SIGN_NOPS (arg1);
6286 if (TREE_CODE (arg1) == INTEGER_CST)
6287 return 0;
6288 if (TREE_CODE (arg0) == INTEGER_CST)
6289 return 1;
6291 if (TREE_CODE (arg1) == REAL_CST)
6292 return 0;
6293 if (TREE_CODE (arg0) == REAL_CST)
6294 return 1;
6296 if (TREE_CODE (arg1) == COMPLEX_CST)
6297 return 0;
6298 if (TREE_CODE (arg0) == COMPLEX_CST)
6299 return 1;
6301 if (TREE_CONSTANT (arg1))
6302 return 0;
6303 if (TREE_CONSTANT (arg0))
6304 return 1;
6306 if (optimize_size)
6307 return 0;
6309 if (reorder && flag_evaluation_order
6310 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6311 return 0;
6313 if (DECL_P (arg1))
6314 return 0;
6315 if (DECL_P (arg0))
6316 return 1;
6318 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6319 for commutative and comparison operators. Ensuring a canonical
6320 form allows the optimizers to find additional redundancies without
6321 having to explicitly check for both orderings. */
6322 if (TREE_CODE (arg0) == SSA_NAME
6323 && TREE_CODE (arg1) == SSA_NAME
6324 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6325 return 1;
6327 return 0;
6330 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6331 ARG0 is extended to a wider type. */
6333 static tree
6334 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6336 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6337 tree arg1_unw;
6338 tree shorter_type, outer_type;
6339 tree min, max;
6340 bool above, below;
6342 if (arg0_unw == arg0)
6343 return NULL_TREE;
6344 shorter_type = TREE_TYPE (arg0_unw);
6346 #ifdef HAVE_canonicalize_funcptr_for_compare
6347 /* Disable this optimization if we're casting a function pointer
6348 type on targets that require function pointer canonicalization. */
6349 if (HAVE_canonicalize_funcptr_for_compare
6350 && TREE_CODE (shorter_type) == POINTER_TYPE
6351 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6352 return NULL_TREE;
6353 #endif
6355 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6356 return NULL_TREE;
6358 arg1_unw = get_unwidened (arg1, shorter_type);
6360 /* If possible, express the comparison in the shorter mode. */
6361 if ((code == EQ_EXPR || code == NE_EXPR
6362 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6363 && (TREE_TYPE (arg1_unw) == shorter_type
6364 || (TREE_CODE (arg1_unw) == INTEGER_CST
6365 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6366 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6367 && int_fits_type_p (arg1_unw, shorter_type))))
6368 return fold_build2 (code, type, arg0_unw,
6369 fold_convert (shorter_type, arg1_unw));
6371 if (TREE_CODE (arg1_unw) != INTEGER_CST
6372 || TREE_CODE (shorter_type) != INTEGER_TYPE
6373 || !int_fits_type_p (arg1_unw, shorter_type))
6374 return NULL_TREE;
6376 /* If we are comparing with an integer that does not fit into the range
6377 of the shorter type, the result is known. */
6378 outer_type = TREE_TYPE (arg1_unw);
6379 min = lower_bound_in_type (outer_type, shorter_type);
6380 max = upper_bound_in_type (outer_type, shorter_type);
6382 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6383 max, arg1_unw));
6384 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6385 arg1_unw, min));
6387 switch (code)
6389 case EQ_EXPR:
6390 if (above || below)
6391 return omit_one_operand (type, integer_zero_node, arg0);
6392 break;
6394 case NE_EXPR:
6395 if (above || below)
6396 return omit_one_operand (type, integer_one_node, arg0);
6397 break;
6399 case LT_EXPR:
6400 case LE_EXPR:
6401 if (above)
6402 return omit_one_operand (type, integer_one_node, arg0);
6403 else if (below)
6404 return omit_one_operand (type, integer_zero_node, arg0);
6406 case GT_EXPR:
6407 case GE_EXPR:
6408 if (above)
6409 return omit_one_operand (type, integer_zero_node, arg0);
6410 else if (below)
6411 return omit_one_operand (type, integer_one_node, arg0);
6413 default:
6414 break;
6417 return NULL_TREE;
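/* Worked example (assuming 16-bit short and 32-bit int): for
   short s, the comparison (int) s == 100000 is known to be false,
   since 100000 is above the upper bound 32767 of the shorter type,
   so it folds to 0 while preserving any side effects of s.  */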
6420 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where ARG0
6421 is a conversion that changes only the signedness of its operand. */
6423 static tree
6424 fold_sign_changed_comparison (enum tree_code code, tree type,
6425 tree arg0, tree arg1)
6427 tree arg0_inner, tmp;
6428 tree inner_type, outer_type;
6430 if (TREE_CODE (arg0) != NOP_EXPR
6431 && TREE_CODE (arg0) != CONVERT_EXPR)
6432 return NULL_TREE;
6434 outer_type = TREE_TYPE (arg0);
6435 arg0_inner = TREE_OPERAND (arg0, 0);
6436 inner_type = TREE_TYPE (arg0_inner);
6438 #ifdef HAVE_canonicalize_funcptr_for_compare
6439 /* Disable this optimization if we're casting a function pointer
6440 type on targets that require function pointer canonicalization. */
6441 if (HAVE_canonicalize_funcptr_for_compare
6442 && TREE_CODE (inner_type) == POINTER_TYPE
6443 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6444 return NULL_TREE;
6445 #endif
6447 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6448 return NULL_TREE;
6450 if (TREE_CODE (arg1) != INTEGER_CST
6451 && !((TREE_CODE (arg1) == NOP_EXPR
6452 || TREE_CODE (arg1) == CONVERT_EXPR)
6453 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6454 return NULL_TREE;
6456 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6457 && code != NE_EXPR
6458 && code != EQ_EXPR)
6459 return NULL_TREE;
6461 if (TREE_CODE (arg1) == INTEGER_CST)
6463 tmp = build_int_cst_wide (inner_type,
6464 TREE_INT_CST_LOW (arg1),
6465 TREE_INT_CST_HIGH (arg1));
6466 arg1 = force_fit_type (tmp, 0,
6467 TREE_OVERFLOW (arg1),
6468 TREE_CONSTANT_OVERFLOW (arg1));
6470 else
6471 arg1 = fold_convert (inner_type, arg1);
6473 return fold_build2 (code, type, arg0_inner, arg1);
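/* For example (types assumed for illustration): with unsigned int u,
   the test (int) u == 5 drops the conversion and becomes u == 5u,
   since the precisions match and equality is insensitive to
   signedness.  */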
6476 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6477 the step of the array. Reconstructs s and delta in the case of s * delta
6478 being an integer constant (and thus already folded).
6479 ADDR is the address. OP1 is the multiplicative expression.
6480 If the function succeeds, the new address expression is returned. Otherwise
6481 NULL_TREE is returned. */
6483 static tree
6484 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6486 tree s, delta, step;
6487 tree ref = TREE_OPERAND (addr, 0), pref;
6488 tree ret, pos;
6489 tree itype;
6491 /* Canonicalize op1 into a possibly non-constant delta
6492 and an INTEGER_CST s. */
6493 if (TREE_CODE (op1) == MULT_EXPR)
6495 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6497 STRIP_NOPS (arg0);
6498 STRIP_NOPS (arg1);
6500 if (TREE_CODE (arg0) == INTEGER_CST)
6502 s = arg0;
6503 delta = arg1;
6505 else if (TREE_CODE (arg1) == INTEGER_CST)
6507 s = arg1;
6508 delta = arg0;
6510 else
6511 return NULL_TREE;
6513 else if (TREE_CODE (op1) == INTEGER_CST)
6515 delta = op1;
6516 s = NULL_TREE;
6518 else
6520 /* Treat op1 as delta * 1. */
6521 delta = op1;
6522 s = integer_one_node;
6525 for (;; ref = TREE_OPERAND (ref, 0))
6527 if (TREE_CODE (ref) == ARRAY_REF)
6529 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6530 if (! itype)
6531 continue;
6533 step = array_ref_element_size (ref);
6534 if (TREE_CODE (step) != INTEGER_CST)
6535 continue;
6537 if (s)
6539 if (! tree_int_cst_equal (step, s))
6540 continue;
6542 else
6544 /* Check whether delta is a multiple of step. */
6545 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6546 if (! tmp)
6547 continue;
6548 delta = tmp;
6551 break;
6554 if (!handled_component_p (ref))
6555 return NULL_TREE;
6558 /* We found a suitable array reference. So copy everything up to it,
6559 and replace the index. */
6561 pref = TREE_OPERAND (addr, 0);
6562 ret = copy_node (pref);
6563 pos = ret;
6565 while (pref != ref)
6567 pref = TREE_OPERAND (pref, 0);
6568 TREE_OPERAND (pos, 0) = copy_node (pref);
6569 pos = TREE_OPERAND (pos, 0);
6572 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6573 fold_convert (itype,
6574 TREE_OPERAND (pos, 1)),
6575 fold_convert (itype, delta));
6577 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
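/* Worked example (assuming 4-byte array elements): for
   &a[i] + j * 4, s is 4 and delta is j; s matches the element size,
   so the result is &a[i + j].  */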
6581 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6582 means A >= Y && A != MAX, but in this case we know that
6583 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6585 static tree
6586 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6588 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6590 if (TREE_CODE (bound) == LT_EXPR)
6591 a = TREE_OPERAND (bound, 0);
6592 else if (TREE_CODE (bound) == GT_EXPR)
6593 a = TREE_OPERAND (bound, 1);
6594 else
6595 return NULL_TREE;
6597 typea = TREE_TYPE (a);
6598 if (!INTEGRAL_TYPE_P (typea)
6599 && !POINTER_TYPE_P (typea))
6600 return NULL_TREE;
6602 if (TREE_CODE (ineq) == LT_EXPR)
6604 a1 = TREE_OPERAND (ineq, 1);
6605 y = TREE_OPERAND (ineq, 0);
6607 else if (TREE_CODE (ineq) == GT_EXPR)
6609 a1 = TREE_OPERAND (ineq, 0);
6610 y = TREE_OPERAND (ineq, 1);
6612 else
6613 return NULL_TREE;
6615 if (TREE_TYPE (a1) != typea)
6616 return NULL_TREE;
6618 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6619 if (!integer_onep (diff))
6620 return NULL_TREE;
6622 return fold_build2 (GE_EXPR, type, a, y);
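/* Illustration of the reasoning above: given BOUND a < x and INEQ
   a + 1 > y, the bound guarantees a != TYPE_MAX, so a + 1 cannot
   wrap and a + 1 > y folds safely to a >= y.  */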
6625 /* Fold a sum or difference in which at least one operand is a multiplication.
6626 Returns the folded tree or NULL if no simplification could be made. */
6628 static tree
6629 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6631 tree arg00, arg01, arg10, arg11;
6632 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6634 /* (A * C) +- (B * C) -> (A+-B) * C.
6635 (A * C) +- A -> A * (C+-1).
6636 We are most concerned about the case where C is a constant,
6637 but other combinations show up during loop reduction. Since
6638 it is not difficult, try all four possibilities. */
6640 if (TREE_CODE (arg0) == MULT_EXPR)
6642 arg00 = TREE_OPERAND (arg0, 0);
6643 arg01 = TREE_OPERAND (arg0, 1);
6645 else
6647 arg00 = arg0;
6648 if (!FLOAT_TYPE_P (type))
6649 arg01 = build_int_cst (type, 1);
6650 else
6651 arg01 = build_real (type, dconst1);
6653 if (TREE_CODE (arg1) == MULT_EXPR)
6655 arg10 = TREE_OPERAND (arg1, 0);
6656 arg11 = TREE_OPERAND (arg1, 1);
6658 else
6660 arg10 = arg1;
6661 if (!FLOAT_TYPE_P (type))
6662 arg11 = build_int_cst (type, 1);
6663 else
6664 arg11 = build_real (type, dconst1);
6666 same = NULL_TREE;
6668 if (operand_equal_p (arg01, arg11, 0))
6669 same = arg01, alt0 = arg00, alt1 = arg10;
6670 else if (operand_equal_p (arg00, arg10, 0))
6671 same = arg00, alt0 = arg01, alt1 = arg11;
6672 else if (operand_equal_p (arg00, arg11, 0))
6673 same = arg00, alt0 = arg01, alt1 = arg10;
6674 else if (operand_equal_p (arg01, arg10, 0))
6675 same = arg01, alt0 = arg00, alt1 = arg11;
6677 /* No identical multiplicands; see if we can find a common
6678 power-of-two factor in non-power-of-two multiplies. This
6679 can help in multi-dimensional array access. */
6680 else if (host_integerp (arg01, 0)
6681 && host_integerp (arg11, 0))
6683 HOST_WIDE_INT int01, int11, tmp;
6684 bool swap = false;
6685 tree maybe_same;
6686 int01 = TREE_INT_CST_LOW (arg01);
6687 int11 = TREE_INT_CST_LOW (arg11);
6689 /* Move min of absolute values to int11. */
6690 if ((int01 >= 0 ? int01 : -int01)
6691 < (int11 >= 0 ? int11 : -int11))
6693 tmp = int01, int01 = int11, int11 = tmp;
6694 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6695 maybe_same = arg01;
6696 swap = true;
6698 else
6699 maybe_same = arg11;
6701 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6703 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6704 build_int_cst (TREE_TYPE (arg00),
6705 int01 / int11));
6706 alt1 = arg10;
6707 same = maybe_same;
6708 if (swap)
6709 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6713 if (same)
6714 return fold_build2 (MULT_EXPR, type,
6715 fold_build2 (code, type,
6716 fold_convert (type, alt0),
6717 fold_convert (type, alt1)),
6718 fold_convert (type, same));
6720 return NULL_TREE;
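/* Two sketches of the factoring above (constants illustrative):
   a * 3 + b * 3 becomes (a + b) * 3 directly, while a * 12 + b * 4
   has no identical multiplicand but shares the power-of-two factor
   4, so it becomes (a * 3 + b) * 4.  */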
6723 /* Fold a unary expression of code CODE and type TYPE with operand
6724 OP0. Return the folded expression if folding is successful.
6725 Otherwise, return NULL_TREE. */
6727 tree
6728 fold_unary (enum tree_code code, tree type, tree op0)
6730 tree tem;
6731 tree arg0;
6732 enum tree_code_class kind = TREE_CODE_CLASS (code);
6734 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6735 && TREE_CODE_LENGTH (code) == 1);
6737 arg0 = op0;
6738 if (arg0)
6740 if (code == NOP_EXPR || code == CONVERT_EXPR
6741 || code == FLOAT_EXPR || code == ABS_EXPR)
6743 /* Don't use STRIP_NOPS, because signedness of argument type
6744 matters. */
6745 STRIP_SIGN_NOPS (arg0);
6747 else
6749 /* Strip any conversions that don't change the mode. This
6750 is safe for every expression, except for a comparison
6751 expression because its signedness is derived from its
6752 operands.
6754 Note that this is done as an internal manipulation within
6755 the constant folder, in order to find the simplest
6756 representation of the arguments so that their form can be
6757 studied. In any case, the appropriate type conversions
6758 should be put back in the tree that will get out of the
6759 constant folder. */
6760 STRIP_NOPS (arg0);
6764 if (TREE_CODE_CLASS (code) == tcc_unary)
6766 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6767 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6768 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6769 else if (TREE_CODE (arg0) == COND_EXPR)
6771 tree arg01 = TREE_OPERAND (arg0, 1);
6772 tree arg02 = TREE_OPERAND (arg0, 2);
6773 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6774 arg01 = fold_build1 (code, type, arg01);
6775 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6776 arg02 = fold_build1 (code, type, arg02);
6777 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6778 arg01, arg02);
6780 /* If this was a conversion, and all we did was to move it
6781 inside the COND_EXPR, bring it back out. But leave it if
6782 it is a conversion from integer to integer and the
6783 result precision is no wider than a word since such a
6784 conversion is cheap and may be optimized away by combine,
6785 while it couldn't if it were outside the COND_EXPR. Then return
6786 so we don't get into an infinite recursion loop taking the
6787 conversion out and then back in. */
6789 if ((code == NOP_EXPR || code == CONVERT_EXPR
6790 || code == NON_LVALUE_EXPR)
6791 && TREE_CODE (tem) == COND_EXPR
6792 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6793 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6794 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6795 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6796 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6797 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6798 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6799 && (INTEGRAL_TYPE_P
6800 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6801 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6802 || flag_syntax_only))
6803 tem = build1 (code, type,
6804 build3 (COND_EXPR,
6805 TREE_TYPE (TREE_OPERAND
6806 (TREE_OPERAND (tem, 1), 0)),
6807 TREE_OPERAND (tem, 0),
6808 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6809 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6810 return tem;
6812 else if (COMPARISON_CLASS_P (arg0))
6814 if (TREE_CODE (type) == BOOLEAN_TYPE)
6816 arg0 = copy_node (arg0);
6817 TREE_TYPE (arg0) = type;
6818 return arg0;
6820 else if (TREE_CODE (type) != INTEGER_TYPE)
6821 return fold_build3 (COND_EXPR, type, arg0,
6822 fold_build1 (code, type,
6823 integer_one_node),
6824 fold_build1 (code, type,
6825 integer_zero_node));
6829 switch (code)
6831 case NOP_EXPR:
6832 case FLOAT_EXPR:
6833 case CONVERT_EXPR:
6834 case FIX_TRUNC_EXPR:
6835 case FIX_CEIL_EXPR:
6836 case FIX_FLOOR_EXPR:
6837 case FIX_ROUND_EXPR:
6838 if (TREE_TYPE (op0) == type)
6839 return op0;
6841 /* If we have (type) (a CMP b) and type is an integral type, return
6842 new expression involving the new type. */
6843 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6844 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6845 TREE_OPERAND (op0, 1));
6847 /* Handle cases of two conversions in a row. */
6848 if (TREE_CODE (op0) == NOP_EXPR
6849 || TREE_CODE (op0) == CONVERT_EXPR)
6851 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6852 tree inter_type = TREE_TYPE (op0);
6853 int inside_int = INTEGRAL_TYPE_P (inside_type);
6854 int inside_ptr = POINTER_TYPE_P (inside_type);
6855 int inside_float = FLOAT_TYPE_P (inside_type);
6856 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6857 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6858 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6859 int inter_int = INTEGRAL_TYPE_P (inter_type);
6860 int inter_ptr = POINTER_TYPE_P (inter_type);
6861 int inter_float = FLOAT_TYPE_P (inter_type);
6862 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6863 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6864 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6865 int final_int = INTEGRAL_TYPE_P (type);
6866 int final_ptr = POINTER_TYPE_P (type);
6867 int final_float = FLOAT_TYPE_P (type);
6868 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6869 unsigned int final_prec = TYPE_PRECISION (type);
6870 int final_unsignedp = TYPE_UNSIGNED (type);
6872 /* In addition to the cases of two conversions in a row
6873 handled below, if we are converting something to its own
6874 type via an object of identical or wider precision, neither
6875 conversion is needed. */
6876 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6877 && ((inter_int && final_int) || (inter_float && final_float))
6878 && inter_prec >= final_prec)
6879 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6881 /* Likewise, if the intermediate and final types are either both
6882 float or both integer, we don't need the middle conversion if
6883 it is wider than the final type and doesn't change the signedness
6884 (for integers). Avoid this if the final type is a pointer
6885 since then we sometimes need the inner conversion. Likewise if
6886 the outer has a precision not equal to the size of its mode. */
6887 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6888 || (inter_float && inside_float)
6889 || (inter_vec && inside_vec))
6890 && inter_prec >= inside_prec
6891 && (inter_float || inter_vec
6892 || inter_unsignedp == inside_unsignedp)
6893 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6894 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6895 && ! final_ptr
6896 && (! final_vec || inter_prec == inside_prec))
6897 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6899 /* If we have a sign-extension of a zero-extended value, we can
6900 replace that by a single zero-extension. */
6901 if (inside_int && inter_int && final_int
6902 && inside_prec < inter_prec && inter_prec < final_prec
6903 && inside_unsignedp && !inter_unsignedp)
6904 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6906 /* Two conversions in a row are not needed unless:
6907 - some conversion is floating-point (overstrict for now), or
6908 - some conversion is a vector (overstrict for now), or
6909 - the intermediate type is narrower than both initial and
6910 final, or
6911 - the intermediate type and innermost type differ in signedness,
6912 and the outermost type is wider than the intermediate, or
6913 - the initial type is a pointer type and the precisions of the
6914 intermediate and final types differ, or
6915 - the final type is a pointer type and the precisions of the
6916 initial and intermediate types differ. */
6917 if (! inside_float && ! inter_float && ! final_float
6918 && ! inside_vec && ! inter_vec && ! final_vec
6919 && (inter_prec > inside_prec || inter_prec > final_prec)
6920 && ! (inside_int && inter_int
6921 && inter_unsignedp != inside_unsignedp
6922 && inter_prec < final_prec)
6923 && ((inter_unsignedp && inter_prec > inside_prec)
6924 == (final_unsignedp && final_prec > inter_prec))
6925 && ! (inside_ptr && inter_prec != final_prec)
6926 && ! (final_ptr && inside_prec != inter_prec)
6927 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6928 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6929 && ! final_ptr)
6930 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
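/* Example of the extension collapses handled above (types assumed
   for illustration): for unsigned short x, (long) (int) x first
   zero-extends to 32 bits and then sign-extends to 64; since the
   intermediate value is always nonnegative, this is the same as the
   single zero-extension (long) x.  */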
6933 /* Handle (T *)&A.B.C for A being of type T and B and C
6934 living at offset zero. This occurs frequently in
6935 C++ upcasting and then accessing the base. */
6936 if (TREE_CODE (op0) == ADDR_EXPR
6937 && POINTER_TYPE_P (type)
6938 && handled_component_p (TREE_OPERAND (op0, 0)))
6940 HOST_WIDE_INT bitsize, bitpos;
6941 tree offset;
6942 enum machine_mode mode;
6943 int unsignedp, volatilep;
6944 tree base = TREE_OPERAND (op0, 0);
6945 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6946 &mode, &unsignedp, &volatilep, false);
6947 /* If the reference was to a (constant) zero offset, we can use
6948 the address of the base if it has the same base type
6949 as the result type. */
6950 if (! offset && bitpos == 0
6951 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6952 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6953 return fold_convert (type, build_fold_addr_expr (base));
6956 if (TREE_CODE (op0) == MODIFY_EXPR
6957 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6958 /* Detect assigning a bitfield. */
6959 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6960 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6962 /* Don't leave an assignment inside a conversion
6963 unless assigning a bitfield. */
6964 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6965 /* First do the assignment, then return converted constant. */
6966 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6967 TREE_NO_WARNING (tem) = 1;
6968 TREE_USED (tem) = 1;
6969 return tem;
6972 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6973 constant (if x has signed type, the sign bit cannot be set
6974 in c). This folds extension into the BIT_AND_EXPR. */
6975 if (INTEGRAL_TYPE_P (type)
6976 && TREE_CODE (type) != BOOLEAN_TYPE
6977 && TREE_CODE (op0) == BIT_AND_EXPR
6978 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6980 tree and = op0;
6981 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6982 int change = 0;
6984 if (TYPE_UNSIGNED (TREE_TYPE (and))
6985 || (TYPE_PRECISION (type)
6986 <= TYPE_PRECISION (TREE_TYPE (and))))
6987 change = 1;
6988 else if (TYPE_PRECISION (TREE_TYPE (and1))
6989 <= HOST_BITS_PER_WIDE_INT
6990 && host_integerp (and1, 1))
6992 unsigned HOST_WIDE_INT cst;
6994 cst = tree_low_cst (and1, 1);
6995 cst &= (HOST_WIDE_INT) -1
6996 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6997 change = (cst == 0);
6998 #ifdef LOAD_EXTEND_OP
6999 if (change
7000 && !flag_syntax_only
7001 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7002 == ZERO_EXTEND))
7004 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7005 and0 = fold_convert (uns, and0);
7006 and1 = fold_convert (uns, and1);
7008 #endif
7010 if (change)
7012 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7013 TREE_INT_CST_HIGH (and1));
7014 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7015 TREE_CONSTANT_OVERFLOW (and1));
7016 return fold_build2 (BIT_AND_EXPR, type,
7017 fold_convert (type, and0), tem);
7021 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7022 T2 being pointers to types of the same size. */
7023 if (POINTER_TYPE_P (type)
7024 && BINARY_CLASS_P (arg0)
7025 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7026 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7028 tree arg00 = TREE_OPERAND (arg0, 0);
7029 tree t0 = type;
7030 tree t1 = TREE_TYPE (arg00);
7031 tree tt0 = TREE_TYPE (t0);
7032 tree tt1 = TREE_TYPE (t1);
7033 tree s0 = TYPE_SIZE (tt0);
7034 tree s1 = TYPE_SIZE (tt1);
7036 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7037 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7038 TREE_OPERAND (arg0, 1));
7041 tem = fold_convert_const (code, type, arg0);
7042 return tem ? tem : NULL_TREE;
7044 case VIEW_CONVERT_EXPR:
7045 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7046 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7047 return NULL_TREE;
7049 case NEGATE_EXPR:
7050 if (negate_expr_p (arg0))
7051 return fold_convert (type, negate_expr (arg0));
7052 return NULL_TREE;
7054 case ABS_EXPR:
7055 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7056 return fold_abs_const (arg0, type);
7057 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7058 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7059 /* Convert fabs((double)float) into (double)fabsf(float). */
7060 else if (TREE_CODE (arg0) == NOP_EXPR
7061 && TREE_CODE (type) == REAL_TYPE)
7063 tree targ0 = strip_float_extensions (arg0);
7064 if (targ0 != arg0)
7065 return fold_convert (type, fold_build1 (ABS_EXPR,
7066 TREE_TYPE (targ0),
7067 targ0));
7069 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7070 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7071 return arg0;
7073 /* Strip sign ops from argument. */
7074 if (TREE_CODE (type) == REAL_TYPE)
7076 tem = fold_strip_sign_ops (arg0);
7077 if (tem)
7078 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7080 return NULL_TREE;
7082 case CONJ_EXPR:
7083 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7084 return fold_convert (type, arg0);
7085 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7086 return build2 (COMPLEX_EXPR, type,
7087 TREE_OPERAND (arg0, 0),
7088 negate_expr (TREE_OPERAND (arg0, 1)));
7089 else if (TREE_CODE (arg0) == COMPLEX_CST)
7090 return build_complex (type, TREE_REALPART (arg0),
7091 negate_expr (TREE_IMAGPART (arg0)));
7092 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7093 return fold_build2 (TREE_CODE (arg0), type,
7094 fold_build1 (CONJ_EXPR, type,
7095 TREE_OPERAND (arg0, 0)),
7096 fold_build1 (CONJ_EXPR, type,
7097 TREE_OPERAND (arg0, 1)));
7098 else if (TREE_CODE (arg0) == CONJ_EXPR)
7099 return TREE_OPERAND (arg0, 0);
7100 return NULL_TREE;
7102 case BIT_NOT_EXPR:
7103 if (TREE_CODE (arg0) == INTEGER_CST)
7104 return fold_not_const (arg0, type);
7105 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7106 return TREE_OPERAND (arg0, 0);
7107 /* Convert ~ (-A) to A - 1. */
7108 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7109 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7110 build_int_cst (type, 1));
7111 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7112 else if (INTEGRAL_TYPE_P (type)
7113 && ((TREE_CODE (arg0) == MINUS_EXPR
7114 && integer_onep (TREE_OPERAND (arg0, 1)))
7115 || (TREE_CODE (arg0) == PLUS_EXPR
7116 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7117 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7118 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7119 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7120 && (tem = fold_unary (BIT_NOT_EXPR, type,
7121 fold_convert (type,
7122 TREE_OPERAND (arg0, 0)))))
7123 return fold_build2 (BIT_XOR_EXPR, type, tem,
7124 fold_convert (type, TREE_OPERAND (arg0, 1)));
7125 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7126 && (tem = fold_unary (BIT_NOT_EXPR, type,
7127 fold_convert (type,
7128 TREE_OPERAND (arg0, 1)))))
7129 return fold_build2 (BIT_XOR_EXPR, type,
7130 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7132 return NULL_TREE;
7134 case TRUTH_NOT_EXPR:
7135 /* The argument to invert_truthvalue must have Boolean type. */
7136 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7137 arg0 = fold_convert (boolean_type_node, arg0);
7139 /* Note that the operand of this must be an int
7140 and its values must be 0 or 1.
7141 ("true" is a fixed value perhaps depending on the language,
7142 but we don't handle values other than 1 correctly yet.) */
7143 tem = invert_truthvalue (arg0);
7144 /* Avoid infinite recursion. */
7145 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7146 return NULL_TREE;
7147 return fold_convert (type, tem);
7149 case REALPART_EXPR:
7150 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7151 return NULL_TREE;
7152 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7153 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7154 TREE_OPERAND (arg0, 1));
7155 else if (TREE_CODE (arg0) == COMPLEX_CST)
7156 return TREE_REALPART (arg0);
7157 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7158 return fold_build2 (TREE_CODE (arg0), type,
7159 fold_build1 (REALPART_EXPR, type,
7160 TREE_OPERAND (arg0, 0)),
7161 fold_build1 (REALPART_EXPR, type,
7162 TREE_OPERAND (arg0, 1)));
7163 return NULL_TREE;
7165 case IMAGPART_EXPR:
7166 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7167 return fold_convert (type, integer_zero_node);
7168 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7169 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7170 TREE_OPERAND (arg0, 0));
7171 else if (TREE_CODE (arg0) == COMPLEX_CST)
7172 return TREE_IMAGPART (arg0);
7173 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7174 return fold_build2 (TREE_CODE (arg0), type,
7175 fold_build1 (IMAGPART_EXPR, type,
7176 TREE_OPERAND (arg0, 0)),
7177 fold_build1 (IMAGPART_EXPR, type,
7178 TREE_OPERAND (arg0, 1)));
7179 return NULL_TREE;
7181 default:
7182 return NULL_TREE;
7183 } /* switch (code) */
7186 /* Fold a binary expression of code CODE and type TYPE with operands
7187 OP0 and OP1. Return the folded expression if folding is
7188 successful. Otherwise, return NULL_TREE. */
7190 tree
7191 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7193 tree t1 = NULL_TREE;
7194 tree tem;
7195 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7196 enum tree_code_class kind = TREE_CODE_CLASS (code);
7198 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7199 && TREE_CODE_LENGTH (code) == 2
7200 && op0 != NULL_TREE
7201 && op1 != NULL_TREE);
7203 arg0 = op0;
7204 arg1 = op1;
7206 /* Strip any conversions that don't change the mode. This is
7207 safe for every expression, except for a comparison expression
7208 because its signedness is derived from its operands. So, in
7209 the latter case, only strip conversions that don't change the
7210 signedness.
7212 Note that this is done as an internal manipulation within the
7213 constant folder, in order to find the simplest representation
7214 of the arguments so that their form can be studied. In any
7215 case, the appropriate type conversions should be put back in
7216 the tree that will get out of the constant folder. */
7218 if (kind == tcc_comparison)
7220 STRIP_SIGN_NOPS (arg0);
7221 STRIP_SIGN_NOPS (arg1);
7223 else
7225 STRIP_NOPS (arg0);
7226 STRIP_NOPS (arg1);
7229 /* Note that TREE_CONSTANT isn't enough: static var addresses are
7230 constant but we can't do arithmetic on them. */
7231 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7232 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7233 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
7234 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
7236 if (kind == tcc_binary)
7237 tem = const_binop (code, arg0, arg1, 0);
7238 else if (kind == tcc_comparison)
7239 tem = fold_relational_const (code, type, arg0, arg1);
7240 else
7241 tem = NULL_TREE;
7243 if (tem != NULL_TREE)
7245 if (TREE_TYPE (tem) != type)
7246 tem = fold_convert (type, tem);
7247 return tem;
7251 /* If this is a commutative operation, and ARG0 is a constant, move it
7252 to ARG1 to reduce the number of tests below. */
7253 if (commutative_tree_code (code)
7254 && tree_swap_operands_p (arg0, arg1, true))
7255 return fold_build2 (code, type, op1, op0);
7257 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
7259 First check for cases where an arithmetic operation is applied to a
7260 compound, conditional, or comparison operation. Push the arithmetic
7261 operation inside the compound or conditional to see if any folding
7262 can then be done. Convert comparison to conditional for this purpose.
7263 This also optimizes non-constant cases that used to be done in
7264 expand_expr.
7266 Before we do that, see if this is a BIT_AND_EXPR, a BIT_IOR_EXPR,
7267 an EQ_EXPR or an NE_EXPR where one operand is a truth value and the
7268 other is a truth value or a BIT_AND_EXPR with the constant 1. In that case, the
7269 code below would make the expression more complex. Change it to a
7270 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7271 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
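/* For example, (a < b) & (c == d) becomes (a < b) && (c == d),
and (a < b) == (c < d) becomes !((a < b) ^ (c < d)). */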
7273 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7274 || code == EQ_EXPR || code == NE_EXPR)
7275 && ((truth_value_p (TREE_CODE (arg0))
7276 && (truth_value_p (TREE_CODE (arg1))
7277 || (TREE_CODE (arg1) == BIT_AND_EXPR
7278 && integer_onep (TREE_OPERAND (arg1, 1)))))
7279 || (truth_value_p (TREE_CODE (arg1))
7280 && (truth_value_p (TREE_CODE (arg0))
7281 || (TREE_CODE (arg0) == BIT_AND_EXPR
7282 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7284 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7285 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7286 : TRUTH_XOR_EXPR,
7287 boolean_type_node,
7288 fold_convert (boolean_type_node, arg0),
7289 fold_convert (boolean_type_node, arg1));
7291 if (code == EQ_EXPR)
7292 tem = invert_truthvalue (tem);
7294 return fold_convert (type, tem);
7297 if (TREE_CODE_CLASS (code) == tcc_binary
7298 || TREE_CODE_CLASS (code) == tcc_comparison)
7300 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7301 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7302 fold_build2 (code, type,
7303 TREE_OPERAND (arg0, 1), op1));
7304 if (TREE_CODE (arg1) == COMPOUND_EXPR
7305 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7306 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7307 fold_build2 (code, type,
7308 op0, TREE_OPERAND (arg1, 1)));
7310 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7312 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7313 arg0, arg1,
7314 /*cond_first_p=*/1);
7315 if (tem != NULL_TREE)
7316 return tem;
7319 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7321 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7322 arg1, arg0,
7323 /*cond_first_p=*/0);
7324 if (tem != NULL_TREE)
7325 return tem;
7329 switch (code)
7331 case PLUS_EXPR:
7332 /* A + (-B) -> A - B */
7333 if (TREE_CODE (arg1) == NEGATE_EXPR)
7334 return fold_build2 (MINUS_EXPR, type,
7335 fold_convert (type, arg0),
7336 fold_convert (type, TREE_OPERAND (arg1, 0)));
7337 /* (-A) + B -> B - A */
7338 if (TREE_CODE (arg0) == NEGATE_EXPR
7339 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7340 return fold_build2 (MINUS_EXPR, type,
7341 fold_convert (type, arg1),
7342 fold_convert (type, TREE_OPERAND (arg0, 0)));
7343 /* Convert ~A + 1 to -A. */
7344 if (INTEGRAL_TYPE_P (type)
7345 && TREE_CODE (arg0) == BIT_NOT_EXPR
7346 && integer_onep (arg1))
7347 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7349 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
7350 same or one. */
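/* For example, x*3 + x*5 folds to x*8, and x*4 + y*4 folds
to (x + y)*4. */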
7351 if ((TREE_CODE (arg0) == MULT_EXPR
7352 || TREE_CODE (arg1) == MULT_EXPR)
7353 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7355 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7356 if (tem)
7357 return tem;
7360 if (! FLOAT_TYPE_P (type))
7362 if (integer_zerop (arg1))
7363 return non_lvalue (fold_convert (type, arg0));
7365 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7366 with a constant, and the two constants have no bits in common,
7367 we should treat this as a BIT_IOR_EXPR since this may produce more
7368 simplifications. */
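/* For example, in (x & 0xF0) + (y & 0x0F) the two masks share no
bits, so the addition can never carry and the result is the same
as (x & 0xF0) | (y & 0x0F). */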
7369 if (TREE_CODE (arg0) == BIT_AND_EXPR
7370 && TREE_CODE (arg1) == BIT_AND_EXPR
7371 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7372 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7373 && integer_zerop (const_binop (BIT_AND_EXPR,
7374 TREE_OPERAND (arg0, 1),
7375 TREE_OPERAND (arg1, 1), 0)))
7377 code = BIT_IOR_EXPR;
7378 goto bit_ior;
7381 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7382 (plus (plus (mult) (mult)) (foo)) so that we can
7383 take advantage of the factoring cases below. */
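/* For example, (a*b + c) + d*e becomes (a*b + d*e) + c. */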
7384 if (((TREE_CODE (arg0) == PLUS_EXPR
7385 || TREE_CODE (arg0) == MINUS_EXPR)
7386 && TREE_CODE (arg1) == MULT_EXPR)
7387 || ((TREE_CODE (arg1) == PLUS_EXPR
7388 || TREE_CODE (arg1) == MINUS_EXPR)
7389 && TREE_CODE (arg0) == MULT_EXPR))
7391 tree parg0, parg1, parg, marg;
7392 enum tree_code pcode;
7394 if (TREE_CODE (arg1) == MULT_EXPR)
7395 parg = arg0, marg = arg1;
7396 else
7397 parg = arg1, marg = arg0;
7398 pcode = TREE_CODE (parg);
7399 parg0 = TREE_OPERAND (parg, 0);
7400 parg1 = TREE_OPERAND (parg, 1);
7401 STRIP_NOPS (parg0);
7402 STRIP_NOPS (parg1);
7404 if (TREE_CODE (parg0) == MULT_EXPR
7405 && TREE_CODE (parg1) != MULT_EXPR)
7406 return fold_build2 (pcode, type,
7407 fold_build2 (PLUS_EXPR, type,
7408 fold_convert (type, parg0),
7409 fold_convert (type, marg)),
7410 fold_convert (type, parg1));
7411 if (TREE_CODE (parg0) != MULT_EXPR
7412 && TREE_CODE (parg1) == MULT_EXPR)
7413 return fold_build2 (PLUS_EXPR, type,
7414 fold_convert (type, parg0),
7415 fold_build2 (pcode, type,
7416 fold_convert (type, marg),
7417 fold_convert (type,
7418 parg1)));
7421 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7422 of the array. The loop optimizer sometimes produces this type of
7423 expression. */
7424 if (TREE_CODE (arg0) == ADDR_EXPR)
7426 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7427 if (tem)
7428 return fold_convert (type, tem);
7430 else if (TREE_CODE (arg1) == ADDR_EXPR)
7432 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7433 if (tem)
7434 return fold_convert (type, tem);
7437 else
7439 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7440 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7441 return non_lvalue (fold_convert (type, arg0));
7443 /* Likewise if the operands are reversed. */
7444 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7445 return non_lvalue (fold_convert (type, arg1));
7447 /* Convert X + -C into X - C. */
7448 if (TREE_CODE (arg1) == REAL_CST
7449 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7451 tem = fold_negate_const (arg1, type);
7452 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7453 return fold_build2 (MINUS_EXPR, type,
7454 fold_convert (type, arg0),
7455 fold_convert (type, tem));
7458 if (flag_unsafe_math_optimizations
7459 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7460 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7461 && (tem = distribute_real_division (code, type, arg0, arg1)))
7462 return tem;
7464 /* Convert x+x into x*2.0. */
7465 if (operand_equal_p (arg0, arg1, 0)
7466 && SCALAR_FLOAT_TYPE_P (type))
7467 return fold_build2 (MULT_EXPR, type, arg0,
7468 build_real (type, dconst2));
7470 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7471 if (flag_unsafe_math_optimizations
7472 && TREE_CODE (arg1) == PLUS_EXPR
7473 && TREE_CODE (arg0) != MULT_EXPR)
7475 tree tree10 = TREE_OPERAND (arg1, 0);
7476 tree tree11 = TREE_OPERAND (arg1, 1);
7477 if (TREE_CODE (tree11) == MULT_EXPR
7478 && TREE_CODE (tree10) == MULT_EXPR)
7480 tree tree0;
7481 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7482 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7485 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7486 if (flag_unsafe_math_optimizations
7487 && TREE_CODE (arg0) == PLUS_EXPR
7488 && TREE_CODE (arg1) != MULT_EXPR)
7490 tree tree00 = TREE_OPERAND (arg0, 0);
7491 tree tree01 = TREE_OPERAND (arg0, 1);
7492 if (TREE_CODE (tree01) == MULT_EXPR
7493 && TREE_CODE (tree00) == MULT_EXPR)
7495 tree tree0;
7496 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7497 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7502 bit_rotate:
7503 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7504 is a rotate of A by C1 bits. */
7505 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7506 is a rotate of A by B bits. */
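/* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29)
folds to A rotated left by 3 bits, and (A << B) + (A >> (32 - B))
folds to A rotated left by B bits. */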
7508 enum tree_code code0, code1;
7509 code0 = TREE_CODE (arg0);
7510 code1 = TREE_CODE (arg1);
7511 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7512 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7513 && operand_equal_p (TREE_OPERAND (arg0, 0),
7514 TREE_OPERAND (arg1, 0), 0)
7515 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7517 tree tree01, tree11;
7518 enum tree_code code01, code11;
7520 tree01 = TREE_OPERAND (arg0, 1);
7521 tree11 = TREE_OPERAND (arg1, 1);
7522 STRIP_NOPS (tree01);
7523 STRIP_NOPS (tree11);
7524 code01 = TREE_CODE (tree01);
7525 code11 = TREE_CODE (tree11);
7526 if (code01 == INTEGER_CST
7527 && code11 == INTEGER_CST
7528 && TREE_INT_CST_HIGH (tree01) == 0
7529 && TREE_INT_CST_HIGH (tree11) == 0
7530 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7531 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7532 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7533 code0 == LSHIFT_EXPR ? tree01 : tree11);
7534 else if (code11 == MINUS_EXPR)
7536 tree tree110, tree111;
7537 tree110 = TREE_OPERAND (tree11, 0);
7538 tree111 = TREE_OPERAND (tree11, 1);
7539 STRIP_NOPS (tree110);
7540 STRIP_NOPS (tree111);
7541 if (TREE_CODE (tree110) == INTEGER_CST
7542 && 0 == compare_tree_int (tree110,
7543 TYPE_PRECISION
7544 (TREE_TYPE (TREE_OPERAND
7545 (arg0, 0))))
7546 && operand_equal_p (tree01, tree111, 0))
7547 return build2 ((code0 == LSHIFT_EXPR
7548 ? LROTATE_EXPR
7549 : RROTATE_EXPR),
7550 type, TREE_OPERAND (arg0, 0), tree01);
7552 else if (code01 == MINUS_EXPR)
7554 tree tree010, tree011;
7555 tree010 = TREE_OPERAND (tree01, 0);
7556 tree011 = TREE_OPERAND (tree01, 1);
7557 STRIP_NOPS (tree010);
7558 STRIP_NOPS (tree011);
7559 if (TREE_CODE (tree010) == INTEGER_CST
7560 && 0 == compare_tree_int (tree010,
7561 TYPE_PRECISION
7562 (TREE_TYPE (TREE_OPERAND
7563 (arg0, 0))))
7564 && operand_equal_p (tree11, tree011, 0))
7565 return build2 ((code0 != LSHIFT_EXPR
7566 ? LROTATE_EXPR
7567 : RROTATE_EXPR),
7568 type, TREE_OPERAND (arg0, 0), tree11);
7573 associate:
7574 /* In most languages, we can't associate operations on floats through
7575 parentheses. Rather than remember where the parentheses were, we
7576 don't associate floats at all, unless the user has specified
7577 -funsafe-math-optimizations. */
7579 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7581 tree var0, con0, lit0, minus_lit0;
7582 tree var1, con1, lit1, minus_lit1;
7584 /* Split both trees into variables, constants, and literals. Then
7585 associate each group together, the constants with literals,
7586 then the result with variables. This increases the chances of
7587 literals being recombined later and of generating relocatable
7588 expressions for the sum of a constant and literal. */
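/* For example, (x + 5) - (y - 3) splits into the variables x and y
and the literals 5 and 3, which recombine as (x - y) + 8. */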
7589 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7590 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7591 code == MINUS_EXPR);
7593 /* Only do something if we found more than two objects. Otherwise,
7594 nothing has changed and we risk infinite recursion. */
7595 if (2 < ((var0 != 0) + (var1 != 0)
7596 + (con0 != 0) + (con1 != 0)
7597 + (lit0 != 0) + (lit1 != 0)
7598 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7600 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7601 if (code == MINUS_EXPR)
7602 code = PLUS_EXPR;
7604 var0 = associate_trees (var0, var1, code, type);
7605 con0 = associate_trees (con0, con1, code, type);
7606 lit0 = associate_trees (lit0, lit1, code, type);
7607 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7609 /* Preserve the MINUS_EXPR if the negative part of the literal is
7610 greater than the positive part. Otherwise, the multiplicative
7611 folding code (i.e. extract_muldiv) may be fooled when
7612 unsigned constants are subtracted, as in the following
7613 example: ((X*2 + 4) - 8U)/2. */
7614 if (minus_lit0 && lit0)
7616 if (TREE_CODE (lit0) == INTEGER_CST
7617 && TREE_CODE (minus_lit0) == INTEGER_CST
7618 && tree_int_cst_lt (lit0, minus_lit0))
7620 minus_lit0 = associate_trees (minus_lit0, lit0,
7621 MINUS_EXPR, type);
7622 lit0 = 0;
7624 else
7626 lit0 = associate_trees (lit0, minus_lit0,
7627 MINUS_EXPR, type);
7628 minus_lit0 = 0;
7631 if (minus_lit0)
7633 if (con0 == 0)
7634 return fold_convert (type,
7635 associate_trees (var0, minus_lit0,
7636 MINUS_EXPR, type));
7637 else
7639 con0 = associate_trees (con0, minus_lit0,
7640 MINUS_EXPR, type);
7641 return fold_convert (type,
7642 associate_trees (var0, con0,
7643 PLUS_EXPR, type));
7647 con0 = associate_trees (con0, lit0, code, type);
7648 return fold_convert (type, associate_trees (var0, con0,
7649 code, type));
7653 return NULL_TREE;
7655 case MINUS_EXPR:
7656 /* A - (-B) -> A + B */
7657 if (TREE_CODE (arg1) == NEGATE_EXPR)
7658 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7659 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7660 if (TREE_CODE (arg0) == NEGATE_EXPR
7661 && (FLOAT_TYPE_P (type)
7662 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7663 && negate_expr_p (arg1)
7664 && reorder_operands_p (arg0, arg1))
7665 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7666 TREE_OPERAND (arg0, 0));
7667 /* Convert -A - 1 to ~A. */
7668 if (INTEGRAL_TYPE_P (type)
7669 && TREE_CODE (arg0) == NEGATE_EXPR
7670 && integer_onep (arg1))
7671 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7673 /* Convert -1 - A to ~A. */
7674 if (INTEGRAL_TYPE_P (type)
7675 && integer_all_onesp (arg0))
7676 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7678 if (! FLOAT_TYPE_P (type))
7680 if (integer_zerop (arg0))
7681 return negate_expr (fold_convert (type, arg1));
7682 if (integer_zerop (arg1))
7683 return non_lvalue (fold_convert (type, arg0));
7685 /* Fold A - (A & B) into ~B & A. */
7686 if (!TREE_SIDE_EFFECTS (arg0)
7687 && TREE_CODE (arg1) == BIT_AND_EXPR)
7689 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7690 return fold_build2 (BIT_AND_EXPR, type,
7691 fold_build1 (BIT_NOT_EXPR, type,
7692 TREE_OPERAND (arg1, 0)),
7693 arg0);
7694 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7695 return fold_build2 (BIT_AND_EXPR, type,
7696 fold_build1 (BIT_NOT_EXPR, type,
7697 TREE_OPERAND (arg1, 1)),
7698 arg0);
7701 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7702 any power of 2 minus 1. */
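/* For example, with B == 7, (A & ~7) - (A & 7) folds to
(A ^ 7) - 7. */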
7703 if (TREE_CODE (arg0) == BIT_AND_EXPR
7704 && TREE_CODE (arg1) == BIT_AND_EXPR
7705 && operand_equal_p (TREE_OPERAND (arg0, 0),
7706 TREE_OPERAND (arg1, 0), 0))
7708 tree mask0 = TREE_OPERAND (arg0, 1);
7709 tree mask1 = TREE_OPERAND (arg1, 1);
7710 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7712 if (operand_equal_p (tem, mask1, 0))
7714 tem = fold_build2 (BIT_XOR_EXPR, type,
7715 TREE_OPERAND (arg0, 0), mask1);
7716 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7721 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7722 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7723 return non_lvalue (fold_convert (type, arg0));
7725 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7726 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7727 (-ARG1 + ARG0) reduces to -ARG1. */
7728 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7729 return negate_expr (fold_convert (type, arg1));
7731 /* Fold &x - &x. This can happen from &x.foo - &x.
7732 This is unsafe for certain floats even in non-IEEE formats.
7733 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7734 Also note that operand_equal_p is always false if an operand
7735 is volatile. */
7737 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7738 && operand_equal_p (arg0, arg1, 0))
7739 return fold_convert (type, integer_zero_node);
7741 /* A - B -> A + (-B) if B is easily negatable. */
7742 if (negate_expr_p (arg1)
7743 && ((FLOAT_TYPE_P (type)
7744 /* Avoid this transformation if B is a positive REAL_CST. */
7745 && (TREE_CODE (arg1) != REAL_CST
7746 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7747 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7748 return fold_build2 (PLUS_EXPR, type,
7749 fold_convert (type, arg0),
7750 fold_convert (type, negate_expr (arg1)));
7752 /* Try folding difference of addresses. */
7754 HOST_WIDE_INT diff;
7756 if ((TREE_CODE (arg0) == ADDR_EXPR
7757 || TREE_CODE (arg1) == ADDR_EXPR)
7758 && ptr_difference_const (arg0, arg1, &diff))
7759 return build_int_cst_type (type, diff);
7762 /* Fold &a[i] - &a[j] to i-j. */
7763 if (TREE_CODE (arg0) == ADDR_EXPR
7764 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7765 && TREE_CODE (arg1) == ADDR_EXPR
7766 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7768 tree aref0 = TREE_OPERAND (arg0, 0);
7769 tree aref1 = TREE_OPERAND (arg1, 0);
7770 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7771 TREE_OPERAND (aref1, 0), 0))
7773 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7774 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7775 tree esz = array_ref_element_size (aref0);
7776 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7777 return fold_build2 (MULT_EXPR, type, diff,
7778 fold_convert (type, esz));
7783 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7784 of the array. The loop optimizer sometimes produces this type of
7785 expression. */
7786 if (TREE_CODE (arg0) == ADDR_EXPR)
7788 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7789 if (tem)
7790 return fold_convert (type, tem);
7793 if (flag_unsafe_math_optimizations
7794 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7795 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7796 && (tem = distribute_real_division (code, type, arg0, arg1)))
7797 return tem;
7799 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
7800 same or one. */
7801 if ((TREE_CODE (arg0) == MULT_EXPR
7802 || TREE_CODE (arg1) == MULT_EXPR)
7803 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7805 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7806 if (tem)
7807 return tem;
7810 goto associate;
7812 case MULT_EXPR:
7813 /* (-A) * (-B) -> A * B */
7814 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7815 return fold_build2 (MULT_EXPR, type,
7816 TREE_OPERAND (arg0, 0),
7817 negate_expr (arg1));
7818 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7819 return fold_build2 (MULT_EXPR, type,
7820 negate_expr (arg0),
7821 TREE_OPERAND (arg1, 0));
7823 if (! FLOAT_TYPE_P (type))
7825 if (integer_zerop (arg1))
7826 return omit_one_operand (type, arg1, arg0);
7827 if (integer_onep (arg1))
7828 return non_lvalue (fold_convert (type, arg0));
7829 /* Transform x * -1 into -x. */
7830 if (integer_all_onesp (arg1))
7831 return fold_convert (type, negate_expr (arg0));
7833 /* (a * (1 << b)) is (a << b) */
7834 if (TREE_CODE (arg1) == LSHIFT_EXPR
7835 && integer_onep (TREE_OPERAND (arg1, 0)))
7836 return fold_build2 (LSHIFT_EXPR, type, arg0,
7837 TREE_OPERAND (arg1, 1));
7838 if (TREE_CODE (arg0) == LSHIFT_EXPR
7839 && integer_onep (TREE_OPERAND (arg0, 0)))
7840 return fold_build2 (LSHIFT_EXPR, type, arg1,
7841 TREE_OPERAND (arg0, 1));
7843 if (TREE_CODE (arg1) == INTEGER_CST
7844 && 0 != (tem = extract_muldiv (op0,
7845 fold_convert (type, arg1),
7846 code, NULL_TREE)))
7847 return fold_convert (type, tem);
7850 else
7852 /* Maybe fold x * 0 to 0. The expressions aren't the same
7853 when x is NaN, since x * 0 is also NaN. Nor are they the
7854 same in modes with signed zeros, since multiplying a
7855 negative value by 0 gives -0, not +0. */
7856 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7857 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7858 && real_zerop (arg1))
7859 return omit_one_operand (type, arg1, arg0);
7860 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7861 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7862 && real_onep (arg1))
7863 return non_lvalue (fold_convert (type, arg0));
7865 /* Transform x * -1.0 into -x. */
7866 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7867 && real_minus_onep (arg1))
7868 return fold_convert (type, negate_expr (arg0));
7870 /* Convert (C1/X)*C2 into (C1*C2)/X. */
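/* For example, (2.0/x) * 3.0 folds to 6.0/x. */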
7871 if (flag_unsafe_math_optimizations
7872 && TREE_CODE (arg0) == RDIV_EXPR
7873 && TREE_CODE (arg1) == REAL_CST
7874 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7876 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7877 arg1, 0);
7878 if (tem)
7879 return fold_build2 (RDIV_EXPR, type, tem,
7880 TREE_OPERAND (arg0, 1));
7883 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7884 if (operand_equal_p (arg0, arg1, 0))
7886 tree tem = fold_strip_sign_ops (arg0);
7887 if (tem != NULL_TREE)
7889 tem = fold_convert (type, tem);
7890 return fold_build2 (MULT_EXPR, type, tem, tem);
7894 if (flag_unsafe_math_optimizations)
7896 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7897 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7899 /* Optimizations of root(...)*root(...). */
7900 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7902 tree rootfn, arg, arglist;
7903 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7904 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7906 /* Optimize sqrt(x)*sqrt(x) as x. */
7907 if (BUILTIN_SQRT_P (fcode0)
7908 && operand_equal_p (arg00, arg10, 0)
7909 && ! HONOR_SNANS (TYPE_MODE (type)))
7910 return arg00;
7912 /* Optimize root(x)*root(y) as root(x*y). */
7913 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7914 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7915 arglist = build_tree_list (NULL_TREE, arg);
7916 return build_function_call_expr (rootfn, arglist);
7919 /* Optimize expN(x)*expN(y) as expN(x+y). */
7920 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7922 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7923 tree arg = fold_build2 (PLUS_EXPR, type,
7924 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7925 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7926 tree arglist = build_tree_list (NULL_TREE, arg);
7927 return build_function_call_expr (expfn, arglist);
7930 /* Optimizations of pow(...)*pow(...). */
7931 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7932 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7933 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7935 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7936 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7937 1)));
7938 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7939 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7940 1)));
7942 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7943 if (operand_equal_p (arg01, arg11, 0))
7945 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7946 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7947 tree arglist = tree_cons (NULL_TREE, arg,
7948 build_tree_list (NULL_TREE,
7949 arg01));
7950 return build_function_call_expr (powfn, arglist);
7953 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7954 if (operand_equal_p (arg00, arg10, 0))
7956 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7957 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7958 tree arglist = tree_cons (NULL_TREE, arg00,
7959 build_tree_list (NULL_TREE,
7960 arg));
7961 return build_function_call_expr (powfn, arglist);
7965 /* Optimize tan(x)*cos(x) as sin(x). */
7966 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7967 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7968 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7969 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7970 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7971 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7972 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7973 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7975 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7977 if (sinfn != NULL_TREE)
7978 return build_function_call_expr (sinfn,
7979 TREE_OPERAND (arg0, 1));
7982 /* Optimize x*pow(x,c) as pow(x,c+1). */
7983 if (fcode1 == BUILT_IN_POW
7984 || fcode1 == BUILT_IN_POWF
7985 || fcode1 == BUILT_IN_POWL)
7987 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7988 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7989 1)));
7990 if (TREE_CODE (arg11) == REAL_CST
7991 && ! TREE_CONSTANT_OVERFLOW (arg11)
7992 && operand_equal_p (arg0, arg10, 0))
7994 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7995 REAL_VALUE_TYPE c;
7996 tree arg, arglist;
7998 c = TREE_REAL_CST (arg11);
7999 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8000 arg = build_real (type, c);
8001 arglist = build_tree_list (NULL_TREE, arg);
8002 arglist = tree_cons (NULL_TREE, arg0, arglist);
8003 return build_function_call_expr (powfn, arglist);
8007 /* Optimize pow(x,c)*x as pow(x,c+1). */
8008 if (fcode0 == BUILT_IN_POW
8009 || fcode0 == BUILT_IN_POWF
8010 || fcode0 == BUILT_IN_POWL)
8012 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8013 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8014 1)));
8015 if (TREE_CODE (arg01) == REAL_CST
8016 && ! TREE_CONSTANT_OVERFLOW (arg01)
8017 && operand_equal_p (arg1, arg00, 0))
8019 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8020 REAL_VALUE_TYPE c;
8021 tree arg, arglist;
8023 c = TREE_REAL_CST (arg01);
8024 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8025 arg = build_real (type, c);
8026 arglist = build_tree_list (NULL_TREE, arg);
8027 arglist = tree_cons (NULL_TREE, arg1, arglist);
8028 return build_function_call_expr (powfn, arglist);
8032 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8033 if (! optimize_size
8034 && operand_equal_p (arg0, arg1, 0))
8036 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8038 if (powfn)
8040 tree arg = build_real (type, dconst2);
8041 tree arglist = build_tree_list (NULL_TREE, arg);
8042 arglist = tree_cons (NULL_TREE, arg0, arglist);
8043 return build_function_call_expr (powfn, arglist);
8048 goto associate;
8050 case BIT_IOR_EXPR:
8051 bit_ior:
8052 if (integer_all_onesp (arg1))
8053 return omit_one_operand (type, arg1, arg0);
8054 if (integer_zerop (arg1))
8055 return non_lvalue (fold_convert (type, arg0));
8056 if (operand_equal_p (arg0, arg1, 0))
8057 return non_lvalue (fold_convert (type, arg0));
8059 /* ~X | X is -1. */
8060 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8061 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8063 t1 = build_int_cst (type, -1);
8064 t1 = force_fit_type (t1, 0, false, false);
8065 return omit_one_operand (type, t1, arg1);
8068 /* X | ~X is -1. */
8069 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8070 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8072 t1 = build_int_cst (type, -1);
8073 t1 = force_fit_type (t1, 0, false, false);
8074 return omit_one_operand (type, t1, arg0);
8077 t1 = distribute_bit_expr (code, type, arg0, arg1);
8078 if (t1 != NULL_TREE)
8079 return t1;
8081 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8083 This results in more efficient code for machines without a NAND
8084 instruction. Combine will canonicalize to the first form
8085 which will allow use of NAND instructions provided by the
8086 backend if they exist. */
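/* For example, ~a | ~b becomes ~(a & b) by De Morgan's law. */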
8087 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8088 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8090 return fold_build1 (BIT_NOT_EXPR, type,
8091 build2 (BIT_AND_EXPR, type,
8092 TREE_OPERAND (arg0, 0),
8093 TREE_OPERAND (arg1, 0)));
8096 /* See if this can be simplified into a rotate first. If that
8097 is unsuccessful continue in the association code. */
8098 goto bit_rotate;
8100 case BIT_XOR_EXPR:
8101 if (integer_zerop (arg1))
8102 return non_lvalue (fold_convert (type, arg0));
8103 if (integer_all_onesp (arg1))
8104 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8105 if (operand_equal_p (arg0, arg1, 0))
8106 return omit_one_operand (type, integer_zero_node, arg0);
8108 /* ~X ^ X is -1. */
8109 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8110 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8112 t1 = build_int_cst (type, -1);
8113 t1 = force_fit_type (t1, 0, false, false);
8114 return omit_one_operand (type, t1, arg1);
8117 /* X ^ ~X is -1. */
8118 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8119 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8121 t1 = build_int_cst (type, -1);
8122 t1 = force_fit_type (t1, 0, false, false);
8123 return omit_one_operand (type, t1, arg0);
8126 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8127 with a constant, and the two constants have no bits in common,
8128 we should treat this as a BIT_IOR_EXPR since this may produce more
8129 simplifications. */
8130 if (TREE_CODE (arg0) == BIT_AND_EXPR
8131 && TREE_CODE (arg1) == BIT_AND_EXPR
8132 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8133 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8134 && integer_zerop (const_binop (BIT_AND_EXPR,
8135 TREE_OPERAND (arg0, 1),
8136 TREE_OPERAND (arg1, 1), 0)))
8138 code = BIT_IOR_EXPR;
8139 goto bit_ior;
8142 /* (X | Y) ^ X -> Y & ~X. */
8143 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8144 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8146 tree t2 = TREE_OPERAND (arg0, 1);
8147 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8148 arg1);
8149 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8150 fold_convert (type, t1));
8151 return t1;
8154 /* (Y | X) ^ X -> Y & ~X. */
8155 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8156 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8158 tree t2 = TREE_OPERAND (arg0, 0);
8159 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8160 arg1);
8161 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8162 fold_convert (type, t1));
8163 return t1;
8166 /* X ^ (X | Y) -> Y & ~X. */
8167 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8168 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8170 tree t2 = TREE_OPERAND (arg1, 1);
8171 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8172 arg0);
8173 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8174 fold_convert (type, t1));
8175 return t1;
8178 /* X ^ (Y | X) -> Y & ~X. */
8179 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8180 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8182 tree t2 = TREE_OPERAND (arg1, 0);
8183 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8184 arg0);
8185 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8186 fold_convert (type, t1));
8187 return t1;
8190 /* Convert ~X ^ ~Y to X ^ Y. */
8191 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8192 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8193 return fold_build2 (code, type,
8194 fold_convert (type, TREE_OPERAND (arg0, 0)),
8195 fold_convert (type, TREE_OPERAND (arg1, 0)));
8197 /* See if this can be simplified into a rotate first. If that
8198 is unsuccessful continue in the association code. */
8199 goto bit_rotate;
8201 case BIT_AND_EXPR:
8202 if (integer_all_onesp (arg1))
8203 return non_lvalue (fold_convert (type, arg0));
8204 if (integer_zerop (arg1))
8205 return omit_one_operand (type, arg1, arg0);
8206 if (operand_equal_p (arg0, arg1, 0))
8207 return non_lvalue (fold_convert (type, arg0));
8209 /* ~X & X is always zero. */
8210 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8211 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8212 return omit_one_operand (type, integer_zero_node, arg1);
8214 /* X & ~X is always zero. */
8215 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8216 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8217 return omit_one_operand (type, integer_zero_node, arg0);
8219 t1 = distribute_bit_expr (code, type, arg0, arg1);
8220 if (t1 != NULL_TREE)
8221 return t1;
8222 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8223 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8224 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8226 unsigned int prec
8227 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8229 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8230 && (~TREE_INT_CST_LOW (arg1)
8231 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8232 return fold_convert (type, TREE_OPERAND (arg0, 0));
8235 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8237 This results in more efficient code for machines without a NOR
8238 instruction. Combine will canonicalize to the first form
8239 which will allow use of NOR instructions provided by the
8240 backend if they exist. */
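/* For example, ~a & ~b becomes ~(a | b) by De Morgan's law. */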
8241 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8242 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8244 return fold_build1 (BIT_NOT_EXPR, type,
8245 build2 (BIT_IOR_EXPR, type,
8246 TREE_OPERAND (arg0, 0),
8247 TREE_OPERAND (arg1, 0)));
8250 goto associate;
8252 case RDIV_EXPR:
8253 /* Don't touch a floating-point divide by zero unless the mode
8254 of the constant can represent infinity. */
8255 if (TREE_CODE (arg1) == REAL_CST
8256 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8257 && real_zerop (arg1))
8258 return NULL_TREE;
8260 /* Optimize A / A to 1.0 if we don't care about
8261 NaNs or Infinities. */
8262 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8263 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8264 && operand_equal_p (arg0, arg1, 0))
8266 tree r = build_real (TREE_TYPE (arg0), dconst1);
8268 return omit_two_operands (type, r, arg0, arg1);
8271 /* (-A) / (-B) -> A / B */
8272 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8273 return fold_build2 (RDIV_EXPR, type,
8274 TREE_OPERAND (arg0, 0),
8275 negate_expr (arg1));
8276 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8277 return fold_build2 (RDIV_EXPR, type,
8278 negate_expr (arg0),
8279 TREE_OPERAND (arg1, 0));
8281 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8282 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8283 && real_onep (arg1))
8284 return non_lvalue (fold_convert (type, arg0));
8286 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8287 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8288 && real_minus_onep (arg1))
8289 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8291 /* If ARG1 is a constant, we can convert this to a multiply by the
8292 reciprocal. This does not have the same rounding properties,
8293 so only do this if -funsafe-math-optimizations. We can actually
8294 always safely do it if ARG1 is a power of two, but it's hard to
8295 tell if it is or not in a portable manner. */
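/* For example, when optimizing, X / 2.0 folds to X * 0.5 since the
reciprocal of two is exact; X / 3.0 folds to X * (1.0/3.0) only
under -funsafe-math-optimizations. */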
8296 if (TREE_CODE (arg1) == REAL_CST)
8298 if (flag_unsafe_math_optimizations
8299 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8300 arg1, 0)))
8301 return fold_build2 (MULT_EXPR, type, arg0, tem);
8302 /* Find the reciprocal if optimizing and the result is exact. */
8303 if (optimize)
8305 REAL_VALUE_TYPE r;
8306 r = TREE_REAL_CST (arg1);
8307 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8309 tem = build_real (type, r);
8310 return fold_build2 (MULT_EXPR, type,
8311 fold_convert (type, arg0), tem);
8315 /* Convert A/B/C to A/(B*C). */
8316 if (flag_unsafe_math_optimizations
8317 && TREE_CODE (arg0) == RDIV_EXPR)
8318 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8319 fold_build2 (MULT_EXPR, type,
8320 TREE_OPERAND (arg0, 1), arg1));
8322 /* Convert A/(B/C) to (A/B)*C. */
8323 if (flag_unsafe_math_optimizations
8324 && TREE_CODE (arg1) == RDIV_EXPR)
8325 return fold_build2 (MULT_EXPR, type,
8326 fold_build2 (RDIV_EXPR, type, arg0,
8327 TREE_OPERAND (arg1, 0)),
8328 TREE_OPERAND (arg1, 1));
8330 /* Convert C1/(X*C2) into (C1/C2)/X. */
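/* For example, 6.0/(x*2.0) folds to 3.0/x. */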
8331 if (flag_unsafe_math_optimizations
8332 && TREE_CODE (arg1) == MULT_EXPR
8333 && TREE_CODE (arg0) == REAL_CST
8334 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8336 tree tem = const_binop (RDIV_EXPR, arg0,
8337 TREE_OPERAND (arg1, 1), 0);
8338 if (tem)
8339 return fold_build2 (RDIV_EXPR, type, tem,
8340 TREE_OPERAND (arg1, 0));
8343 if (flag_unsafe_math_optimizations)
8345 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8346 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8348 /* Optimize sin(x)/cos(x) as tan(x). */
8349 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8350 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8351 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8352 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8353 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8355 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8357 if (tanfn != NULL_TREE)
8358 return build_function_call_expr (tanfn,
8359 TREE_OPERAND (arg0, 1));
8362 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8363 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8364 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8365 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8366 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8367 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8369 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8371 if (tanfn != NULL_TREE)
8373 tree tmp = TREE_OPERAND (arg0, 1);
8374 tmp = build_function_call_expr (tanfn, tmp);
8375 return fold_build2 (RDIV_EXPR, type,
8376 build_real (type, dconst1), tmp);
8380 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
8381 NaNs or Infinities. */
8382 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
8383 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
8384 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
8386 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8387 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8389 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8390 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8391 && operand_equal_p (arg00, arg01, 0))
8393 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8395 if (cosfn != NULL_TREE)
8396 return build_function_call_expr (cosfn,
8397 TREE_OPERAND (arg0, 1));
8401 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
8402 NaNs or Infinities. */
8403 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
8404 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
8405 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
8407 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8408 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8410 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8411 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8412 && operand_equal_p (arg00, arg01, 0))
8414 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8416 if (cosfn != NULL_TREE)
8418 tree tmp = TREE_OPERAND (arg0, 1);
8419 tmp = build_function_call_expr (cosfn, tmp);
8420 return fold_build2 (RDIV_EXPR, type,
8421 build_real (type, dconst1),
8422 tmp);
8427 /* Optimize pow(x,c)/x as pow(x,c-1). */
8428 if (fcode0 == BUILT_IN_POW
8429 || fcode0 == BUILT_IN_POWF
8430 || fcode0 == BUILT_IN_POWL)
8432 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8433 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8434 if (TREE_CODE (arg01) == REAL_CST
8435 && ! TREE_CONSTANT_OVERFLOW (arg01)
8436 && operand_equal_p (arg1, arg00, 0))
8438 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8439 REAL_VALUE_TYPE c;
8440 tree arg, arglist;
8442 c = TREE_REAL_CST (arg01);
8443 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8444 arg = build_real (type, c);
8445 arglist = build_tree_list (NULL_TREE, arg);
8446 arglist = tree_cons (NULL_TREE, arg1, arglist);
8447 return build_function_call_expr (powfn, arglist);
8451 /* Optimize x/expN(y) into x*expN(-y). */
8452 if (BUILTIN_EXPONENT_P (fcode1))
8454 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8455 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8456 tree arglist = build_tree_list (NULL_TREE,
8457 fold_convert (type, arg));
8458 arg1 = build_function_call_expr (expfn, arglist);
8459 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8462 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8463 if (fcode1 == BUILT_IN_POW
8464 || fcode1 == BUILT_IN_POWF
8465 || fcode1 == BUILT_IN_POWL)
8467 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8468 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8469 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8470 tree neg11 = fold_convert (type, negate_expr (arg11));
8471 tree arglist = tree_cons (NULL_TREE, arg10,
8472 build_tree_list (NULL_TREE, neg11));
8473 arg1 = build_function_call_expr (powfn, arglist);
8474 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8477 return NULL_TREE;
8479 case TRUNC_DIV_EXPR:
8480 case ROUND_DIV_EXPR:
8481 case FLOOR_DIV_EXPR:
8482 case CEIL_DIV_EXPR:
8483 case EXACT_DIV_EXPR:
8484 if (integer_onep (arg1))
8485 return non_lvalue (fold_convert (type, arg0));
8486 if (integer_zerop (arg1))
8487 return NULL_TREE;
8488 /* X / -1 is -X. */
8489 if (!TYPE_UNSIGNED (type)
8490 && TREE_CODE (arg1) == INTEGER_CST
8491 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8492 && TREE_INT_CST_HIGH (arg1) == -1)
8493 return fold_convert (type, negate_expr (arg0));
8495 /* Convert -A / -B to A / B when the type is signed and overflow is
8496 undefined. */
8497 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8498 && TREE_CODE (arg0) == NEGATE_EXPR
8499 && negate_expr_p (arg1))
8500 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8501 negate_expr (arg1));
8502 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8503 && TREE_CODE (arg1) == NEGATE_EXPR
8504 && negate_expr_p (arg0))
8505 return fold_build2 (code, type, negate_expr (arg0),
8506 TREE_OPERAND (arg1, 0));
8508 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8509 operation, EXACT_DIV_EXPR.
8511 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8512 At one time others generated faster code, but it's not clear whether
8513 they still do after the last round of changes to the DIV code in expmed.c. */
8514 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8515 && multiple_of_p (type, arg0, arg1))
8516 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8518 if (TREE_CODE (arg1) == INTEGER_CST
8519 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8520 return fold_convert (type, tem);
8522 return NULL_TREE;
8524 case CEIL_MOD_EXPR:
8525 case FLOOR_MOD_EXPR:
8526 case ROUND_MOD_EXPR:
8527 case TRUNC_MOD_EXPR:
8528 /* X % 1 is always zero, but be sure to preserve any side
8529 effects in X. */
8530 if (integer_onep (arg1))
8531 return omit_one_operand (type, integer_zero_node, arg0);
8533 /* For X % 0, return X % 0 unchanged so that we can get the
8534 proper warnings and errors. */
8535 if (integer_zerop (arg1))
8536 return NULL_TREE;
8538 /* 0 % X is always zero, but be sure to preserve any side
8539 effects in X. Place this after checking for X == 0. */
8540 if (integer_zerop (arg0))
8541 return omit_one_operand (type, integer_zero_node, arg1);
8543 /* X % -1 is zero. */
8544 if (!TYPE_UNSIGNED (type)
8545 && TREE_CODE (arg1) == INTEGER_CST
8546 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8547 && TREE_INT_CST_HIGH (arg1) == -1)
8548 return omit_one_operand (type, integer_zero_node, arg0);
8550 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8551 i.e. "X % C" into "X & C2", if X and C are positive. */
8552 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8553 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8554 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8556 unsigned HOST_WIDE_INT high, low;
8557 tree mask;
8558 int l;
8560 l = tree_log2 (arg1);
8561 if (l >= HOST_BITS_PER_WIDE_INT)
8563 high = ((unsigned HOST_WIDE_INT) 1
8564 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8565 low = -1;
8567 else
8569 high = 0;
8570 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8573 mask = build_int_cst_wide (type, low, high);
8574 return fold_build2 (BIT_AND_EXPR, type,
8575 fold_convert (type, arg0), mask);
8578 /* X % -C is the same as X % C. */
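/* For example, X % -5 folds to X % 5; with truncating division
the sign of the result follows the dividend. */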
8579 if (code == TRUNC_MOD_EXPR
8580 && !TYPE_UNSIGNED (type)
8581 && TREE_CODE (arg1) == INTEGER_CST
8582 && !TREE_CONSTANT_OVERFLOW (arg1)
8583 && TREE_INT_CST_HIGH (arg1) < 0
8584 && !flag_trapv
8585 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8586 && !sign_bit_p (arg1, arg1))
8587 return fold_build2 (code, type, fold_convert (type, arg0),
8588 fold_convert (type, negate_expr (arg1)));
8590 /* X % -Y is the same as X % Y. */
8591 if (code == TRUNC_MOD_EXPR
8592 && !TYPE_UNSIGNED (type)
8593 && TREE_CODE (arg1) == NEGATE_EXPR
8594 && !flag_trapv)
8595 return fold_build2 (code, type, fold_convert (type, arg0),
8596 fold_convert (type, TREE_OPERAND (arg1, 0)));
8598 if (TREE_CODE (arg1) == INTEGER_CST
8599 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8600 return fold_convert (type, tem);
8602 return NULL_TREE;
8604 case LROTATE_EXPR:
8605 case RROTATE_EXPR:
8606 if (integer_all_onesp (arg0))
8607 return omit_one_operand (type, arg0, arg1);
8608 goto shift;
8610 case RSHIFT_EXPR:
8611 /* Optimize -1 >> x for arithmetic right shifts. */
8612 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8613 return omit_one_operand (type, arg0, arg1);
8614 /* ... fall through ... */
8616 case LSHIFT_EXPR:
8617 shift:
8618 if (integer_zerop (arg1))
8619 return non_lvalue (fold_convert (type, arg0));
8620 if (integer_zerop (arg0))
8621 return omit_one_operand (type, arg0, arg1);
8623 /* Since a negative shift count is not well-defined,
8624 don't try to compute it in the compiler. */
8625 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8626 return NULL_TREE;
8628 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8629 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8630 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8631 && host_integerp (TREE_OPERAND (arg0, 1), false)
8632 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8634 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8635 + TREE_INT_CST_LOW (arg1));
8637 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8638 being well defined. */
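/* For example, on a 32-bit type, (x >> 20) >> 20 folds to
x >> 31 for signed x and to 0 for unsigned x. */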
8639 if (low >= TYPE_PRECISION (type))
8641 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8642 low = low % TYPE_PRECISION (type);
8643 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8644 return build_int_cst (type, 0);
8645 else
8646 low = TYPE_PRECISION (type) - 1;
8649 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8650 build_int_cst (type, low));
8653 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8654 into x & ((unsigned)-1 >> c) for unsigned types. */
8655 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8656 || (TYPE_UNSIGNED (type)
8657 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8658 && host_integerp (arg1, false)
8659 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8660 && host_integerp (TREE_OPERAND (arg0, 1), false)
8661 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8663 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8664 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8665 tree lshift;
8666 tree arg00;
8668 if (low0 == low1)
8670 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8672 lshift = build_int_cst (type, -1);
8673 lshift = int_const_binop (code, lshift, arg1, 0);
8675 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8679 /* Rewrite an LROTATE_EXPR by a constant into an
8680 RROTATE_EXPR by a new constant. */
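/* For example, in a 32-bit mode a left-rotate by 3 becomes a
right-rotate by 29. */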
8681 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8683 tree tem = build_int_cst (NULL_TREE,
8684 GET_MODE_BITSIZE (TYPE_MODE (type)));
8685 tem = fold_convert (TREE_TYPE (arg1), tem);
8686 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8687 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8690 /* If we have a rotate of a bit operation with the rotate count and
8691 the second operand of the bit operation both constant,
8692 permute the two operations. */
8693 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8694 && (TREE_CODE (arg0) == BIT_AND_EXPR
8695 || TREE_CODE (arg0) == BIT_IOR_EXPR
8696 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8697 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8698 return fold_build2 (TREE_CODE (arg0), type,
8699 fold_build2 (code, type,
8700 TREE_OPERAND (arg0, 0), arg1),
8701 fold_build2 (code, type,
8702 TREE_OPERAND (arg0, 1), arg1));
8704 /* Two consecutive rotates adding up to the width of the mode can
8705 be ignored. */
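/* For example, in a 32-bit mode, rotating x right by 12 and then
right by 20 yields x again. */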
8706 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8707 && TREE_CODE (arg0) == RROTATE_EXPR
8708 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8709 && TREE_INT_CST_HIGH (arg1) == 0
8710 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8711 && ((TREE_INT_CST_LOW (arg1)
8712 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8713 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8714 return TREE_OPERAND (arg0, 0);
8716 return NULL_TREE;
8718 case MIN_EXPR:
8719 if (operand_equal_p (arg0, arg1, 0))
8720 return omit_one_operand (type, arg0, arg1);
8721 if (INTEGRAL_TYPE_P (type)
8722 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8723 return omit_one_operand (type, arg1, arg0);
8724 goto associate;
8726 case MAX_EXPR:
8727 if (operand_equal_p (arg0, arg1, 0))
8728 return omit_one_operand (type, arg0, arg1);
8729 if (INTEGRAL_TYPE_P (type)
8730 && TYPE_MAX_VALUE (type)
8731 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8732 return omit_one_operand (type, arg1, arg0);
8733 goto associate;
8735 case TRUTH_ANDIF_EXPR:
8736 /* Note that the operands of this must be ints
8737 and their values must be 0 or 1.
8738 ("true" is a fixed value perhaps depending on the language.) */
8739 /* If first arg is constant zero, return it. */
8740 if (integer_zerop (arg0))
8741 return fold_convert (type, arg0);
8742 case TRUTH_AND_EXPR:
8743 /* If either arg is constant true, drop it. */
8744 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8745 return non_lvalue (fold_convert (type, arg1));
8746 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8747 /* Preserve sequence points. */
8748 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8749 return non_lvalue (fold_convert (type, arg0));
8750 /* If second arg is constant zero, result is zero, but first arg
8751 must be evaluated. */
8752 if (integer_zerop (arg1))
8753 return omit_one_operand (type, arg1, arg0);
8754 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8755 case will be handled here. */
8756 if (integer_zerop (arg0))
8757 return omit_one_operand (type, arg0, arg1);
8759 /* !X && X is always false. */
8760 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8761 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8762 return omit_one_operand (type, integer_zero_node, arg1);
8763 /* X && !X is always false. */
8764 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8765 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8766 return omit_one_operand (type, integer_zero_node, arg0);
8768 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8769 means A >= Y && A != MAX, but in this case we know that
8770 A < X <= MAX. */
8772 if (!TREE_SIDE_EFFECTS (arg0)
8773 && !TREE_SIDE_EFFECTS (arg1))
8775 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8776 if (tem && !operand_equal_p (tem, arg0, 0))
8777 return fold_build2 (code, type, tem, arg1);
8779 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8780 if (tem && !operand_equal_p (tem, arg1, 0))
8781 return fold_build2 (code, type, arg0, tem);
8784 truth_andor:
8785 /* We only do these simplifications if we are optimizing. */
8786 if (!optimize)
8787 return NULL_TREE;
8789 /* Check for things like (A || B) && (A || C). We can convert this
8790 to A || (B && C). Note that either operator can be any of the four
8791 truth and/or operations and the transformation will still be
8792 valid. Also note that we only care about order for the
8793 ANDIF and ORIF operators. If B contains side effects, this
8794 might change the truth-value of A. */
8795 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8796 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8797 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8798 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8799 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8800 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8802 tree a00 = TREE_OPERAND (arg0, 0);
8803 tree a01 = TREE_OPERAND (arg0, 1);
8804 tree a10 = TREE_OPERAND (arg1, 0);
8805 tree a11 = TREE_OPERAND (arg1, 1);
8806 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8807 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8808 && (code == TRUTH_AND_EXPR
8809 || code == TRUTH_OR_EXPR));
8811 if (operand_equal_p (a00, a10, 0))
8812 return fold_build2 (TREE_CODE (arg0), type, a00,
8813 fold_build2 (code, type, a01, a11));
8814 else if (commutative && operand_equal_p (a00, a11, 0))
8815 return fold_build2 (TREE_CODE (arg0), type, a00,
8816 fold_build2 (code, type, a01, a10));
8817 else if (commutative && operand_equal_p (a01, a10, 0))
8818 return fold_build2 (TREE_CODE (arg0), type, a01,
8819 fold_build2 (code, type, a00, a11));
8821 /* This case is tricky because we must either have commutative
8822 operators or else A10 must not have side-effects. */
8824 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8825 && operand_equal_p (a01, a11, 0))
8826 return fold_build2 (TREE_CODE (arg0), type,
8827 fold_build2 (code, type, a00, a10),
8828 a01);
8831 /* See if we can build a range comparison. */
8832 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8833 return tem;
8835 /* Check for the possibility of merging component references. If our
8836 lhs is another similar operation, try to merge its rhs with our
8837 rhs. Then try to merge our lhs and rhs. */
8838 if (TREE_CODE (arg0) == code
8839 && 0 != (tem = fold_truthop (code, type,
8840 TREE_OPERAND (arg0, 1), arg1)))
8841 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8843 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8844 return tem;
8846 return NULL_TREE;
8848 case TRUTH_ORIF_EXPR:
8849 /* Note that the operands of this must be ints
8850 and their values must be 0 or 1.
8851 ("true" is a fixed value perhaps depending on the language.) */
8852 /* If first arg is constant true, return it. */
8853 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8854 return fold_convert (type, arg0);
8855 case TRUTH_OR_EXPR:
8856 /* If either arg is constant zero, drop it. */
8857 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8858 return non_lvalue (fold_convert (type, arg1));
8859 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8860 /* Preserve sequence points. */
8861 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8862 return non_lvalue (fold_convert (type, arg0));
8863 /* If second arg is constant true, result is true, but we must
8864 evaluate first arg. */
8865 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8866 return omit_one_operand (type, arg1, arg0);
8867 /* Likewise for first arg, but note this only occurs here for
8868 TRUTH_OR_EXPR. */
8869 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8870 return omit_one_operand (type, arg0, arg1);
8872 /* !X || X is always true. */
8873 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8874 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8875 return omit_one_operand (type, integer_one_node, arg1);
8876 /* X || !X is always true. */
8877 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8878 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8879 return omit_one_operand (type, integer_one_node, arg0);
8881 goto truth_andor;
8883 case TRUTH_XOR_EXPR:
8884 /* If the second arg is constant zero, drop it. */
8885 if (integer_zerop (arg1))
8886 return non_lvalue (fold_convert (type, arg0));
8887 /* If the second arg is constant true, this is a logical inversion. */
8888 if (integer_onep (arg1))
8890 /* Only call invert_truthvalue if operand is a truth value. */
8891 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8892 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8893 else
8894 tem = invert_truthvalue (arg0);
8895 return non_lvalue (fold_convert (type, tem));
8897 /* Identical arguments cancel to zero. */
8898 if (operand_equal_p (arg0, arg1, 0))
8899 return omit_one_operand (type, integer_zero_node, arg0);
8901 /* !X ^ X is always true. */
8902 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8903 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8904 return omit_one_operand (type, integer_one_node, arg1);
8906 /* X ^ !X is always true. */
8907 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8908 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8909 return omit_one_operand (type, integer_one_node, arg0);
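   /* For example, "a ^ a" folds to 0, while "!a ^ a" and "a ^ !a"
      both fold to 1, for any truth value A without side effects.  */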
8911 return NULL_TREE;
8913 case EQ_EXPR:
8914 case NE_EXPR:
8915 case LT_EXPR:
8916 case GT_EXPR:
8917 case LE_EXPR:
8918 case GE_EXPR:
8919 /* If one arg is a real or integer constant, put it last. */
8920 if (tree_swap_operands_p (arg0, arg1, true))
8921 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8923 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
8924 if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
8925 && (code == NE_EXPR || code == EQ_EXPR))
8926 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8927 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8928 arg1));
8930 /* bool_var != 0 becomes bool_var. */
8931 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8932 && code == NE_EXPR)
8933 return non_lvalue (fold_convert (type, arg0));
8935 /* bool_var == 1 becomes bool_var. */
8936 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8937 && code == EQ_EXPR)
8938 return non_lvalue (fold_convert (type, arg0));
8940 /* bool_var != 1 becomes !bool_var. */
8941 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8942 && code == NE_EXPR)
8943 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8945 /* bool_var == 0 becomes !bool_var. */
8946 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8947 && code == EQ_EXPR)
8948 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
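   /* Taken together, the four cases above reduce every comparison of
      a boolean against 0 or 1 to the variable or its negation: e.g.
      "b != 0" and "b == 1" become "b"; "b == 0" and "b != 1" become
      "!b".  */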
8950 /* If this is an equality comparison of the address of a non-weak
8951 object against zero, then we know the result. */
8952 if ((code == EQ_EXPR || code == NE_EXPR)
8953 && TREE_CODE (arg0) == ADDR_EXPR
8954 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8955 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8956 && integer_zerop (arg1))
8957 return constant_boolean_node (code != EQ_EXPR, type);
8959 /* If this is an equality comparison of the addresses of two non-weak,
8960 unaliased symbols neither of which is extern (since we do not
8961 have access to attributes for externs), then we know the result. */
8962 if ((code == EQ_EXPR || code == NE_EXPR)
8963 && TREE_CODE (arg0) == ADDR_EXPR
8964 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8965 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8966 && ! lookup_attribute ("alias",
8967 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8968 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8969 && TREE_CODE (arg1) == ADDR_EXPR
8970 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8971 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8972 && ! lookup_attribute ("alias",
8973 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8974 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8976 /* We know that we're looking at the addresses of two
8977 non-weak, unaliased, static _DECL nodes.
8979 It is both wasteful and incorrect to call operand_equal_p
8980 to compare the two ADDR_EXPR nodes. It is wasteful in that
8981 all we need to do is test pointer equality for the arguments
8982 to the two ADDR_EXPR nodes. It is incorrect to use
8983 operand_equal_p as that function is NOT equivalent to a
8984 C equality test. It can in fact return false for two
8985 objects which would test as equal using the C equality
8986 operator. */
8987 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8988 return constant_boolean_node (equal
8989 ? code == EQ_EXPR : code != EQ_EXPR,
8990 type);
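   /* For example, given the file-scope declaration
      "static int x, y;", the test "&x == &y" folds to 0 and
      "&x != &y" folds to 1, because distinct non-weak, unaliased
      decls cannot share an address.  */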
8993 /* If this is a comparison of two exprs that look like an
8994 ARRAY_REF of the same object, then we can fold this to a
8995 comparison of the two offsets. */
8996 if (TREE_CODE_CLASS (code) == tcc_comparison)
8998 tree base0, offset0, base1, offset1;
9000 if (extract_array_ref (arg0, &base0, &offset0)
9001 && extract_array_ref (arg1, &base1, &offset1)
9002 && operand_equal_p (base0, base1, 0))
9004 /* Handle no offsets on both sides specially. */
9005 if (offset0 == NULL_TREE
9006 && offset1 == NULL_TREE)
9007 return fold_build2 (code, type, integer_zero_node,
9008 integer_zero_node);
9010 if (!offset0 || !offset1
9011 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
9013 if (offset0 == NULL_TREE)
9014 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9015 if (offset1 == NULL_TREE)
9016 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9017 return fold_build2 (code, type, offset0, offset1);
9022 /* Transform comparisons of the form X +- C CMP X. */
9023 if ((code != EQ_EXPR && code != NE_EXPR)
9024 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9025 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9026 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9027 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9028 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9029 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9030 && !(flag_wrapv || flag_trapv))))
9032 tree arg01 = TREE_OPERAND (arg0, 1);
9033 enum tree_code code0 = TREE_CODE (arg0);
9034 int is_positive;
9036 if (TREE_CODE (arg01) == REAL_CST)
9037 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9038 else
9039 is_positive = tree_int_cst_sgn (arg01);
9041 /* (X - c) > X becomes false. */
9042 if (code == GT_EXPR
9043 && ((code0 == MINUS_EXPR && is_positive >= 0)
9044 || (code0 == PLUS_EXPR && is_positive <= 0)))
9045 return constant_boolean_node (0, type);
9047 /* Likewise (X + c) < X becomes false. */
9048 if (code == LT_EXPR
9049 && ((code0 == PLUS_EXPR && is_positive >= 0)
9050 || (code0 == MINUS_EXPR && is_positive <= 0)))
9051 return constant_boolean_node (0, type);
9053 /* Convert (X - c) <= X to true. */
9054 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9055 && code == LE_EXPR
9056 && ((code0 == MINUS_EXPR && is_positive >= 0)
9057 || (code0 == PLUS_EXPR && is_positive <= 0)))
9058 return constant_boolean_node (1, type);
9060 /* Convert (X + c) >= X to true. */
9061 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9062 && code == GE_EXPR
9063 && ((code0 == PLUS_EXPR && is_positive >= 0)
9064 || (code0 == MINUS_EXPR && is_positive <= 0)))
9065 return constant_boolean_node (1, type);
9067 if (TREE_CODE (arg01) == INTEGER_CST)
9069 /* Convert X + c > X and X - c < X to true for integers. */
9070 if (code == GT_EXPR
9071 && ((code0 == PLUS_EXPR && is_positive > 0)
9072 || (code0 == MINUS_EXPR && is_positive < 0)))
9073 return constant_boolean_node (1, type);
9075 if (code == LT_EXPR
9076 && ((code0 == MINUS_EXPR && is_positive > 0)
9077 || (code0 == PLUS_EXPR && is_positive < 0)))
9078 return constant_boolean_node (1, type);
9080 /* Convert X + c <= X and X - c >= X to false for integers. */
9081 if (code == LE_EXPR
9082 && ((code0 == PLUS_EXPR && is_positive > 0)
9083 || (code0 == MINUS_EXPR && is_positive < 0)))
9084 return constant_boolean_node (0, type);
9086 if (code == GE_EXPR
9087 && ((code0 == MINUS_EXPR && is_positive > 0)
9088 || (code0 == PLUS_EXPR && is_positive < 0)))
9089 return constant_boolean_node (0, type);
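   /* For example, with signed int X and default overflow semantics
      (neither -fwrapv nor -ftrapv), "x + 1 > x" folds to 1 and
      "x + 1 <= x" folds to 0; with -fwrapv these folds are
      suppressed, since x + 1 may wrap around.  */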
9093 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9094 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9095 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9096 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9097 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9098 && !(flag_wrapv || flag_trapv))
9099 && (TREE_CODE (arg1) == INTEGER_CST
9100 && !TREE_OVERFLOW (arg1)))
9102 tree const1 = TREE_OPERAND (arg0, 1);
9103 tree const2 = arg1;
9104 tree variable = TREE_OPERAND (arg0, 0);
9105 tree lhs;
9106 int lhs_add;
9107 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9109 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9110 TREE_TYPE (arg1), const2, const1);
9111 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9112 && (TREE_CODE (lhs) != INTEGER_CST
9113 || !TREE_OVERFLOW (lhs)))
9114 return fold_build2 (code, type, variable, lhs);
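   /* For example, with signed int X, "x + 3 < 7" is rewritten here
      as "x < 4" by folding 7 - 3 at compile time; the fold is
      abandoned if the adjusted constant overflows.  */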
9117 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9119 tree targ0 = strip_float_extensions (arg0);
9120 tree targ1 = strip_float_extensions (arg1);
9121 tree newtype = TREE_TYPE (targ0);
9123 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9124 newtype = TREE_TYPE (targ1);
9126 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9127 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9128 return fold_build2 (code, type, fold_convert (newtype, targ0),
9129 fold_convert (newtype, targ1));
9131 /* (-a) CMP (-b) -> b CMP a */
9132 if (TREE_CODE (arg0) == NEGATE_EXPR
9133 && TREE_CODE (arg1) == NEGATE_EXPR)
9134 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9135 TREE_OPERAND (arg0, 0));
9137 if (TREE_CODE (arg1) == REAL_CST)
9139 REAL_VALUE_TYPE cst;
9140 cst = TREE_REAL_CST (arg1);
9142 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9143 if (TREE_CODE (arg0) == NEGATE_EXPR)
9144 return
9145 fold_build2 (swap_tree_comparison (code), type,
9146 TREE_OPERAND (arg0, 0),
9147 build_real (TREE_TYPE (arg1),
9148 REAL_VALUE_NEGATE (cst)));
9150 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9151 /* a CMP (-0) -> a CMP 0 */
9152 if (REAL_VALUE_MINUS_ZERO (cst))
9153 return fold_build2 (code, type, arg0,
9154 build_real (TREE_TYPE (arg1), dconst0));
9156 /* x != NaN is always true, other ops are always false. */
9157 if (REAL_VALUE_ISNAN (cst)
9158 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9160 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9161 return omit_one_operand (type, tem, arg0);
9164 /* Fold comparisons against infinity. */
9165 if (REAL_VALUE_ISINF (cst))
9167 tem = fold_inf_compare (code, type, arg0, arg1);
9168 if (tem != NULL_TREE)
9169 return tem;
9173 /* If this is a comparison of a real constant with a PLUS_EXPR
9174 or a MINUS_EXPR of a real constant, we can convert it into a
9175 comparison with a revised real constant as long as no overflow
9176 occurs when unsafe_math_optimizations are enabled. */
9177 if (flag_unsafe_math_optimizations
9178 && TREE_CODE (arg1) == REAL_CST
9179 && (TREE_CODE (arg0) == PLUS_EXPR
9180 || TREE_CODE (arg0) == MINUS_EXPR)
9181 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9182 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9183 ? MINUS_EXPR : PLUS_EXPR,
9184 arg1, TREE_OPERAND (arg0, 1), 0))
9185 && ! TREE_CONSTANT_OVERFLOW (tem))
9186 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9188 /* Likewise, we can simplify a comparison of a real constant with
9189 a MINUS_EXPR whose first operand is also a real constant, i.e.
9190 (c1 - x) < c2 becomes x > c1-c2. */
9191 if (flag_unsafe_math_optimizations
9192 && TREE_CODE (arg1) == REAL_CST
9193 && TREE_CODE (arg0) == MINUS_EXPR
9194 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9195 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9196 arg1, 0))
9197 && ! TREE_CONSTANT_OVERFLOW (tem))
9198 return fold_build2 (swap_tree_comparison (code), type,
9199 TREE_OPERAND (arg0, 1), tem);
9201 /* Fold comparisons against built-in math functions. */
9202 if (TREE_CODE (arg1) == REAL_CST
9203 && flag_unsafe_math_optimizations
9204 && ! flag_errno_math)
9206 enum built_in_function fcode = builtin_mathfn_code (arg0);
9208 if (fcode != END_BUILTINS)
9210 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9211 if (tem != NULL_TREE)
9212 return tem;
9217 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9218 if (TREE_CONSTANT (arg1)
9219 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9220 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9221 /* This optimization is invalid for ordered comparisons
9222 if CONST+INCR overflows or if foo+incr might overflow.
9223 This optimization is invalid for floating point due to rounding.
9224 For pointer types we assume overflow doesn't happen. */
9225 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9226 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9227 && (code == EQ_EXPR || code == NE_EXPR))))
9229 tree varop, newconst;
9231 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9233 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9234 arg1, TREE_OPERAND (arg0, 1));
9235 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9236 TREE_OPERAND (arg0, 0),
9237 TREE_OPERAND (arg0, 1));
9239 else
9241 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9242 arg1, TREE_OPERAND (arg0, 1));
9243 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9244 TREE_OPERAND (arg0, 0),
9245 TREE_OPERAND (arg0, 1));
9249 /* If VAROP is a reference to a bitfield, we must mask
9250 the constant by the width of the field. */
9251 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9252 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9253 && host_integerp (DECL_SIZE (TREE_OPERAND
9254 (TREE_OPERAND (varop, 0), 1)), 1))
9256 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9257 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9258 tree folded_compare, shift;
9260 /* First check whether the comparison would always come
9261 out the same. If we didn't check, the masking below
9262 could change the meaning of the comparison. */
9263 folded_compare = fold_build2 (code, type,
9264 TREE_OPERAND (varop, 0), arg1);
9265 if (integer_zerop (folded_compare)
9266 || integer_onep (folded_compare))
9267 return omit_one_operand (type, folded_compare, varop);
9269 shift = build_int_cst (NULL_TREE,
9270 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9271 shift = fold_convert (TREE_TYPE (varop), shift);
9272 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9273 newconst, shift);
9274 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9275 newconst, shift);
9278 return fold_build2 (code, type, varop, newconst);
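   /* For example, "i++ == 5" becomes "++i == 6", comparing the
      incremented value directly; for integers this is limited to
      EQ/NE because ordered comparisons could change meaning if
      CONST + INCR overflows.  */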
9281 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9282 This transformation affects the cases which are handled in later
9283 optimizations involving comparisons with non-negative constants. */
9284 if (TREE_CODE (arg1) == INTEGER_CST
9285 && TREE_CODE (arg0) != INTEGER_CST
9286 && tree_int_cst_sgn (arg1) > 0)
9288 switch (code)
9290 case GE_EXPR:
9291 arg1 = const_binop (MINUS_EXPR, arg1,
9292 build_int_cst (TREE_TYPE (arg1), 1), 0);
9293 return fold_build2 (GT_EXPR, type, arg0,
9294 fold_convert (TREE_TYPE (arg0), arg1));
9296 case LT_EXPR:
9297 arg1 = const_binop (MINUS_EXPR, arg1,
9298 build_int_cst (TREE_TYPE (arg1), 1), 0);
9299 return fold_build2 (LE_EXPR, type, arg0,
9300 fold_convert (TREE_TYPE (arg0), arg1));
9302 default:
9303 break;
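   /* For example, "x >= 3" becomes "x > 2" and "x < 3" becomes
      "x <= 2", canonicalizing toward GT/LE so that the
      constant-boundary cases below can apply.  */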
9307 /* Comparisons with the highest or lowest possible integer of
9308 the specified size will have known values. */
9310 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9312 if (TREE_CODE (arg1) == INTEGER_CST
9313 && ! TREE_CONSTANT_OVERFLOW (arg1)
9314 && width <= 2 * HOST_BITS_PER_WIDE_INT
9315 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9316 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9318 HOST_WIDE_INT signed_max_hi;
9319 unsigned HOST_WIDE_INT signed_max_lo;
9320 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9322 if (width <= HOST_BITS_PER_WIDE_INT)
9324 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9325 - 1;
9326 signed_max_hi = 0;
9327 max_hi = 0;
9329 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9331 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9332 min_lo = 0;
9333 min_hi = 0;
9335 else
9337 max_lo = signed_max_lo;
9338 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9339 min_hi = -1;
9342 else
9344 width -= HOST_BITS_PER_WIDE_INT;
9345 signed_max_lo = -1;
9346 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9347 - 1;
9348 max_lo = -1;
9349 min_lo = 0;
9351 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9353 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9354 min_hi = 0;
9356 else
9358 max_hi = signed_max_hi;
9359 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9363 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9364 && TREE_INT_CST_LOW (arg1) == max_lo)
9365 switch (code)
9367 case GT_EXPR:
9368 return omit_one_operand (type, integer_zero_node, arg0);
9370 case GE_EXPR:
9371 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9373 case LE_EXPR:
9374 return omit_one_operand (type, integer_one_node, arg0);
9376 case LT_EXPR:
9377 return fold_build2 (NE_EXPR, type, arg0, arg1);
9379 /* The GE_EXPR and LT_EXPR cases above are not normally
9380 reached because of previous transformations. */
9382 default:
9383 break;
9385 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9386 == max_hi
9387 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9388 switch (code)
9390 case GT_EXPR:
9391 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9392 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9393 case LE_EXPR:
9394 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9395 return fold_build2 (NE_EXPR, type, arg0, arg1);
9396 default:
9397 break;
9399 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9400 == min_hi
9401 && TREE_INT_CST_LOW (arg1) == min_lo)
9402 switch (code)
9404 case LT_EXPR:
9405 return omit_one_operand (type, integer_zero_node, arg0);
9407 case LE_EXPR:
9408 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9410 case GE_EXPR:
9411 return omit_one_operand (type, integer_one_node, arg0);
9413 case GT_EXPR:
9414 return fold_build2 (NE_EXPR, type, op0, op1);
9416 default:
9417 break;
9419 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9420 == min_hi
9421 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9422 switch (code)
9424 case GE_EXPR:
9425 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9426 return fold_build2 (NE_EXPR, type, arg0, arg1);
9427 case LT_EXPR:
9428 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9429 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9430 default:
9431 break;
9434 else if (!in_gimple_form
9435 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9436 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9437 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9438 /* signed_type does not work on pointer types. */
9439 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9441 /* The following case also applies to X < signed_max+1
9442 and X >= signed_max+1 because of previous transformations. */
9443 if (code == LE_EXPR || code == GT_EXPR)
9445 tree st0, st1;
9446 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9447 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9448 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9449 type, fold_convert (st0, arg0),
9450 build_int_cst (st1, 0));
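   /* For example, if X and the constant share an unsigned char type,
      "x > 255" folds to 0 and "x <= 255" folds to 1; and for
      unsigned int X, "x <= INT_MAX" becomes the sign test
      "(int) x >= 0".  */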
9456 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9457 a MINUS_EXPR of a constant, we can convert it into a comparison with
9458 a revised constant as long as no overflow occurs. */
9459 if ((code == EQ_EXPR || code == NE_EXPR)
9460 && TREE_CODE (arg1) == INTEGER_CST
9461 && (TREE_CODE (arg0) == PLUS_EXPR
9462 || TREE_CODE (arg0) == MINUS_EXPR)
9463 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9464 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9465 ? MINUS_EXPR : PLUS_EXPR,
9466 arg1, TREE_OPERAND (arg0, 1), 0))
9467 && ! TREE_CONSTANT_OVERFLOW (tem))
9468 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9470 /* Similarly for a NEGATE_EXPR. */
9471 else if ((code == EQ_EXPR || code == NE_EXPR)
9472 && TREE_CODE (arg0) == NEGATE_EXPR
9473 && TREE_CODE (arg1) == INTEGER_CST
9474 && 0 != (tem = negate_expr (arg1))
9475 && TREE_CODE (tem) == INTEGER_CST
9476 && ! TREE_CONSTANT_OVERFLOW (tem))
9477 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9479 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9480 for !=. Don't do this for ordered comparisons due to overflow. */
9481 else if ((code == NE_EXPR || code == EQ_EXPR)
9482 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9483 return fold_build2 (code, type,
9484 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9486 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9487 && (TREE_CODE (arg0) == NOP_EXPR
9488 || TREE_CODE (arg0) == CONVERT_EXPR))
9490 /* If we are widening one operand of an integer comparison,
9491 see if the other operand is similarly being widened. Perhaps we
9492 can do the comparison in the narrower type. */
9493 tem = fold_widened_comparison (code, type, arg0, arg1);
9494 if (tem)
9495 return tem;
9497 /* Or if we are changing signedness. */
9498 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9499 if (tem)
9500 return tem;
9503 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9504 constant, we can simplify it. */
9505 else if (TREE_CODE (arg1) == INTEGER_CST
9506 && (TREE_CODE (arg0) == MIN_EXPR
9507 || TREE_CODE (arg0) == MAX_EXPR)
9508 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9510 tem = optimize_minmax_comparison (code, type, op0, op1);
9511 if (tem)
9512 return tem;
9514 return NULL_TREE;
9517 /* If we are comparing an ABS_EXPR with a constant, we can
9518 convert all the cases into explicit comparisons, but they may
9519 well not be faster than doing the ABS and one comparison.
9520 But ABS (X) <= C is a range comparison, which becomes a subtraction
9521 and a comparison, and is probably faster. */
9522 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9523 && TREE_CODE (arg0) == ABS_EXPR
9524 && ! TREE_SIDE_EFFECTS (arg0)
9525 && (0 != (tem = negate_expr (arg1)))
9526 && TREE_CODE (tem) == INTEGER_CST
9527 && ! TREE_CONSTANT_OVERFLOW (tem))
9528 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9529 build2 (GE_EXPR, type,
9530 TREE_OPERAND (arg0, 0), tem),
9531 build2 (LE_EXPR, type,
9532 TREE_OPERAND (arg0, 0), arg1));
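   /* For example, "abs (x) <= 5" expands to "x >= -5 && x <= 5",
      which the range-test machinery can then collapse into a single
      unsigned comparison.  */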
9534 /* Convert ABS_EXPR<x> >= 0 to true. */
9535 else if (code == GE_EXPR
9536 && tree_expr_nonnegative_p (arg0)
9537 && (integer_zerop (arg1)
9538 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9539 && real_zerop (arg1))))
9540 return omit_one_operand (type, integer_one_node, arg0);
9542 /* Convert ABS_EXPR<x> < 0 to false. */
9543 else if (code == LT_EXPR
9544 && tree_expr_nonnegative_p (arg0)
9545 && (integer_zerop (arg1) || real_zerop (arg1)))
9546 return omit_one_operand (type, integer_zero_node, arg0);
9548 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9549 else if ((code == EQ_EXPR || code == NE_EXPR)
9550 && TREE_CODE (arg0) == ABS_EXPR
9551 && (integer_zerop (arg1) || real_zerop (arg1)))
9552 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9554 /* If this is an EQ or NE comparison with zero and ARG0 is
9555 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9556 two operations, but the latter can be done in one less insn
9557 on machines that have only two-operand insns or on which a
9558 constant cannot be the first operand. */
9559 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9560 && TREE_CODE (arg0) == BIT_AND_EXPR)
9562 tree arg00 = TREE_OPERAND (arg0, 0);
9563 tree arg01 = TREE_OPERAND (arg0, 1);
9564 if (TREE_CODE (arg00) == LSHIFT_EXPR
9565 && integer_onep (TREE_OPERAND (arg00, 0)))
9566 return
9567 fold_build2 (code, type,
9568 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9569 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9570 arg01, TREE_OPERAND (arg00, 1)),
9571 fold_convert (TREE_TYPE (arg0),
9572 integer_one_node)),
9573 arg1);
9574 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9575 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9576 return
9577 fold_build2 (code, type,
9578 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9579 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9580 arg00, TREE_OPERAND (arg01, 1)),
9581 fold_convert (TREE_TYPE (arg0),
9582 integer_one_node)),
9583 arg1);
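   /* For example, "((1 << n) & flags) != 0" is rewritten as
      "((flags >> n) & 1) != 0", which needs one less instruction on
      the two-operand machines described above.  */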
9586 /* If this is an NE or EQ comparison of zero against the result of a
9587 signed MOD operation whose second operand is a power of 2, make
9588 the MOD operation unsigned since it is simpler and equivalent. */
9589 if ((code == NE_EXPR || code == EQ_EXPR)
9590 && integer_zerop (arg1)
9591 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9592 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9593 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9594 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9595 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9596 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9598 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9599 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9600 fold_convert (newtype,
9601 TREE_OPERAND (arg0, 0)),
9602 fold_convert (newtype,
9603 TREE_OPERAND (arg0, 1)));
9605 return fold_build2 (code, type, newmod,
9606 fold_convert (newtype, arg1));
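   /* For example, with signed int X, "x % 4 == 0" is rewritten as
      "(unsigned int) x % 4U == 0"; the two tests agree for every X,
      and the unsigned remainder is simpler.  */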
9609 /* If this is an NE comparison of zero with an AND of one, remove the
9610 comparison since the AND will give the correct value. */
9611 if (code == NE_EXPR && integer_zerop (arg1)
9612 && TREE_CODE (arg0) == BIT_AND_EXPR
9613 && integer_onep (TREE_OPERAND (arg0, 1)))
9614 return fold_convert (type, arg0);
9616 /* If we have (A & C) == C where C is a power of 2, convert this into
9617 (A & C) != 0. Similarly for NE_EXPR. */
9618 if ((code == EQ_EXPR || code == NE_EXPR)
9619 && TREE_CODE (arg0) == BIT_AND_EXPR
9620 && integer_pow2p (TREE_OPERAND (arg0, 1))
9621 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9622 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9623 arg0, fold_convert (TREE_TYPE (arg0),
9624 integer_zero_node));
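   /* For example, with the power-of-two constant 8, "(x & 8) == 8"
      becomes "(x & 8) != 0" and "(x & 8) != 8" becomes
      "(x & 8) == 0".  */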
9626 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9627 bit, then fold the expression into A < 0 or A >= 0. */
9628 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9629 if (tem)
9630 return tem;
9632 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9633 Similarly for NE_EXPR. */
9634 if ((code == EQ_EXPR || code == NE_EXPR)
9635 && TREE_CODE (arg0) == BIT_AND_EXPR
9636 && TREE_CODE (arg1) == INTEGER_CST
9637 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9639 tree notc = fold_build1 (BIT_NOT_EXPR,
9640 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9641 TREE_OPERAND (arg0, 1));
9642 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9643 arg1, notc);
9644 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9645 if (integer_nonzerop (dandnotc))
9646 return omit_one_operand (type, rslt, arg0);
9649 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9650 Similarly for NE_EXPR. */
9651 if ((code == EQ_EXPR || code == NE_EXPR)
9652 && TREE_CODE (arg0) == BIT_IOR_EXPR
9653 && TREE_CODE (arg1) == INTEGER_CST
9654 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9656 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9657 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9658 TREE_OPERAND (arg0, 1), notd);
9659 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9660 if (integer_nonzerop (candnotd))
9661 return omit_one_operand (type, rslt, arg0);
9664 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9665 and similarly for >= into !=. */
9666 if ((code == LT_EXPR || code == GE_EXPR)
9667 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9668 && TREE_CODE (arg1) == LSHIFT_EXPR
9669 && integer_onep (TREE_OPERAND (arg1, 0)))
9670 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9671 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9672 TREE_OPERAND (arg1, 1)),
9673 build_int_cst (TREE_TYPE (arg0), 0));
9675 else if ((code == LT_EXPR || code == GE_EXPR)
9676 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9677 && (TREE_CODE (arg1) == NOP_EXPR
9678 || TREE_CODE (arg1) == CONVERT_EXPR)
9679 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9680 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9681 return
9682 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9683 fold_convert (TREE_TYPE (arg0),
9684 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9685 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9686 1))),
9687 build_int_cst (TREE_TYPE (arg0), 0));
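   /* For example, with unsigned int X, "x < (1 << y)" becomes
      "(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0",
      trading the ordered comparison for an equality test against
      zero.  */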
9689 /* Simplify comparison of something with itself. (For IEEE
9690 floating-point, we can only do some of these simplifications.) */
9691 if (operand_equal_p (arg0, arg1, 0))
9693 switch (code)
9695 case EQ_EXPR:
9696 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9697 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9698 return constant_boolean_node (1, type);
9699 break;
9701 case GE_EXPR:
9702 case LE_EXPR:
9703 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9704 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9705 return constant_boolean_node (1, type);
9706 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9708 case NE_EXPR:
9709 /* For NE, we can only do this simplification if the operands
9710 are integral or we don't honor IEEE floating-point NaNs. */
9711 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9712 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9713 break;
9714 /* ... fall through ... */
9715 case GT_EXPR:
9716 case LT_EXPR:
9717 return constant_boolean_node (0, type);
9718 default:
9719 gcc_unreachable ();
9723 /* If we are comparing an expression that just has comparisons
9724 of two integer values, arithmetic expressions of those comparisons,
9725 and constants, we can simplify it. There are only three cases
9726 to check: the two values can either be equal, the first can be
9727 greater, or the second can be greater. Fold the expression for
9728 those three values. Since each value must be 0 or 1, we have
9729 eight possibilities, each of which corresponds to the constant 0
9730 or 1 or one of the six possible comparisons.
9732 This handles common cases like (a > b) == 0 but also handles
9733 expressions like ((x > y) - (y > x)) > 0, which supposedly
9734 occur in macroized code. */
9736 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9738 tree cval1 = 0, cval2 = 0;
9739 int save_p = 0;
9741 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9742 /* Don't handle degenerate cases here; they should already
9743 have been handled anyway. */
9744 && cval1 != 0 && cval2 != 0
9745 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9746 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9747 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9748 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9749 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9750 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9751 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9753 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9754 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9756 /* We can't just pass T to eval_subst in case cval1 or cval2
9757 was the same as ARG1. */
9759 tree high_result
9760 = fold_build2 (code, type,
9761 eval_subst (arg0, cval1, maxval,
9762 cval2, minval),
9763 arg1);
9764 tree equal_result
9765 = fold_build2 (code, type,
9766 eval_subst (arg0, cval1, maxval,
9767 cval2, maxval),
9768 arg1);
9769 tree low_result
9770 = fold_build2 (code, type,
9771 eval_subst (arg0, cval1, minval,
9772 cval2, maxval),
9773 arg1);
9775 /* All three of these results should be 0 or 1. Confirm they
9776 are. Then use those values to select the proper code
9777 to use. */
9779 if ((integer_zerop (high_result)
9780 || integer_onep (high_result))
9781 && (integer_zerop (equal_result)
9782 || integer_onep (equal_result))
9783 && (integer_zerop (low_result)
9784 || integer_onep (low_result)))
9786 /* Make a 3-bit mask with the high-order bit being the
9787 value for '>', the next for '=', and the low for '<'. */
9788 switch ((integer_onep (high_result) * 4)
9789 + (integer_onep (equal_result) * 2)
9790 + integer_onep (low_result))
9792 case 0:
9793 /* Always false. */
9794 return omit_one_operand (type, integer_zero_node, arg0);
9795 case 1:
9796 code = LT_EXPR;
9797 break;
9798 case 2:
9799 code = EQ_EXPR;
9800 break;
9801 case 3:
9802 code = LE_EXPR;
9803 break;
9804 case 4:
9805 code = GT_EXPR;
9806 break;
9807 case 5:
9808 code = NE_EXPR;
9809 break;
9810 case 6:
9811 code = GE_EXPR;
9812 break;
9813 case 7:
9814 /* Always true. */
9815 return omit_one_operand (type, integer_one_node, arg0);
9818 if (save_p)
9819 return save_expr (build2 (code, type, cval1, cval2));
9820 else
9821 return fold_build2 (code, type, cval1, cval2);
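   /* For example, "(a > b) == 0" folds to "a <= b": the three trial
      substitutions give high_result = 0, equal_result = 1 and
      low_result = 1, so the mask is 3 and LE_EXPR is chosen.  */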
9826 /* If this is a comparison of a field, we may be able to simplify it. */
9827 if (((TREE_CODE (arg0) == COMPONENT_REF
9828 && lang_hooks.can_use_bit_fields_p ())
9829 || TREE_CODE (arg0) == BIT_FIELD_REF)
9830 && (code == EQ_EXPR || code == NE_EXPR)
9831 /* Handle the constant case even without -O
9832 to make sure the warnings are given. */
9833 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9835 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9836 if (t1)
9837 return t1;
9840 /* Fold a comparison of the address of COMPONENT_REFs with the same
9841 type and component to a comparison of the address of the base
9842 object. In short, &x->a OP &y->a to x OP y and
9843 &x->a OP &y.a to x OP &y */
9844 if (TREE_CODE (arg0) == ADDR_EXPR
9845 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9846 && TREE_CODE (arg1) == ADDR_EXPR
9847 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9849 tree cref0 = TREE_OPERAND (arg0, 0);
9850 tree cref1 = TREE_OPERAND (arg1, 0);
9851 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9853 tree op0 = TREE_OPERAND (cref0, 0);
9854 tree op1 = TREE_OPERAND (cref1, 0);
9855 return fold_build2 (code, type,
9856 build_fold_addr_expr (op0),
9857 build_fold_addr_expr (op1));
9861 /* Optimize comparisons of strlen vs zero to a compare of the
9862 first character of the string vs zero. To wit,
9863 strlen(ptr) == 0 => *ptr == 0
9864 strlen(ptr) != 0 => *ptr != 0
9865 Other cases should reduce to one of these two (or a constant)
9866 due to the return value of strlen being unsigned. */
9867 if ((code == EQ_EXPR || code == NE_EXPR)
9868 && integer_zerop (arg1)
9869 && TREE_CODE (arg0) == CALL_EXPR)
9871 tree fndecl = get_callee_fndecl (arg0);
9872 tree arglist;
9874 if (fndecl
9875 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9876 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9877 && (arglist = TREE_OPERAND (arg0, 1))
9878 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9879 && ! TREE_CHAIN (arglist))
9881 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9882 return fold_build2 (code, type, iref,
9883 build_int_cst (TREE_TYPE (iref), 0));
9887 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9888 into a single range test. */
9889 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9890 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9891 && TREE_CODE (arg1) == INTEGER_CST
9892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9893 && !integer_zerop (TREE_OPERAND (arg0, 1))
9894 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9895 && !TREE_OVERFLOW (arg1))
9897 t1 = fold_div_compare (code, type, arg0, arg1);
9898 if (t1 != NULL_TREE)
9899 return t1;
9902 if ((code == EQ_EXPR || code == NE_EXPR)
9903 && integer_zerop (arg1)
9904 && tree_expr_nonzero_p (arg0))
9906 tree res = constant_boolean_node (code == NE_EXPR, type);
9907 return omit_one_operand (type, res, arg0);
9910 t1 = fold_relational_const (code, type, arg0, arg1);
9911 return t1 == NULL_TREE ? NULL_TREE : t1;
9913 case UNORDERED_EXPR:
9914 case ORDERED_EXPR:
9915 case UNLT_EXPR:
9916 case UNLE_EXPR:
9917 case UNGT_EXPR:
9918 case UNGE_EXPR:
9919 case UNEQ_EXPR:
9920 case LTGT_EXPR:
9921 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9923 t1 = fold_relational_const (code, type, arg0, arg1);
9924 if (t1 != NULL_TREE)
9925 return t1;
9928 /* If the first operand is NaN, the result is constant. */
9929 if (TREE_CODE (arg0) == REAL_CST
9930 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9931 && (code != LTGT_EXPR || ! flag_trapping_math))
9933 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9934 ? integer_zero_node
9935 : integer_one_node;
9936 return omit_one_operand (type, t1, arg1);
9939 /* If the second operand is NaN, the result is constant. */
9940 if (TREE_CODE (arg1) == REAL_CST
9941 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9942 && (code != LTGT_EXPR || ! flag_trapping_math))
9944 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9945 ? integer_zero_node
9946 : integer_one_node;
9947 return omit_one_operand (type, t1, arg0);
9950 /* Simplify unordered comparison of something with itself. */
9951 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9952 && operand_equal_p (arg0, arg1, 0))
9953 return constant_boolean_node (1, type);
9955 if (code == LTGT_EXPR
9956 && !flag_trapping_math
9957 && operand_equal_p (arg0, arg1, 0))
9958 return constant_boolean_node (0, type);
9960 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9962 tree targ0 = strip_float_extensions (arg0);
9963 tree targ1 = strip_float_extensions (arg1);
9964 tree newtype = TREE_TYPE (targ0);
9966 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9967 newtype = TREE_TYPE (targ1);
9969 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9970 return fold_build2 (code, type, fold_convert (newtype, targ0),
9971 fold_convert (newtype, targ1));
9974 return NULL_TREE;
9976 case COMPOUND_EXPR:
9977 /* When pedantic, a compound expression can be neither an lvalue
9978 nor an integer constant expression. */
9979 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9980 return NULL_TREE;
9981 /* Don't let (0, 0) be null pointer constant. */
9982 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9983 : fold_convert (type, arg1);
9984 return pedantic_non_lvalue (tem);
9986 case COMPLEX_EXPR:
9987 if ((TREE_CODE (arg0) == REAL_CST
9988 && TREE_CODE (arg1) == REAL_CST)
9989 || (TREE_CODE (arg0) == INTEGER_CST
9990 && TREE_CODE (arg1) == INTEGER_CST))
9991 return build_complex (type, arg0, arg1);
9992 return NULL_TREE;
9994 case ASSERT_EXPR:
9995 /* An ASSERT_EXPR should never be passed to fold_binary. */
9996 gcc_unreachable ();
9998 default:
9999 return NULL_TREE;
10000 } /* switch (code) */
10003 /* Callback for walk_tree, looking for LABEL_EXPR.
10004 Returns *TP if it is a LABEL_EXPR, and NULL_TREE otherwise.
10005 Do not check the sub-tree of GOTO_EXPR. */
10007 static tree
10008 contains_label_1 (tree *tp,
10009 int *walk_subtrees,
10010 void *data ATTRIBUTE_UNUSED)
10012 switch (TREE_CODE (*tp))
10014 case LABEL_EXPR:
10015 return *tp;
10016 case GOTO_EXPR:
10017 *walk_subtrees = 0;
10018 /* no break */
10019 default:
10020 return NULL_TREE;
10024 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10025 accessible from outside the sub-tree. Returns false if no
10026 such addressable label is found. */
10028 static bool
10029 contains_label_p (tree st)
10031 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10034 /* Fold a ternary expression of code CODE and type TYPE with operands
10035 OP0, OP1, and OP2. Return the folded expression if folding is
10036 successful. Otherwise, return NULL_TREE. */
10038 tree
10039 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10041 tree tem;
10042 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10043 enum tree_code_class kind = TREE_CODE_CLASS (code);
10045 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10046 && TREE_CODE_LENGTH (code) == 3);
10048 /* Strip any conversions that don't change the mode. This is safe
10049 for every expression, except for a comparison expression because
10050 its signedness is derived from its operands. So, in the latter
10051 case, only strip conversions that don't change the signedness.
10053 Note that this is done as an internal manipulation within the
10054 constant folder, in order to find the simplest representation of
10055 the arguments so that their form can be studied. In any case,
10056 the appropriate type conversions should be put back in the tree
10057 that will get out of the constant folder. */
10058 if (op0)
10060 arg0 = op0;
10061 STRIP_NOPS (arg0);
10064 if (op1)
10066 arg1 = op1;
10067 STRIP_NOPS (arg1);
10070 switch (code)
10072 case COMPONENT_REF:
10073 if (TREE_CODE (arg0) == CONSTRUCTOR
10074 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10076 unsigned HOST_WIDE_INT idx;
10077 tree field, value;
10078 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10079 if (field == arg1)
10080 return value;
10082 return NULL_TREE;
10084 case COND_EXPR:
10085 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10086 so all simple results must be passed through pedantic_non_lvalue. */
10087 if (TREE_CODE (arg0) == INTEGER_CST)
10089 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10090 tem = integer_zerop (arg0) ? op2 : op1;
10091 /* Only optimize constant conditions when the selected branch
10092 has the same type as the COND_EXPR. This avoids optimizing
10093 away "c ? x : throw", where the throw has a void type.
10094 Avoid throwing away the operand that contains a label. */
10095 if ((!TREE_SIDE_EFFECTS (unused_op)
10096 || !contains_label_p (unused_op))
10097 && (! VOID_TYPE_P (TREE_TYPE (tem))
10098 || VOID_TYPE_P (type)))
10099 return pedantic_non_lvalue (tem);
10100 return NULL_TREE;
10102 if (operand_equal_p (arg1, op2, 0))
10103 return pedantic_omit_one_operand (type, arg1, arg0);
10105 /* If we have A op B ? A : C, we may be able to convert this to a
10106 simpler expression, depending on the operation and the values
10107 of B and C. Signed zeros prevent all of these transformations,
10108 for reasons given above each one.
10110 Also try swapping the arguments and inverting the conditional. */
10111 if (COMPARISON_CLASS_P (arg0)
10112 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10113 arg1, TREE_OPERAND (arg0, 1))
10114 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10116 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10117 if (tem)
10118 return tem;
10121 if (COMPARISON_CLASS_P (arg0)
10122 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10123 op2,
10124 TREE_OPERAND (arg0, 1))
10125 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10127 tem = invert_truthvalue (arg0);
10128 if (COMPARISON_CLASS_P (tem))
10130 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10131 if (tem)
10132 return tem;
10136 /* If the second operand is simpler than the third, swap them
10137 since that produces better jump optimization results. */
10138 if (truth_value_p (TREE_CODE (arg0))
10139 && tree_swap_operands_p (op1, op2, false))
10141 /* See if this can be inverted. If it can't, possibly because
10142 it was a floating-point inequality comparison, don't do
10143 anything. */
10144 tem = invert_truthvalue (arg0);
10146 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10147 return fold_build3 (code, type, tem, op2, op1);
10150 /* Convert A ? 1 : 0 to simply A. */
10151 if (integer_onep (op1)
10152 && integer_zerop (op2)
10153 /* If we try to convert OP0 to our type, the
10154 call to fold will try to move the conversion inside
10155 a COND, which will recurse. In that case, the COND_EXPR
10156 is probably the best choice, so leave it alone. */
10157 && type == TREE_TYPE (arg0))
10158 return pedantic_non_lvalue (arg0);
10160 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10161 over COND_EXPR in cases such as floating point comparisons. */
10162 if (integer_zerop (op1)
10163 && integer_onep (op2)
10164 && truth_value_p (TREE_CODE (arg0)))
10165 return pedantic_non_lvalue (fold_convert (type,
10166 invert_truthvalue (arg0)));
10168 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10169 if (TREE_CODE (arg0) == LT_EXPR
10170 && integer_zerop (TREE_OPERAND (arg0, 1))
10171 && integer_zerop (op2)
10172 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10173 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10174 TREE_TYPE (tem), tem, arg1));
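   /* For example, with signed int X, "x < 0 ? INT_MIN : 0" folds to
      "x & INT_MIN", INT_MIN being exactly the sign bit of X.  */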
10176 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10177 already handled above. */
10178 if (TREE_CODE (arg0) == BIT_AND_EXPR
10179 && integer_onep (TREE_OPERAND (arg0, 1))
10180 && integer_zerop (op2)
10181 && integer_pow2p (arg1))
10183 tree tem = TREE_OPERAND (arg0, 0);
10184 STRIP_NOPS (tem);
10185 if (TREE_CODE (tem) == RSHIFT_EXPR
10186 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10187 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10188 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10189 return fold_build2 (BIT_AND_EXPR, type,
10190 TREE_OPERAND (tem, 0), arg1);
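   /* For example, "(x >> 3) & 1 ? 8 : 0" folds to "x & 8", since
      8 == 1 << 3 matches the shift count.  */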
10193 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10194 is probably obsolete because the first operand should be a
10195 truth value (that's why we have the two cases above), but let's
10196 leave it in until we can confirm this for all front-ends. */
10197 if (integer_zerop (op2)
10198 && TREE_CODE (arg0) == NE_EXPR
10199 && integer_zerop (TREE_OPERAND (arg0, 1))
10200 && integer_pow2p (arg1)
10201 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10202 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10203 arg1, OEP_ONLY_CONST))
10204 return pedantic_non_lvalue (fold_convert (type,
10205 TREE_OPERAND (arg0, 0)));
10207 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10208 if (integer_zerop (op2)
10209 && truth_value_p (TREE_CODE (arg0))
10210 && truth_value_p (TREE_CODE (arg1)))
10211 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10213 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10214 if (integer_onep (op2)
10215 && truth_value_p (TREE_CODE (arg0))
10216 && truth_value_p (TREE_CODE (arg1)))
10218 /* Only perform transformation if ARG0 is easily inverted. */
10219 tem = invert_truthvalue (arg0);
10220 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10221 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10224 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10225 if (integer_zerop (arg1)
10226 && truth_value_p (TREE_CODE (arg0))
10227 && truth_value_p (TREE_CODE (op2)))
10229 /* Only perform transformation if ARG0 is easily inverted. */
10230 tem = invert_truthvalue (arg0);
10231 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10232 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10235 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10236 if (integer_onep (arg1)
10237 && truth_value_p (TREE_CODE (arg0))
10238 && truth_value_p (TREE_CODE (op2)))
10239 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10241 return NULL_TREE;
10243 case CALL_EXPR:
10244 /* Check for a built-in function. */
10245 if (TREE_CODE (op0) == ADDR_EXPR
10246 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10247 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10248 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10249 return NULL_TREE;
10251 case BIT_FIELD_REF:
10252 if (TREE_CODE (arg0) == VECTOR_CST
10253 && type == TREE_TYPE (TREE_TYPE (arg0))
10254 && host_integerp (arg1, 1)
10255 && host_integerp (op2, 1))
10257 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10258 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10260 if (width != 0
10261 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10262 && (idx % width) == 0
10263 && (idx = idx / width)
10264 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10266 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10267 while (idx-- > 0 && elements)
10268 elements = TREE_CHAIN (elements);
10269 if (elements)
10270 return TREE_VALUE (elements);
10271 else
10272 return fold_convert (type, integer_zero_node);
10275 return NULL_TREE;
10277 default:
10278 return NULL_TREE;
10279 } /* switch (code) */
10282 /* Perform constant folding and related simplification of EXPR.
10283 The related simplifications include x*1 => x, x*0 => 0, etc.,
10284 and application of the associative law.
10285 NOP_EXPR conversions may be removed freely (as long as we
10286 are careful not to change the type of the overall expression).
10287 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10288 but we can constant-fold them if they have constant operands. */
10290 #ifdef ENABLE_FOLD_CHECKING
10291 # define fold(x) fold_1 (x)
10292 static tree fold_1 (tree);
10293 static
10294 #endif
10295 tree
10296 fold (tree expr)
10298 const tree t = expr;
10299 enum tree_code code = TREE_CODE (t);
10300 enum tree_code_class kind = TREE_CODE_CLASS (code);
10301 tree tem;
10303 /* Return right away if a constant. */
10304 if (kind == tcc_constant)
10305 return t;
10307 if (IS_EXPR_CODE_CLASS (kind))
10309 tree type = TREE_TYPE (t);
10310 tree op0, op1, op2;
10312 switch (TREE_CODE_LENGTH (code))
10314 case 1:
10315 op0 = TREE_OPERAND (t, 0);
10316 tem = fold_unary (code, type, op0);
10317 return tem ? tem : expr;
10318 case 2:
10319 op0 = TREE_OPERAND (t, 0);
10320 op1 = TREE_OPERAND (t, 1);
10321 tem = fold_binary (code, type, op0, op1);
10322 return tem ? tem : expr;
10323 case 3:
10324 op0 = TREE_OPERAND (t, 0);
10325 op1 = TREE_OPERAND (t, 1);
10326 op2 = TREE_OPERAND (t, 2);
10327 tem = fold_ternary (code, type, op0, op1, op2);
10328 return tem ? tem : expr;
10329 default:
10330 break;
10334 switch (code)
10336 case CONST_DECL:
10337 return fold (DECL_INITIAL (t));
10339 default:
10340 return t;
10341 } /* switch (code) */
10344 #ifdef ENABLE_FOLD_CHECKING
10345 #undef fold
10347 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10348 static void fold_check_failed (tree, tree);
10349 void print_fold_checksum (tree);
10351 /* When --enable-checking=fold is in effect, compute a digest of EXPR
10352 before and after the actual fold call, to verify that fold did not
10353 accidentally change the original EXPR. */
10355 tree
10356 fold (tree expr)
10358 tree ret;
10359 struct md5_ctx ctx;
10360 unsigned char checksum_before[16], checksum_after[16];
10361 htab_t ht;
10363 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10364 md5_init_ctx (&ctx);
10365 fold_checksum_tree (expr, &ctx, ht);
10366 md5_finish_ctx (&ctx, checksum_before);
10367 htab_empty (ht);
10369 ret = fold_1 (expr);
10371 md5_init_ctx (&ctx);
10372 fold_checksum_tree (expr, &ctx, ht);
10373 md5_finish_ctx (&ctx, checksum_after);
10374 htab_delete (ht);
10376 if (memcmp (checksum_before, checksum_after, 16))
10377 fold_check_failed (expr, ret);
10379 return ret;
10382 void
10383 print_fold_checksum (tree expr)
10385 struct md5_ctx ctx;
10386 unsigned char checksum[16], cnt;
10387 htab_t ht;
10389 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10390 md5_init_ctx (&ctx);
10391 fold_checksum_tree (expr, &ctx, ht);
10392 md5_finish_ctx (&ctx, checksum);
10393 htab_delete (ht);
10394 for (cnt = 0; cnt < 16; ++cnt)
10395 fprintf (stderr, "%02x", checksum[cnt]);
10396 putc ('\n', stderr);
10399 static void
10400 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10402 internal_error ("fold check: original tree changed by fold");
10405 static void
10406 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10408 void **slot;
10409 enum tree_code code;
10410 char buf[sizeof (struct tree_function_decl)];
10411 int i, len;
10413 recursive_label:
10415 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10416 <= sizeof (struct tree_function_decl))
10417 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10418 if (expr == NULL)
10419 return;
10420 slot = htab_find_slot (ht, expr, INSERT);
10421 if (*slot != NULL)
10422 return;
10423 *slot = expr;
10424 code = TREE_CODE (expr);
10425 if (TREE_CODE_CLASS (code) == tcc_declaration
10426 && DECL_ASSEMBLER_NAME_SET_P (expr))
10428 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10429 memcpy (buf, expr, tree_size (expr));
10430 expr = (tree) buf;
10431 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10433 else if (TREE_CODE_CLASS (code) == tcc_type
10434 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10435 || TYPE_CACHED_VALUES_P (expr)
10436 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10438 /* Allow these fields to be modified. */
10439 memcpy (buf, expr, tree_size (expr));
10440 expr = (tree) buf;
10441 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10442 TYPE_POINTER_TO (expr) = NULL;
10443 TYPE_REFERENCE_TO (expr) = NULL;
10444 if (TYPE_CACHED_VALUES_P (expr))
10446 TYPE_CACHED_VALUES_P (expr) = 0;
10447 TYPE_CACHED_VALUES (expr) = NULL;
10450 md5_process_bytes (expr, tree_size (expr), ctx);
10451 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10452 if (TREE_CODE_CLASS (code) != tcc_type
10453 && TREE_CODE_CLASS (code) != tcc_declaration
10454 && code != TREE_LIST)
10455 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10456 switch (TREE_CODE_CLASS (code))
10458 case tcc_constant:
10459 switch (code)
10461 case STRING_CST:
10462 md5_process_bytes (TREE_STRING_POINTER (expr),
10463 TREE_STRING_LENGTH (expr), ctx);
10464 break;
10465 case COMPLEX_CST:
10466 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10467 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10468 break;
10469 case VECTOR_CST:
10470 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10471 break;
10472 default:
10473 break;
10475 break;
10476 case tcc_exceptional:
10477 switch (code)
10479 case TREE_LIST:
10480 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10481 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10482 expr = TREE_CHAIN (expr);
10483 goto recursive_label;
10484 break;
10485 case TREE_VEC:
10486 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10487 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10488 break;
10489 default:
10490 break;
10492 break;
10493 case tcc_expression:
10494 case tcc_reference:
10495 case tcc_comparison:
10496 case tcc_unary:
10497 case tcc_binary:
10498 case tcc_statement:
10499 len = TREE_CODE_LENGTH (code);
10500 for (i = 0; i < len; ++i)
10501 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10502 break;
10503 case tcc_declaration:
10504 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10505 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10506 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
10508 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10509 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10510 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10511 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10512 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10514 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10515 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10517 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10519 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10520 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10521 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10523 break;
10524 case tcc_type:
10525 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10526 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10527 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10528 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10529 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10530 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10531 if (INTEGRAL_TYPE_P (expr)
10532 || SCALAR_FLOAT_TYPE_P (expr))
10534 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10535 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10537 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10538 if (TREE_CODE (expr) == RECORD_TYPE
10539 || TREE_CODE (expr) == UNION_TYPE
10540 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10541 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10542 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10543 break;
10544 default:
10545 break;
10549 #endif
10551 /* Fold a unary tree expression with code CODE of type TYPE with an
10552 operand OP0. Return a folded expression if successful. Otherwise,
10553 return a tree expression with code CODE of type TYPE with an
10554 operand OP0. */
10556 tree
10557 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10559 tree tem;
10560 #ifdef ENABLE_FOLD_CHECKING
10561 unsigned char checksum_before[16], checksum_after[16];
10562 struct md5_ctx ctx;
10563 htab_t ht;
10565 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10566 md5_init_ctx (&ctx);
10567 fold_checksum_tree (op0, &ctx, ht);
10568 md5_finish_ctx (&ctx, checksum_before);
10569 htab_empty (ht);
10570 #endif
10572 tem = fold_unary (code, type, op0);
10573 if (!tem)
10574 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10576 #ifdef ENABLE_FOLD_CHECKING
10577 md5_init_ctx (&ctx);
10578 fold_checksum_tree (op0, &ctx, ht);
10579 md5_finish_ctx (&ctx, checksum_after);
10580 htab_delete (ht);
10582 if (memcmp (checksum_before, checksum_after, 16))
10583 fold_check_failed (op0, tem);
10584 #endif
10585 return tem;
10588 /* Fold a binary tree expression with code CODE of type TYPE with
10589 operands OP0 and OP1. Return a folded expression if successful.
10590 Otherwise, return a tree expression with code CODE of type TYPE
10591 with operands OP0 and OP1. */
10593 tree
10594 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10595 MEM_STAT_DECL)
10597 tree tem;
10598 #ifdef ENABLE_FOLD_CHECKING
10599 unsigned char checksum_before_op0[16],
10600 checksum_before_op1[16],
10601 checksum_after_op0[16],
10602 checksum_after_op1[16];
10603 struct md5_ctx ctx;
10604 htab_t ht;
10606 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10607 md5_init_ctx (&ctx);
10608 fold_checksum_tree (op0, &ctx, ht);
10609 md5_finish_ctx (&ctx, checksum_before_op0);
10610 htab_empty (ht);
10612 md5_init_ctx (&ctx);
10613 fold_checksum_tree (op1, &ctx, ht);
10614 md5_finish_ctx (&ctx, checksum_before_op1);
10615 htab_empty (ht);
10616 #endif
10618 tem = fold_binary (code, type, op0, op1);
10619 if (!tem)
10620 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10622 #ifdef ENABLE_FOLD_CHECKING
10623 md5_init_ctx (&ctx);
10624 fold_checksum_tree (op0, &ctx, ht);
10625 md5_finish_ctx (&ctx, checksum_after_op0);
10626 htab_empty (ht);
10628 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10629 fold_check_failed (op0, tem);
10631 md5_init_ctx (&ctx);
10632 fold_checksum_tree (op1, &ctx, ht);
10633 md5_finish_ctx (&ctx, checksum_after_op1);
10634 htab_delete (ht);
10636 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10637 fold_check_failed (op1, tem);
10638 #endif
10639 return tem;
10642 /* Fold a ternary tree expression with code CODE of type TYPE with
10643 operands OP0, OP1, and OP2. Return a folded expression if
10644 successful. Otherwise, return a tree expression with code CODE of
10645 type TYPE with operands OP0, OP1, and OP2. */
10647 tree
10648 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10649 MEM_STAT_DECL)
10651 tree tem;
10652 #ifdef ENABLE_FOLD_CHECKING
10653 unsigned char checksum_before_op0[16],
10654 checksum_before_op1[16],
10655 checksum_before_op2[16],
10656 checksum_after_op0[16],
10657 checksum_after_op1[16],
10658 checksum_after_op2[16];
10659 struct md5_ctx ctx;
10660 htab_t ht;
10662 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10663 md5_init_ctx (&ctx);
10664 fold_checksum_tree (op0, &ctx, ht);
10665 md5_finish_ctx (&ctx, checksum_before_op0);
10666 htab_empty (ht);
10668 md5_init_ctx (&ctx);
10669 fold_checksum_tree (op1, &ctx, ht);
10670 md5_finish_ctx (&ctx, checksum_before_op1);
10671 htab_empty (ht);
10673 md5_init_ctx (&ctx);
10674 fold_checksum_tree (op2, &ctx, ht);
10675 md5_finish_ctx (&ctx, checksum_before_op2);
10676 htab_empty (ht);
10677 #endif
10679 tem = fold_ternary (code, type, op0, op1, op2);
10680 if (!tem)
10681 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10683 #ifdef ENABLE_FOLD_CHECKING
10684 md5_init_ctx (&ctx);
10685 fold_checksum_tree (op0, &ctx, ht);
10686 md5_finish_ctx (&ctx, checksum_after_op0);
10687 htab_empty (ht);
10689 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10690 fold_check_failed (op0, tem);
10692 md5_init_ctx (&ctx);
10693 fold_checksum_tree (op1, &ctx, ht);
10694 md5_finish_ctx (&ctx, checksum_after_op1);
10695 htab_empty (ht);
10697 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10698 fold_check_failed (op1, tem);
10700 md5_init_ctx (&ctx);
10701 fold_checksum_tree (op2, &ctx, ht);
10702 md5_finish_ctx (&ctx, checksum_after_op2);
10703 htab_delete (ht);
10705 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10706 fold_check_failed (op2, tem);
10707 #endif
10708 return tem;
10711 /* Perform constant folding and related simplification of initializer
10712 expression EXPR. These behave identically to "fold_buildN" but ignore
10713 potential run-time traps and exceptions that fold must preserve. */
10715 #define START_FOLD_INIT \
10716 int saved_signaling_nans = flag_signaling_nans;\
10717 int saved_trapping_math = flag_trapping_math;\
10718 int saved_rounding_math = flag_rounding_math;\
10719 int saved_trapv = flag_trapv;\
10720 flag_signaling_nans = 0;\
10721 flag_trapping_math = 0;\
10722 flag_rounding_math = 0;\
10723 flag_trapv = 0
10725 #define END_FOLD_INIT \
10726 flag_signaling_nans = saved_signaling_nans;\
10727 flag_trapping_math = saved_trapping_math;\
10728 flag_rounding_math = saved_rounding_math;\
10729 flag_trapv = saved_trapv
10731 tree
10732 fold_build1_initializer (enum tree_code code, tree type, tree op)
10734 tree result;
10735 START_FOLD_INIT;
10737 result = fold_build1 (code, type, op);
10739 END_FOLD_INIT;
10740 return result;
10743 tree
10744 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10746 tree result;
10747 START_FOLD_INIT;
10749 result = fold_build2 (code, type, op0, op1);
10751 END_FOLD_INIT;
10752 return result;
10755 tree
10756 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10757 tree op2)
10759 tree result;
10760 START_FOLD_INIT;
10762 result = fold_build3 (code, type, op0, op1, op2);
10764 END_FOLD_INIT;
10765 return result;
10768 #undef START_FOLD_INIT
10769 #undef END_FOLD_INIT
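
/* Usage sketch (hypothetical helper): when folding a static
   initializer such as "1.0 / d", fold_build2_initializer temporarily
   clears flag_trapping_math and friends, so the division can fold
   even under options that normally force fold to preserve run-time
   traps.  */
static tree
fold_init_quotient (tree num, tree den)
{
  return fold_build2_initializer (RDIV_EXPR, TREE_TYPE (num), num, den);
}
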
10771 /* Determine if the first argument is a multiple of the second argument. Return 0
10772 if it is not, or if we cannot easily determine that it is.
10774 An example of the sort of thing we care about (at this point; this routine
10775 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10776 fold cases do now) is discovering that
10778 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10780 is a multiple of
10782 SAVE_EXPR (J * 8)
10784 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10786 This code also handles discovering that
10788 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10790 is a multiple of 8 so we don't have to worry about dealing with a
10791 possible remainder.
10793 Note that we *look* inside a SAVE_EXPR only to determine how it was
10794 calculated; it is not safe for fold to do much of anything else with the
10795 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10796 at run time. For example, the latter example above *cannot* be implemented
10797 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10798 evaluation time of the original SAVE_EXPR is not necessarily the same at
10799 the time the new expression is evaluated. The only optimization of this
10800 sort that would be valid is changing
10802 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10804 divided by 8 to
10806 SAVE_EXPR (I) * SAVE_EXPR (J)
10808 (where the same SAVE_EXPR (J) is used in the original and the
10809 transformed version). */
10811 static int
10812 multiple_of_p (tree type, tree top, tree bottom)
10814 if (operand_equal_p (top, bottom, 0))
10815 return 1;
10817 if (TREE_CODE (type) != INTEGER_TYPE)
10818 return 0;
10820 switch (TREE_CODE (top))
10822 case BIT_AND_EXPR:
10823 /* Bitwise AND preserves multiples only of a power of two: if BOTTOM
10824 is a power of two and either operand is a multiple of BOTTOM, then TOP is too.
10825 if (!integer_pow2p (bottom))
10826 return 0;
10827 /* FALLTHRU */
10829 case MULT_EXPR:
10830 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10831 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10833 case PLUS_EXPR:
10834 case MINUS_EXPR:
10835 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10836 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10838 case LSHIFT_EXPR:
10839 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10841 tree op1, t1;
10843 op1 = TREE_OPERAND (top, 1);
10844 /* const_binop may not detect overflow correctly,
10845 so check for it explicitly here. */
10846 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10847 > TREE_INT_CST_LOW (op1)
10848 && TREE_INT_CST_HIGH (op1) == 0
10849 && 0 != (t1 = fold_convert (type,
10850 const_binop (LSHIFT_EXPR,
10851 size_one_node,
10852 op1, 0)))
10853 && ! TREE_OVERFLOW (t1))
10854 return multiple_of_p (type, t1, bottom);
10856 return 0;
10858 case NOP_EXPR:
10859 /* Can't handle conversions from non-integral or wider integral types.
10860 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10861 || (TYPE_PRECISION (type)
10862 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10863 return 0;
10865 /* ... fall through ... */
10867 case SAVE_EXPR:
10868 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10870 case INTEGER_CST:
10871 if (TREE_CODE (bottom) != INTEGER_CST
10872 || (TYPE_UNSIGNED (type)
10873 && (tree_int_cst_sgn (top) < 0
10874 || tree_int_cst_sgn (bottom) < 0)))
10875 return 0;
10876 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10877 top, bottom, 0));
10879 default:
10880 return 0;
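
/* Caller-side sketch (hypothetical helper; multiple_of_p is static to
   this file): round_up and round_down below use exactly this pattern
   to skip work when VALUE is provably already aligned.  */
static bool
known_multiple_of_8 (tree value)
{
  tree eight = build_int_cst (TREE_TYPE (value), 8);
  return multiple_of_p (TREE_TYPE (value), value, eight) != 0;
}
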
10884 /* Return true if `t' is known to be non-negative. */
10886 int
10887 tree_expr_nonnegative_p (tree t)
10889 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10890 return 1;
10892 switch (TREE_CODE (t))
10894 case ABS_EXPR:
10895 /* We can't return 1 if flag_wrapv is set because
10896 ABS_EXPR<INT_MIN> = INT_MIN. */
10897 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10898 return 1;
10899 break;
10901 case INTEGER_CST:
10902 return tree_int_cst_sgn (t) >= 0;
10904 case REAL_CST:
10905 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10907 case PLUS_EXPR:
10908 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10909 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10910 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10912 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10913 both unsigned and at least 2 bits shorter than the result. */
10914 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10915 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10916 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10918 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10919 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10920 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10921 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10923 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10924 TYPE_PRECISION (inner2)) + 1;
10925 return prec < TYPE_PRECISION (TREE_TYPE (t));
10928 break;
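      /* Concrete instance of the rule above (illustrative numbers):
         with 8-bit unsigned X and Y zero-extended to a 32-bit signed
         sum, the result is at most 255 + 255 = 510, far below 2^31,
         and indeed MAX (8, 8) + 1 = 9 < 32.  */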
10930 case MULT_EXPR:
10931 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10933 /* x * x for floating point x is always non-negative. */
10934 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10935 return 1;
10936 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10937 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10940 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
10941 unsigned and the sum of their precisions is less than that of the result.
10942 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10943 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10944 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10946 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10947 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10948 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10949 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10950 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10951 < TYPE_PRECISION (TREE_TYPE (t));
10953 return 0;
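      /* E.g. (illustrative numbers): 8-bit unsigned operands extended
         to a 32-bit signed product give at most 255 * 255 = 65025 <
         2^16, and 8 + 8 = 16 < 32, so the sign bit stays clear.  */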
10955 case BIT_AND_EXPR:
10956 case MAX_EXPR:
10957 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10958 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10960 case BIT_IOR_EXPR:
10961 case BIT_XOR_EXPR:
10962 case MIN_EXPR:
10963 case RDIV_EXPR:
10964 case TRUNC_DIV_EXPR:
10965 case CEIL_DIV_EXPR:
10966 case FLOOR_DIV_EXPR:
10967 case ROUND_DIV_EXPR:
10968 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10969 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10971 case TRUNC_MOD_EXPR:
10972 case CEIL_MOD_EXPR:
10973 case FLOOR_MOD_EXPR:
10974 case ROUND_MOD_EXPR:
10975 case SAVE_EXPR:
10976 case NON_LVALUE_EXPR:
10977 case FLOAT_EXPR:
10978 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10980 case COMPOUND_EXPR:
10981 case MODIFY_EXPR:
10982 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10984 case BIND_EXPR:
10985 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10987 case COND_EXPR:
10988 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10989 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10991 case NOP_EXPR:
10993 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10994 tree outer_type = TREE_TYPE (t);
10996 if (TREE_CODE (outer_type) == REAL_TYPE)
10998 if (TREE_CODE (inner_type) == REAL_TYPE)
10999 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11000 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11002 if (TYPE_UNSIGNED (inner_type))
11003 return 1;
11004 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11007 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11009 if (TREE_CODE (inner_type) == REAL_TYPE)
11010 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11011 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11012 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11013 && TYPE_UNSIGNED (inner_type);
11016 break;
11018 case TARGET_EXPR:
11020 tree temp = TARGET_EXPR_SLOT (t);
11021 t = TARGET_EXPR_INITIAL (t);
11023 /* If the initializer is non-void, then it's a normal expression
11024 that will be assigned to the slot. */
11025 if (!VOID_TYPE_P (t))
11026 return tree_expr_nonnegative_p (t);
11028 /* Otherwise, the initializer sets the slot in some way. One common
11029 way is an assignment statement at the end of the initializer. */
11030 while (1)
11032 if (TREE_CODE (t) == BIND_EXPR)
11033 t = expr_last (BIND_EXPR_BODY (t));
11034 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11035 || TREE_CODE (t) == TRY_CATCH_EXPR)
11036 t = expr_last (TREE_OPERAND (t, 0));
11037 else if (TREE_CODE (t) == STATEMENT_LIST)
11038 t = expr_last (t);
11039 else
11040 break;
11042 if (TREE_CODE (t) == MODIFY_EXPR
11043 && TREE_OPERAND (t, 0) == temp)
11044 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11046 return 0;
11049 case CALL_EXPR:
11051 tree fndecl = get_callee_fndecl (t);
11052 tree arglist = TREE_OPERAND (t, 1);
11053 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11054 switch (DECL_FUNCTION_CODE (fndecl))
11056 CASE_FLT_FN (BUILT_IN_ACOS):
11057 CASE_FLT_FN (BUILT_IN_ACOSH):
11058 CASE_FLT_FN (BUILT_IN_CABS):
11059 CASE_FLT_FN (BUILT_IN_COSH):
11060 CASE_FLT_FN (BUILT_IN_ERFC):
11061 CASE_FLT_FN (BUILT_IN_EXP):
11062 CASE_FLT_FN (BUILT_IN_EXP10):
11063 CASE_FLT_FN (BUILT_IN_EXP2):
11064 CASE_FLT_FN (BUILT_IN_FABS):
11065 CASE_FLT_FN (BUILT_IN_FDIM):
11066 CASE_FLT_FN (BUILT_IN_HYPOT):
11067 CASE_FLT_FN (BUILT_IN_POW10):
11068 CASE_INT_FN (BUILT_IN_FFS):
11069 CASE_INT_FN (BUILT_IN_PARITY):
11070 CASE_INT_FN (BUILT_IN_POPCOUNT):
11071 /* Always true. */
11072 return 1;
11074 CASE_FLT_FN (BUILT_IN_SQRT):
11075 /* sqrt(-0.0) is -0.0. */
11076 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11077 return 1;
11078 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11080 CASE_FLT_FN (BUILT_IN_ASINH):
11081 CASE_FLT_FN (BUILT_IN_ATAN):
11082 CASE_FLT_FN (BUILT_IN_ATANH):
11083 CASE_FLT_FN (BUILT_IN_CBRT):
11084 CASE_FLT_FN (BUILT_IN_CEIL):
11085 CASE_FLT_FN (BUILT_IN_ERF):
11086 CASE_FLT_FN (BUILT_IN_EXPM1):
11087 CASE_FLT_FN (BUILT_IN_FLOOR):
11088 CASE_FLT_FN (BUILT_IN_FMOD):
11089 CASE_FLT_FN (BUILT_IN_FREXP):
11090 CASE_FLT_FN (BUILT_IN_LCEIL):
11091 CASE_FLT_FN (BUILT_IN_LDEXP):
11092 CASE_FLT_FN (BUILT_IN_LFLOOR):
11093 CASE_FLT_FN (BUILT_IN_LLCEIL):
11094 CASE_FLT_FN (BUILT_IN_LLFLOOR):
11095 CASE_FLT_FN (BUILT_IN_LLRINT):
11096 CASE_FLT_FN (BUILT_IN_LLROUND):
11097 CASE_FLT_FN (BUILT_IN_LRINT):
11098 CASE_FLT_FN (BUILT_IN_LROUND):
11099 CASE_FLT_FN (BUILT_IN_MODF):
11100 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11101 CASE_FLT_FN (BUILT_IN_POW):
11102 CASE_FLT_FN (BUILT_IN_RINT):
11103 CASE_FLT_FN (BUILT_IN_ROUND):
11104 CASE_FLT_FN (BUILT_IN_SIGNBIT):
11105 CASE_FLT_FN (BUILT_IN_SINH):
11106 CASE_FLT_FN (BUILT_IN_TANH):
11107 CASE_FLT_FN (BUILT_IN_TRUNC):
11108 /* True if the 1st argument is nonnegative. */
11109 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11111 CASE_FLT_FN (BUILT_IN_FMAX):
11112 /* True if either the 1st or the 2nd argument is nonnegative. */
11113 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11114 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11116 CASE_FLT_FN (BUILT_IN_FMIN):
11117 /* True if the 1st AND 2nd arguments are nonnegative. */
11118 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11119 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11121 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11122 /* True if the 2nd argument is nonnegative. */
11123 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11125 default:
11126 break;
11130 /* ... fall through ... */
11132 default:
11133 if (truth_value_p (TREE_CODE (t)))
11134 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11135 return 1;
11138 /* We don't know the sign of `t', so be conservative and return false. */
11139 return 0;
11142 /* Return true when T is an address and is known to be nonzero.
11143 For floating point we further ensure that T is not denormal.
11144 Similar logic is present in nonzero_address in rtlanal.h. */
11146 bool
11147 tree_expr_nonzero_p (tree t)
11149 tree type = TREE_TYPE (t);
11151 /* Doing something useful for floating point would need more work. */
11152 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11153 return false;
11155 switch (TREE_CODE (t))
11157 case ABS_EXPR:
11158 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11160 case INTEGER_CST:
11161 /* We used to test for !integer_zerop here. This does not work correctly
11162 if TREE_CONSTANT_OVERFLOW (t). */
11163 return (TREE_INT_CST_LOW (t) != 0
11164 || TREE_INT_CST_HIGH (t) != 0);
11166 case PLUS_EXPR:
11167 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11169 /* In the presence of negative values it is hard
11170 to say anything definite. */
11171 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11172 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11173 return false;
11174 /* One of the operands must be positive and the other non-negative. */
11175 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11176 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11178 break;
11180 case MULT_EXPR:
11181 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11183 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11184 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11186 break;
11188 case NOP_EXPR:
11190 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11191 tree outer_type = TREE_TYPE (t);
11193 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11194 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11196 break;
11198 case ADDR_EXPR:
11200 tree base = get_base_address (TREE_OPERAND (t, 0));
11202 if (!base)
11203 return false;
11205 /* Weak declarations may link to NULL. */
11206 if (VAR_OR_FUNCTION_DECL_P (base))
11207 return !DECL_WEAK (base);
11209 /* Constants are never weak. */
11210 if (CONSTANT_CLASS_P (base))
11211 return true;
11213 return false;
11216 case COND_EXPR:
11217 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11218 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11220 case MIN_EXPR:
11221 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11222 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11224 case MAX_EXPR:
11225 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11227 /* When both operands are nonzero, MAX must be too. */
11228 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11229 return true;
11231 /* MAX where operand 0 is positive is positive. */
11232 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11234 /* MAX where operand 1 is positive is positive. */
11235 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11236 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11237 return true;
11238 break;
11240 case COMPOUND_EXPR:
11241 case MODIFY_EXPR:
11242 case BIND_EXPR:
11243 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11245 case SAVE_EXPR:
11246 case NON_LVALUE_EXPR:
11247 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11249 case BIT_IOR_EXPR:
11250 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11251 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11253 case CALL_EXPR:
11254 return alloca_call_p (t);
11256 default:
11257 break;
11259 return false;
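
/* Caller-side sketch (hypothetical helper): the ADDR_EXPR case above
   is what lets "&x != 0" fold to true for a non-weak declaration.  */
static bool
address_known_nonnull (tree addr)
{
  return TREE_CODE (addr) == ADDR_EXPR && tree_expr_nonzero_p (addr);
}
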
11262 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11263 attempt to fold the expression to a constant without modifying TYPE,
11264 OP0 or OP1.
11266 If the expression could be simplified to a constant, then return
11267 the constant. If the expression would not be simplified to a
11268 constant, then return NULL_TREE. */
11270 tree
11271 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11273 tree tem = fold_binary (code, type, op0, op1);
11274 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11277 /* Given the components of a unary expression CODE, TYPE and OP0,
11278 attempt to fold the expression to a constant without modifying
11279 TYPE or OP0.
11281 If the expression could be simplified to a constant, then return
11282 the constant. If the expression would not be simplified to a
11283 constant, then return NULL_TREE. */
11285 tree
11286 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11288 tree tem = fold_unary (code, type, op0);
11289 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
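
/* Usage sketch (hypothetical helper): returns the INTEGER_CST for
   A + B when the addition folds to a constant, or NULL_TREE without
   building any new non-constant node.  */
static tree
try_constant_sum (tree a, tree b)
{
  return fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (a), a, b);
}
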
11292 /* If EXP represents referencing an element in a constant string
11293 (either via pointer arithmetic or array indexing), return the
11294 tree representing the value accessed, otherwise return NULL. */
11296 tree
11297 fold_read_from_constant_string (tree exp)
11299 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11301 tree exp1 = TREE_OPERAND (exp, 0);
11302 tree index;
11303 tree string;
11305 if (TREE_CODE (exp) == INDIRECT_REF)
11306 string = string_constant (exp1, &index);
11307 else
11309 tree low_bound = array_ref_low_bound (exp);
11310 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11312 /* Optimize the special case of a zero lower bound.
11314 We convert the low_bound to sizetype to avoid some problems
11315 with constant folding. (E.g. suppose the lower bound is 1,
11316 and its mode is QI. Without the conversion, (ARRAY
11317 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11318 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11319 if (! integer_zerop (low_bound))
11320 index = size_diffop (index, fold_convert (sizetype, low_bound));
11322 string = exp1;
11325 if (string
11326 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11327 && TREE_CODE (string) == STRING_CST
11328 && TREE_CODE (index) == INTEGER_CST
11329 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11330 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11331 == MODE_INT)
11332 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11333 return fold_convert (TREE_TYPE (exp),
11334 build_int_cst (NULL_TREE,
11335 (TREE_STRING_POINTER (string)
11336 [TREE_INT_CST_LOW (index)])));
11338 return NULL;
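
/* E.g. (a sketch): for EXP representing "abc"[1] -- an ARRAY_REF of a
   STRING_CST with INTEGER_CST index 1 -- the checks above pass and the
   result is the character constant 'b' converted to TREE_TYPE (EXP).  */
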
11341 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11342 an integer constant or real constant.
11344 TYPE is the type of the result. */
11346 static tree
11347 fold_negate_const (tree arg0, tree type)
11349 tree t = NULL_TREE;
11351 switch (TREE_CODE (arg0))
11353 case INTEGER_CST:
11355 unsigned HOST_WIDE_INT low;
11356 HOST_WIDE_INT high;
11357 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11358 TREE_INT_CST_HIGH (arg0),
11359 &low, &high);
11360 t = build_int_cst_wide (type, low, high);
11361 t = force_fit_type (t, 1,
11362 (overflow | TREE_OVERFLOW (arg0))
11363 && !TYPE_UNSIGNED (type),
11364 TREE_CONSTANT_OVERFLOW (arg0));
11365 break;
11368 case REAL_CST:
11369 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11370 break;
11372 default:
11373 gcc_unreachable ();
11376 return t;
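
/* E.g. (illustrative): negating the most negative value of a 32-bit
   signed type wraps back to itself; neg_double reports the overflow
   and force_fit_type then sets TREE_OVERFLOW on the result, since the
   type is not unsigned.  */
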
11379 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11380 an integer constant or real constant.
11382 TYPE is the type of the result. */
11384 tree
11385 fold_abs_const (tree arg0, tree type)
11387 tree t = NULL_TREE;
11389 switch (TREE_CODE (arg0))
11391 case INTEGER_CST:
11392 /* If the value is unsigned, then the absolute value is
11393 the same as the ordinary value. */
11394 if (TYPE_UNSIGNED (type))
11395 t = arg0;
11396 /* Similarly, if the value is non-negative. */
11397 else if (INT_CST_LT (integer_minus_one_node, arg0))
11398 t = arg0;
11399 /* If the value is negative, then the absolute value is
11400 its negation. */
11401 else
11403 unsigned HOST_WIDE_INT low;
11404 HOST_WIDE_INT high;
11405 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11406 TREE_INT_CST_HIGH (arg0),
11407 &low, &high);
11408 t = build_int_cst_wide (type, low, high);
11409 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11410 TREE_CONSTANT_OVERFLOW (arg0));
11412 break;
11414 case REAL_CST:
11415 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11416 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11417 else
11418 t = arg0;
11419 break;
11421 default:
11422 gcc_unreachable ();
11425 return t;
11428 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11429 constant. TYPE is the type of the result. */
11431 static tree
11432 fold_not_const (tree arg0, tree type)
11434 tree t = NULL_TREE;
11436 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11438 t = build_int_cst_wide (type,
11439 ~ TREE_INT_CST_LOW (arg0),
11440 ~ TREE_INT_CST_HIGH (arg0));
11441 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11442 TREE_CONSTANT_OVERFLOW (arg0));
11444 return t;
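
/* E.g. (illustrative): with an 8-bit unsigned TYPE and ARG0 == 5, the
   double-word complement ~5 has all high bits set; force_fit_type
   masks it back down to the type's precision, yielding 250 (0xfa).  */
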
11447 /* Given CODE, a relational operator, the target type, TYPE and two
11448 constant operands OP0 and OP1, return the result of the
11449 relational operation. If the result is not a compile time
11450 constant, then return NULL_TREE. */
11452 static tree
11453 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11455 int result, invert;
11457 /* From here on, the only cases we handle are when the result is
11458 known to be a constant. */
11460 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11462 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11463 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11465 /* Handle the cases where either operand is a NaN. */
11466 if (real_isnan (c0) || real_isnan (c1))
11468 switch (code)
11470 case EQ_EXPR:
11471 case ORDERED_EXPR:
11472 result = 0;
11473 break;
11475 case NE_EXPR:
11476 case UNORDERED_EXPR:
11477 case UNLT_EXPR:
11478 case UNLE_EXPR:
11479 case UNGT_EXPR:
11480 case UNGE_EXPR:
11481 case UNEQ_EXPR:
11482 result = 1;
11483 break;
11485 case LT_EXPR:
11486 case LE_EXPR:
11487 case GT_EXPR:
11488 case GE_EXPR:
11489 case LTGT_EXPR:
11490 if (flag_trapping_math)
11491 return NULL_TREE;
11492 result = 0;
11493 break;
11495 default:
11496 gcc_unreachable ();
11499 return constant_boolean_node (result, type);
11502 return constant_boolean_node (real_compare (code, c0, c1), type);
11505 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11507 To compute GT, swap the arguments and do LT.
11508 To compute GE, do LT and invert the result.
11509 To compute LE, swap the arguments, do LT and invert the result.
11510 To compute NE, do EQ and invert the result.
11512 Therefore, the code below must handle only EQ and LT. */
11514 if (code == LE_EXPR || code == GT_EXPR)
11516 tree tem = op0;
11517 op0 = op1;
11518 op1 = tem;
11519 code = swap_tree_comparison (code);
11522 /* Note that it is safe to invert for real values here because we
11523 have already handled the one case where it matters. */
11525 invert = 0;
11526 if (code == NE_EXPR || code == GE_EXPR)
11528 invert = 1;
11529 code = invert_tree_comparison (code, false);
11532 /* Compute a result for LT or EQ if the arguments permit;
11533 otherwise return NULL_TREE. */
11534 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11536 if (code == EQ_EXPR)
11537 result = tree_int_cst_equal (op0, op1);
11538 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11539 result = INT_CST_LT_UNSIGNED (op0, op1);
11540 else
11541 result = INT_CST_LT (op0, op1);
11543 else
11544 return NULL_TREE;
11546 if (invert)
11547 result ^= 1;
11548 return constant_boolean_node (result, type);
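
/* Worked instance of the reductions above (illustrative): folding
   4 >= 7 sets invert = 1 and flips GE to LT; INT_CST_LT (4, 7) gives
   1, and the final inversion yields the correct result 0.  */
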
11551 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11552 Don't build a CLEANUP_POINT_EXPR for an EXPR which doesn't have side
11553 effects. */
11555 tree
11556 fold_build_cleanup_point_expr (tree type, tree expr)
11558 /* If the expression does not have side effects then we don't have to wrap
11559 it with a cleanup point expression. */
11560 if (!TREE_SIDE_EFFECTS (expr))
11561 return expr;
11563 /* If the expression is a return, check whether the expression inside the
11564 return, or the right-hand side of the MODIFY_EXPR inside the return,
11565 has side effects. If it doesn't, we don't need to wrap the expression
11566 in a cleanup point expression. Note we don't check the left-hand side
11567 of the modify because it should always be the return decl. */
11568 if (TREE_CODE (expr) == RETURN_EXPR)
11570 tree op = TREE_OPERAND (expr, 0);
11571 if (!op || !TREE_SIDE_EFFECTS (op))
11572 return expr;
11573 op = TREE_OPERAND (op, 1);
11574 if (!TREE_SIDE_EFFECTS (op))
11575 return expr;
11578 return build1 (CLEANUP_POINT_EXPR, type, expr);
11581 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11582 avoid confusing the gimplify process. */
11584 tree
11585 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11587 /* The size of the object is not relevant when talking about its address. */
11588 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11589 t = TREE_OPERAND (t, 0);
11591 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
11592 if (TREE_CODE (t) == INDIRECT_REF
11593 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11595 t = TREE_OPERAND (t, 0);
11596 if (TREE_TYPE (t) != ptrtype)
11597 t = build1 (NOP_EXPR, ptrtype, t);
11599 else
11601 tree base = t;
11603 while (handled_component_p (base))
11604 base = TREE_OPERAND (base, 0);
11605 if (DECL_P (base))
11606 TREE_ADDRESSABLE (base) = 1;
11608 t = build1 (ADDR_EXPR, ptrtype, t);
11611 return t;
11614 tree
11615 build_fold_addr_expr (tree t)
11617 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
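
/* Usage sketch: for a VAR_DECL v this yields &v of pointer-to-v type
   and marks v TREE_ADDRESSABLE; for an INDIRECT_REF *p it folds away
   the indirection and simply returns p (possibly cast).  */
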
11620 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11621 of an indirection through OP0, or NULL_TREE if no simplification is
11622 possible. */
11624 tree
11625 fold_indirect_ref_1 (tree type, tree op0)
11627 tree sub = op0;
11628 tree subtype;
11630 STRIP_NOPS (sub);
11631 subtype = TREE_TYPE (sub);
11632 if (!POINTER_TYPE_P (subtype))
11633 return NULL_TREE;
11635 if (TREE_CODE (sub) == ADDR_EXPR)
11637 tree op = TREE_OPERAND (sub, 0);
11638 tree optype = TREE_TYPE (op);
11639 /* *&p => p; make sure to handle *&"str"[cst] here. */
11640 if (type == optype)
11642 tree fop = fold_read_from_constant_string (op);
11643 if (fop)
11644 return fop;
11645 else
11646 return op;
11648 /* *(foo *)&fooarray => fooarray[0] */
11649 else if (TREE_CODE (optype) == ARRAY_TYPE
11650 && type == TREE_TYPE (optype))
11652 tree type_domain = TYPE_DOMAIN (optype);
11653 tree min_val = size_zero_node;
11654 if (type_domain && TYPE_MIN_VALUE (type_domain))
11655 min_val = TYPE_MIN_VALUE (type_domain);
11656 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11660 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11661 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11662 && type == TREE_TYPE (TREE_TYPE (subtype)))
11664 tree type_domain;
11665 tree min_val = size_zero_node;
11666 sub = build_fold_indirect_ref (sub);
11667 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11668 if (type_domain && TYPE_MIN_VALUE (type_domain))
11669 min_val = TYPE_MIN_VALUE (type_domain);
11670 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11673 return NULL_TREE;
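
/* Worked instances (illustrative): *&x becomes x; *(int *)&iarr
   becomes iarr[0] via the ARRAY_TYPE branch; *(int *)iarrptr becomes
   (*iarrptr)[0]; anything else returns NULL_TREE.  */
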
11676 /* Builds an expression for an indirection through T, simplifying some
11677 cases. */
11679 tree
11680 build_fold_indirect_ref (tree t)
11682 tree type = TREE_TYPE (TREE_TYPE (t));
11683 tree sub = fold_indirect_ref_1 (type, t);
11685 if (sub)
11686 return sub;
11687 else
11688 return build1 (INDIRECT_REF, type, t);
11691 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11693 tree
11694 fold_indirect_ref (tree t)
11696 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11698 if (sub)
11699 return sub;
11700 else
11701 return t;
11704 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11705 whose result is ignored. The type of the returned tree need not be
11706 the same as the original expression. */
11708 tree
11709 fold_ignored_result (tree t)
11711 if (!TREE_SIDE_EFFECTS (t))
11712 return integer_zero_node;
11714 for (;;)
11715 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11717 case tcc_unary:
11718 t = TREE_OPERAND (t, 0);
11719 break;
11721 case tcc_binary:
11722 case tcc_comparison:
11723 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11724 t = TREE_OPERAND (t, 0);
11725 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11726 t = TREE_OPERAND (t, 1);
11727 else
11728 return t;
11729 break;
11731 case tcc_expression:
11732 switch (TREE_CODE (t))
11734 case COMPOUND_EXPR:
11735 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11736 return t;
11737 t = TREE_OPERAND (t, 0);
11738 break;
11740 case COND_EXPR:
11741 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11742 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11743 return t;
11744 t = TREE_OPERAND (t, 0);
11745 break;
11747 default:
11748 return t;
11750 break;
11752 default:
11753 return t;
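
/* E.g. (illustrative): for "f () + 3" whose value is ignored, the
   side-effect-free operand is peeled off and "f ()" is returned; an
   expression with no side effects at all collapses straight to
   integer_zero_node.  */
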
11757 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11758 This can only be applied to objects of a sizetype. */
11760 tree
11761 round_up (tree value, int divisor)
11763 tree div = NULL_TREE;
11765 gcc_assert (divisor > 0);
11766 if (divisor == 1)
11767 return value;
11769 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11770 have to do anything. Only do this when VALUE is not itself a
11771 constant, because in that case the check is more expensive than
11772 simply performing the rounding. */
11773 if (TREE_CODE (value) != INTEGER_CST)
11775 div = build_int_cst (TREE_TYPE (value), divisor);
11777 if (multiple_of_p (TREE_TYPE (value), value, div))
11778 return value;
11781 /* If divisor is a power of two, simplify this to bit manipulation. */
11782 if (divisor == (divisor & -divisor))
11784 tree t;
11786 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11787 value = size_binop (PLUS_EXPR, value, t);
11788 t = build_int_cst (TREE_TYPE (value), -divisor);
11789 value = size_binop (BIT_AND_EXPR, value, t);
11791 else
11793 if (!div)
11794 div = build_int_cst (TREE_TYPE (value), divisor);
11795 value = size_binop (CEIL_DIV_EXPR, value, div);
11796 value = size_binop (MULT_EXPR, value, div);
11799 return value;
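
/* Worked instance (illustrative): round_up (v, 8) with non-constant V
   emits (V + 7) & -8, so 13 rounds to 16 and 16 stays 16; a
   non-power-of-two divisor such as 12 goes through CEIL_DIV_EXPR
   followed by MULT_EXPR instead.  */
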
11802 /* Likewise, but round down. */
11804 tree
11805 round_down (tree value, int divisor)
11807 tree div = NULL_TREE;
11809 gcc_assert (divisor > 0);
11810 if (divisor == 1)
11811 return value;
11813 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11814 have to do anything. Only do this when VALUE is not itself a
11815 constant, because in that case the check is more expensive than
11816 simply performing the rounding. */
11817 if (TREE_CODE (value) != INTEGER_CST)
11819 div = build_int_cst (TREE_TYPE (value), divisor);
11821 if (multiple_of_p (TREE_TYPE (value), value, div))
11822 return value;
11825 /* If divisor is a power of two, simplify this to bit manipulation. */
11826 if (divisor == (divisor & -divisor))
11828 tree t;
11830 t = build_int_cst (TREE_TYPE (value), -divisor);
11831 value = size_binop (BIT_AND_EXPR, value, t);
11833 else
11835 if (!div)
11836 div = build_int_cst (TREE_TYPE (value), divisor);
11837 value = size_binop (FLOOR_DIV_EXPR, value, div);
11838 value = size_binop (MULT_EXPR, value, div);
11841 return value;
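
/* Likewise (illustrative): round_down (v, 8) emits V & -8, so 13
   rounds down to 8 and 16 stays 16.  */
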
11844 /* Returns a pointer to the base of the object addressed by EXP and
11845 extracts information about the offset of the access, storing it
11846 in *PBITPOS and *POFFSET. */
11848 static tree
11849 split_address_to_core_and_offset (tree exp,
11850 HOST_WIDE_INT *pbitpos, tree *poffset)
11852 tree core;
11853 enum machine_mode mode;
11854 int unsignedp, volatilep;
11855 HOST_WIDE_INT bitsize;
11857 if (TREE_CODE (exp) == ADDR_EXPR)
11859 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11860 poffset, &mode, &unsignedp, &volatilep,
11861 false);
11862 core = build_fold_addr_expr (core);
11864 else
11866 core = exp;
11867 *pbitpos = 0;
11868 *poffset = NULL_TREE;
11871 return core;
11874 /* Returns true if addresses of E1 and E2 differ by a constant, false
11875 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11877 bool
11878 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11880 tree core1, core2;
11881 HOST_WIDE_INT bitpos1, bitpos2;
11882 tree toffset1, toffset2, tdiff, type;
11884 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11885 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11887 if (bitpos1 % BITS_PER_UNIT != 0
11888 || bitpos2 % BITS_PER_UNIT != 0
11889 || !operand_equal_p (core1, core2, 0))
11890 return false;
11892 if (toffset1 && toffset2)
11894 type = TREE_TYPE (toffset1);
11895 if (type != TREE_TYPE (toffset2))
11896 toffset2 = fold_convert (type, toffset2);
11898 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11899 if (!cst_and_fits_in_hwi (tdiff))
11900 return false;
11902 *diff = int_cst_value (tdiff);
11904 else if (toffset1 || toffset2)
11906 /* If only one of the offsets is non-constant, the difference cannot
11907 be a constant. */
11908 return false;
11910 else
11911 *diff = 0;
11913 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11914 return true;
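
/* Worked instance (illustrative): for E1 == &a[3] and E2 == &a[1]
   with 4-byte elements, both split to core &a with bit positions 96
   and 32; the cores match and there are no variable offsets, so
   *DIFF = (96 - 32) / BITS_PER_UNIT = 8.  */
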
11917 /* Simplify the floating point expression EXP when the sign of the
11918 result is not significant. Return NULL_TREE if no simplification
11919 is possible. */
11921 tree
11922 fold_strip_sign_ops (tree exp)
11924 tree arg0, arg1;
11926 switch (TREE_CODE (exp))
11928 case ABS_EXPR:
11929 case NEGATE_EXPR:
11930 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11931 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11933 case MULT_EXPR:
11934 case RDIV_EXPR:
11935 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11936 return NULL_TREE;
11937 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11938 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11939 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11940 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11941 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11942 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11943 break;
11945 default:
11946 break;
11948 return NULL_TREE;
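
/* E.g. (illustrative): when only the magnitude matters, "-x * y"
   strips to "x * y" and "-(x / -y)" strips to "x / y"; a NULL_TREE
   result tells the caller no sign-only operation was found and the
   original expression should be kept.  */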