gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26   @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
62 /* The following constants represent a bit-based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
66 COMPCODE_FALSE = 0,
67 COMPCODE_LT = 1,
68 COMPCODE_EQ = 2,
69 COMPCODE_LE = 3,
70 COMPCODE_GT = 4,
71 COMPCODE_LTGT = 5,
72 COMPCODE_GE = 6,
73 COMPCODE_ORD = 7,
74 COMPCODE_UNORD = 8,
75 COMPCODE_UNLT = 9,
76 COMPCODE_UNEQ = 10,
77 COMPCODE_UNLE = 11,
78 COMPCODE_UNGT = 12,
79 COMPCODE_NE = 13,
80 COMPCODE_UNGE = 14,
81 COMPCODE_TRUE = 15
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
106 tree *, tree *);
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree range_predecessor (tree);
112 static tree range_successor (tree);
113 static tree make_range (tree, int *, tree *, tree *);
114 static tree build_range_check (tree, tree, int, tree, tree);
115 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
116 tree);
117 static tree fold_range_test (enum tree_code, tree, tree, tree);
118 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 tree, tree,
127 tree, tree, int);
128 static bool fold_real_zero_addition_p (tree, tree, int);
129 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (tree, tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static int native_encode_expr (tree, unsigned char *, int);
138 static tree native_interpret_expr (tree, unsigned char *, int);
141 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
142 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
143 and SUM1. Then this yields nonzero if overflow occurred during the
144 addition.
146 Overflow occurs if A and B have the same sign, but A and SUM differ in
147 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
148 sign. */
149 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
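/* Editorial illustration (not in the original source): with 8-bit values
   for brevity, take a = 0x70 (112), b = 0x50 (80), sum = 0xC0 (-64).
   Then a ^ b = 0x20 (the operands' sign bits agree), so ~(a ^ b) has the
   sign bit set, and a ^ sum = 0xB0 has the sign bit set as well; their
   AND is negative, so the macro correctly reports that 112 + 80
   overflowed signed 8-bit arithmetic.  */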
151 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
152 We do that by representing the two-word integer in 4 words, with only
153 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
154 number. The value of the word is LOWPART + HIGHPART * BASE. */
156 #define LOWPART(x) \
157 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
158 #define HIGHPART(x) \
159 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
160 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
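/* Editorial worked example, assuming HOST_BITS_PER_WIDE_INT == 32 so that
   BASE == 0x10000: the two-word integer LOW = 0x12345678, HI = 0x0000ABCD
   is stored as the four half-words { 0x5678, 0x1234, 0xABCD, 0x0000 },
   and each original word is recovered as words[0] + words[1] * BASE and
   words[2] + words[3] * BASE.  Keeping only a half-word per array element
   guarantees every partial product in mul_double below fits in one
   unsigned HOST_WIDE_INT without overflowing.  */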
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
166 static void
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
179 static void
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
181 HOST_WIDE_INT *hi)
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
187 /* T is an INT_CST node.  OVERFLOWABLE indicates whether we are interested
188    in overflow of the value: when >0 we are only interested in signed
189    overflow, and when <0 we are interested in any overflow.  OVERFLOWED
190 indicates whether overflow has already occurred. CONST_OVERFLOWED
191 indicates whether constant overflow has already occurred. We force
192 T's value to be within range of T's type (by setting to 0 or 1 all
193    the bits outside the type's range).  We set TREE_OVERFLOW if
194    OVERFLOWED is nonzero,
195    or OVERFLOWABLE is >0 and signed overflow occurs,
196    or OVERFLOWABLE is <0 and any overflow occurs.
197    We set TREE_CONSTANT_OVERFLOW if
198    CONST_OVERFLOWED is nonzero,
199    or we set TREE_OVERFLOW.
200 We return either the original T, or a copy. */
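/* Editorial example: for a signed 8-bit type, a constant whose low word is
   0x180 is first masked to 0x80 (bits beyond bit 7 cleared) and, since
   bit 7 is set, sign extended to the doubleword value -128 (HIGH = -1).
   The value changed, so a fresh node is built, and because OVERFLOWABLE
   is >0 on this sign-extended type, TREE_OVERFLOW is set on it too.  */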
202 tree
203 force_fit_type (tree t, int overflowable,
204 bool overflowed, bool overflowed_const)
206 unsigned HOST_WIDE_INT low;
207 HOST_WIDE_INT high;
208 unsigned int prec;
209 int sign_extended_type;
211 gcc_assert (TREE_CODE (t) == INTEGER_CST);
213 low = TREE_INT_CST_LOW (t);
214 high = TREE_INT_CST_HIGH (t);
216 if (POINTER_TYPE_P (TREE_TYPE (t))
217 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
218 prec = POINTER_SIZE;
219 else
220 prec = TYPE_PRECISION (TREE_TYPE (t));
221 /* Size types *are* sign extended. */
222 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
223 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
224 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
226 /* First clear all bits that are beyond the type's precision. */
228 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
230 else if (prec > HOST_BITS_PER_WIDE_INT)
231 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
232 else
234 high = 0;
235 if (prec < HOST_BITS_PER_WIDE_INT)
236 low &= ~((HOST_WIDE_INT) (-1) << prec);
239 if (!sign_extended_type)
240 /* No sign extension */;
241 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
242 /* Correct width already. */;
243 else if (prec > HOST_BITS_PER_WIDE_INT)
245 /* Sign extend top half? */
246 if (high & ((unsigned HOST_WIDE_INT)1
247 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
248 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
250 else if (prec == HOST_BITS_PER_WIDE_INT)
252 if ((HOST_WIDE_INT)low < 0)
253 high = -1;
255 else
257 /* Sign extend bottom half? */
258 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
260 high = -1;
261 low |= (HOST_WIDE_INT)(-1) << prec;
265 /* If the value changed, return a new node. */
266 if (overflowed || overflowed_const
267 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
269 t = build_int_cst_wide (TREE_TYPE (t), low, high);
271 if (overflowed
272 || overflowable < 0
273 || (overflowable > 0 && sign_extended_type))
275 t = copy_node (t);
276 TREE_OVERFLOW (t) = 1;
277 TREE_CONSTANT_OVERFLOW (t) = 1;
279 else if (overflowed_const)
281 t = copy_node (t);
282 TREE_CONSTANT_OVERFLOW (t) = 1;
286 return t;
289 /* Add two doubleword integers with doubleword result.
290 Each argument is given as two `HOST_WIDE_INT' pieces.
291 One argument is L1 and H1; the other, L2 and H2.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
295 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
299 unsigned HOST_WIDE_INT l;
300 HOST_WIDE_INT h;
302 l = l1 + l2;
303 h = h1 + h2 + (l < l1);
305 *lv = l;
306 *hv = h;
307 return OVERFLOW_SUM_SIGN (h1, h2, h);
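/* Editorial note: the carry out of the low word is detected portably as
   (l < l1), since unsigned addition wraps; e.g. l1 = ~(unsigned
   HOST_WIDE_INT) 0 and l2 = 1 give l = 0, which is < l1, so 1 is carried
   into the high word.  Signed overflow of the full doubleword sum is then
   read off the high words with OVERFLOW_SUM_SIGN.  */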
310 /* Negate a doubleword integer with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
313 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
316 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
317 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
319 if (l1 == 0)
321 *lv = 0;
322 *hv = - h1;
323 return (*hv & h1) < 0;
325 else
327 *lv = -l1;
328 *hv = ~h1;
329 return 0;
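/* Editorial note: this is the two's complement identity -x == ~x + 1
   applied per word.  When the low word is zero, the +1 carries all the
   way into the high word (hence *hv = -h1), and the only overflow case is
   negating the most negative value, where -h1 and h1 share a set sign
   bit; otherwise the high word is simply complemented.  */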
333 /* Multiply two doubleword integers with doubleword result.
334 Return nonzero if the operation overflows, assuming it's signed.
335 Each argument is given as two `HOST_WIDE_INT' pieces.
336 One argument is L1 and H1; the other, L2 and H2.
337 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
340 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
341 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
342 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
344 HOST_WIDE_INT arg1[4];
345 HOST_WIDE_INT arg2[4];
346 HOST_WIDE_INT prod[4 * 2];
347 unsigned HOST_WIDE_INT carry;
348 int i, j, k;
349 unsigned HOST_WIDE_INT toplow, neglow;
350 HOST_WIDE_INT tophigh, neghigh;
352 encode (arg1, l1, h1);
353 encode (arg2, l2, h2);
355 memset (prod, 0, sizeof prod);
357 for (i = 0; i < 4; i++)
359 carry = 0;
360 for (j = 0; j < 4; j++)
362 k = i + j;
363 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
364 carry += arg1[i] * arg2[j];
365 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
366 carry += prod[k];
367 prod[k] = LOWPART (carry);
368 carry = HIGHPART (carry);
370 prod[i + 4] = carry;
373 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
375 /* Check for overflow by calculating the top half of the answer in full;
376 it should agree with the low half's sign bit. */
377 decode (prod + 4, &toplow, &tophigh);
378 if (h1 < 0)
380 neg_double (l2, h2, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
383 if (h2 < 0)
385 neg_double (l1, h1, &neglow, &neghigh);
386 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
388 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
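/* Editorial note on the overflow test: prod[4..7] is the exact upper half
   of the unsigned product.  For a signed interpretation it is corrected
   by subtracting the other operand once for each negative input (the
   neg_double/add_double calls above).  The multiplication overflows iff
   that corrected upper half is not the sign extension of the lower half:
   all zero bits when *hv >= 0, all one bits when *hv < 0.  */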
391 /* Shift the doubleword integer in L1, H1 left by COUNT places
392 keeping only PREC bits of result.
393 Shift right if COUNT is negative.
394 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
395 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
397 void
398 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
399 HOST_WIDE_INT count, unsigned int prec,
400 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
402 unsigned HOST_WIDE_INT signmask;
404 if (count < 0)
406 rshift_double (l1, h1, -count, prec, lv, hv, arith);
407 return;
410 if (SHIFT_COUNT_TRUNCATED)
411 count %= prec;
413 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
415 /* Shifting by the host word size is undefined according to the
416 ANSI standard, so we must handle this as a special case. */
417 *hv = 0;
418 *lv = 0;
420 else if (count >= HOST_BITS_PER_WIDE_INT)
422 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
423 *lv = 0;
425 else
427 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
428 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
429 *lv = l1 << count;
432 /* Sign extend all bits that are beyond the precision. */
434 signmask = -((prec > HOST_BITS_PER_WIDE_INT
435 ? ((unsigned HOST_WIDE_INT) *hv
436 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
437 : (*lv >> (prec - 1))) & 1);
439 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
441 else if (prec >= HOST_BITS_PER_WIDE_INT)
443 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
444 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
446 else
448 *hv = signmask;
449 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
450 *lv |= signmask << prec;
454 /* Shift the doubleword integer in L1, H1 right by COUNT places
455 keeping only PREC bits of result. COUNT must be positive.
456 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
457 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
459 void
460 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
461 HOST_WIDE_INT count, unsigned int prec,
462 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
463 int arith)
465 unsigned HOST_WIDE_INT signmask;
467 signmask = (arith
468 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
469 : 0);
471 if (SHIFT_COUNT_TRUNCATED)
472 count %= prec;
474 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
476 /* Shifting by the host word size is undefined according to the
477 ANSI standard, so we must handle this as a special case. */
478 *hv = 0;
479 *lv = 0;
481 else if (count >= HOST_BITS_PER_WIDE_INT)
483 *hv = 0;
484 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
486 else
488 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
489 *lv = ((l1 >> count)
490 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
493 /* Zero / sign extend all bits that are beyond the precision. */
495 if (count >= (HOST_WIDE_INT)prec)
497 *hv = signmask;
498 *lv = signmask;
500 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
502 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
504 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
505 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
507 else
509 *hv = signmask;
510 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
511 *lv |= signmask << (prec - count);
515 /* Rotate the doubleword integer in L1, H1 left by COUNT places
516 keeping only PREC bits of result.
517 Rotate right if COUNT is negative.
518 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
520 void
521 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
522 HOST_WIDE_INT count, unsigned int prec,
523 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
525 unsigned HOST_WIDE_INT s1l, s2l;
526 HOST_WIDE_INT s1h, s2h;
528 count %= prec;
529 if (count < 0)
530 count += prec;
532 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
533 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
534 *lv = s1l | s2l;
535 *hv = s1h | s2h;
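/* Editorial note: the left rotate is composed from two logical shifts,
   (x << count) | (x >> (prec - count)), both confined to the PREC-bit
   window; e.g. rotating the 8-bit value 0xB1 left by 4 gives
   0x10 | 0x0B = 0x1B.  rrotate_double below is the mirror image.  */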
538 /* Rotate the doubleword integer in L1, H1 left by COUNT places
539 keeping only PREC bits of result. COUNT must be positive.
540 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
542 void
543 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
544 HOST_WIDE_INT count, unsigned int prec,
545 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
550 count %= prec;
551 if (count < 0)
552 count += prec;
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
556 *lv = s1l | s2l;
557 *hv = s1h | s2h;
560 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
561 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
562 CODE is a tree code for a kind of division, one of
563 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
564 or EXACT_DIV_EXPR
565 It controls how the quotient is rounded to an integer.
566 Return nonzero if the operation overflows.
567 UNS nonzero says do unsigned division. */
570 div_and_round_double (enum tree_code code, int uns,
571 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
572 HOST_WIDE_INT hnum_orig,
573 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
574 HOST_WIDE_INT hden_orig,
575 unsigned HOST_WIDE_INT *lquo,
576 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
577 HOST_WIDE_INT *hrem)
579 int quo_neg = 0;
580 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
581 HOST_WIDE_INT den[4], quo[4];
582 int i, j;
583 unsigned HOST_WIDE_INT work;
584 unsigned HOST_WIDE_INT carry = 0;
585 unsigned HOST_WIDE_INT lnum = lnum_orig;
586 HOST_WIDE_INT hnum = hnum_orig;
587 unsigned HOST_WIDE_INT lden = lden_orig;
588 HOST_WIDE_INT hden = hden_orig;
589 int overflow = 0;
591 if (hden == 0 && lden == 0)
592 overflow = 1, lden = 1;
594 /* Calculate quotient sign and convert operands to unsigned. */
595 if (!uns)
597 if (hnum < 0)
599 quo_neg = ~ quo_neg;
600 /* (minimum integer) / (-1) is the only overflow case. */
601 if (neg_double (lnum, hnum, &lnum, &hnum)
602 && ((HOST_WIDE_INT) lden & hden) == -1)
603 overflow = 1;
605 if (hden < 0)
607 quo_neg = ~ quo_neg;
608 neg_double (lden, hden, &lden, &hden);
612 if (hnum == 0 && hden == 0)
613 { /* single precision */
614 *hquo = *hrem = 0;
615 /* This unsigned division rounds toward zero. */
616 *lquo = lnum / lden;
617 goto finish_up;
620 if (hnum == 0)
621 { /* trivial case: dividend < divisor */
622 /* hden != 0 already checked. */
623 *hquo = *lquo = 0;
624 *hrem = hnum;
625 *lrem = lnum;
626 goto finish_up;
629 memset (quo, 0, sizeof quo);
631 memset (num, 0, sizeof num); /* to zero 9th element */
632 memset (den, 0, sizeof den);
634 encode (num, lnum, hnum);
635 encode (den, lden, hden);
637 /* Special code for when the divisor < BASE. */
638 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
640 /* hnum != 0 already checked. */
641 for (i = 4 - 1; i >= 0; i--)
643 work = num[i] + carry * BASE;
644 quo[i] = work / lden;
645 carry = work % lden;
648 else
650 /* Full double precision division,
651 with thanks to Don Knuth's "Seminumerical Algorithms". */
652 int num_hi_sig, den_hi_sig;
653 unsigned HOST_WIDE_INT quo_est, scale;
655 /* Find the highest nonzero divisor digit. */
656 for (i = 4 - 1;; i--)
657 if (den[i] != 0)
659 den_hi_sig = i;
660 break;
663       /* Ensure that the first digit of the divisor is at least BASE/2.
664 This is required by the quotient digit estimation algorithm. */
666 scale = BASE / (den[den_hi_sig] + 1);
667 if (scale > 1)
668 { /* scale divisor and dividend */
669 carry = 0;
670 for (i = 0; i <= 4 - 1; i++)
672 work = (num[i] * scale) + carry;
673 num[i] = LOWPART (work);
674 carry = HIGHPART (work);
677 num[4] = carry;
678 carry = 0;
679 for (i = 0; i <= 4 - 1; i++)
681 work = (den[i] * scale) + carry;
682 den[i] = LOWPART (work);
683 carry = HIGHPART (work);
684 if (den[i] != 0) den_hi_sig = i;
688 num_hi_sig = 4;
690 /* Main loop */
691 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
693 /* Guess the next quotient digit, quo_est, by dividing the first
694 two remaining dividend digits by the high order quotient digit.
695 quo_est is never low and is at most 2 high. */
696 unsigned HOST_WIDE_INT tmp;
698 num_hi_sig = i + den_hi_sig + 1;
699 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
700 if (num[num_hi_sig] != den[den_hi_sig])
701 quo_est = work / den[den_hi_sig];
702 else
703 quo_est = BASE - 1;
705 /* Refine quo_est so it's usually correct, and at most one high. */
706 tmp = work - quo_est * den[den_hi_sig];
707 if (tmp < BASE
708 && (den[den_hi_sig - 1] * quo_est
709 > (tmp * BASE + num[num_hi_sig - 2])))
710 quo_est--;
712 /* Try QUO_EST as the quotient digit, by multiplying the
713 divisor by QUO_EST and subtracting from the remaining dividend.
714 Keep in mind that QUO_EST is the I - 1st digit. */
716 carry = 0;
717 for (j = 0; j <= den_hi_sig; j++)
719 work = quo_est * den[j] + carry;
720 carry = HIGHPART (work);
721 work = num[i + j] - LOWPART (work);
722 num[i + j] = LOWPART (work);
723 carry += HIGHPART (work) != 0;
726 /* If quo_est was high by one, then num[i] went negative and
727 we need to correct things. */
728 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
730 quo_est--;
731 carry = 0; /* add divisor back in */
732 for (j = 0; j <= den_hi_sig; j++)
734 work = num[i + j] + den[j] + carry;
735 carry = HIGHPART (work);
736 num[i + j] = LOWPART (work);
739 num [num_hi_sig] += carry;
742 /* Store the quotient digit. */
743 quo[i] = quo_est;
747 decode (quo, lquo, hquo);
749 finish_up:
750 /* If result is negative, make it so. */
751 if (quo_neg)
752 neg_double (*lquo, *hquo, lquo, hquo);
754 /* Compute trial remainder: rem = num - (quo * den) */
755 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
756 neg_double (*lrem, *hrem, lrem, hrem);
757 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
759 switch (code)
761 case TRUNC_DIV_EXPR:
762 case TRUNC_MOD_EXPR: /* round toward zero */
763 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
764 return overflow;
766 case FLOOR_DIV_EXPR:
767 case FLOOR_MOD_EXPR: /* round toward negative infinity */
768 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
770 /* quo = quo - 1; */
771 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
772 lquo, hquo);
774 else
775 return overflow;
776 break;
778 case CEIL_DIV_EXPR:
779 case CEIL_MOD_EXPR: /* round toward positive infinity */
780 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
782 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
783 lquo, hquo);
785 else
786 return overflow;
787 break;
789 case ROUND_DIV_EXPR:
790 case ROUND_MOD_EXPR: /* round to closest integer */
792 unsigned HOST_WIDE_INT labs_rem = *lrem;
793 HOST_WIDE_INT habs_rem = *hrem;
794 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
795 HOST_WIDE_INT habs_den = hden, htwice;
797 /* Get absolute values. */
798 if (*hrem < 0)
799 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
800 if (hden < 0)
801 neg_double (lden, hden, &labs_den, &habs_den);
803 /* If (2 * abs (lrem) >= abs (lden)) */
804 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
805 labs_rem, habs_rem, &ltwice, &htwice);
807 if (((unsigned HOST_WIDE_INT) habs_den
808 < (unsigned HOST_WIDE_INT) htwice)
809 || (((unsigned HOST_WIDE_INT) habs_den
810 == (unsigned HOST_WIDE_INT) htwice)
811 && (labs_den < ltwice)))
813 if (*hquo < 0)
814 /* quo = quo - 1; */
815 add_double (*lquo, *hquo,
816 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
817 else
818 /* quo = quo + 1; */
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
820 lquo, hquo);
822 else
823 return overflow;
825 break;
827 default:
828 gcc_unreachable ();
831 /* Compute true remainder: rem = num - (quo * den) */
832 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
833 neg_double (*lrem, *hrem, lrem, hrem);
834 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
835 return overflow;
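/* Editorial example of the rounding modes, for -8 / 3: the trial quotient
   is -2 with remainder -2.  TRUNC keeps -2 (toward zero), FLOOR adjusts
   to -3 (toward negative infinity), CEIL keeps -2 (toward positive
   infinity), and ROUND compares 2*|rem| = 4 against |den| = 3 and, being
   greater, moves away from zero to -3.  The true remainder is then
   recomputed as num - quo * den for the chosen quotient.  */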
838 /* If ARG2 divides ARG1 with zero remainder, carry out the division
839    of type CODE and return the quotient.
840    Otherwise return NULL_TREE.  */
842 static tree
843 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
845 unsigned HOST_WIDE_INT int1l, int2l;
846 HOST_WIDE_INT int1h, int2h;
847 unsigned HOST_WIDE_INT quol, reml;
848 HOST_WIDE_INT quoh, remh;
849 tree type = TREE_TYPE (arg1);
850 int uns = TYPE_UNSIGNED (type);
852 int1l = TREE_INT_CST_LOW (arg1);
853 int1h = TREE_INT_CST_HIGH (arg1);
854 int2l = TREE_INT_CST_LOW (arg2);
855 int2h = TREE_INT_CST_HIGH (arg2);
857 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
858 &quol, &quoh, &reml, &remh);
859 if (remh != 0 || reml != 0)
860 return NULL_TREE;
862 return build_int_cst_wide (type, quol, quoh);
865 /* Return true if the built-in mathematical function specified by CODE
866 is odd, i.e. -f(x) == f(-x). */
868 static bool
869 negate_mathfn_p (enum built_in_function code)
871 switch (code)
873 CASE_FLT_FN (BUILT_IN_ASIN):
874 CASE_FLT_FN (BUILT_IN_ASINH):
875 CASE_FLT_FN (BUILT_IN_ATAN):
876 CASE_FLT_FN (BUILT_IN_ATANH):
877 CASE_FLT_FN (BUILT_IN_CBRT):
878 CASE_FLT_FN (BUILT_IN_SIN):
879 CASE_FLT_FN (BUILT_IN_SINH):
880 CASE_FLT_FN (BUILT_IN_TAN):
881 CASE_FLT_FN (BUILT_IN_TANH):
882 return true;
884 default:
885 break;
887 return false;
890 /* Check whether we may negate an integer constant T without causing
891 overflow. */
893 bool
894 may_negate_without_overflow_p (tree t)
896 unsigned HOST_WIDE_INT val;
897 unsigned int prec;
898 tree type;
900 gcc_assert (TREE_CODE (t) == INTEGER_CST);
902 type = TREE_TYPE (t);
903 if (TYPE_UNSIGNED (type))
904 return false;
906 prec = TYPE_PRECISION (type);
907 if (prec > HOST_BITS_PER_WIDE_INT)
909 if (TREE_INT_CST_LOW (t) != 0)
910 return true;
911 prec -= HOST_BITS_PER_WIDE_INT;
912 val = TREE_INT_CST_HIGH (t);
914 else
915 val = TREE_INT_CST_LOW (t);
916 if (prec < HOST_BITS_PER_WIDE_INT)
917 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
918 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
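/* Editorial example: in a signed 8-bit type, prec == 8 and the test above
   reduces to val != 0x80; every value except -128 (0x80) can be negated
   safely, while -(-128) is not representable and would overflow.  */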
921 /* Determine whether an expression T can be cheaply negated using
922 the function negate_expr. */
924 static bool
925 negate_expr_p (tree t)
927 tree type;
929 if (t == 0)
930 return false;
932 type = TREE_TYPE (t);
934 STRIP_SIGN_NOPS (t);
935 switch (TREE_CODE (t))
937 case INTEGER_CST:
938 if (TYPE_UNSIGNED (type) || ! flag_trapv)
939 return true;
941 /* Check that -CST will not overflow type. */
942 return may_negate_without_overflow_p (t);
943 case BIT_NOT_EXPR:
944 return INTEGRAL_TYPE_P (type);
946 case REAL_CST:
947 case NEGATE_EXPR:
948 return true;
950 case COMPLEX_CST:
951 return negate_expr_p (TREE_REALPART (t))
952 && negate_expr_p (TREE_IMAGPART (t));
954 case PLUS_EXPR:
955 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
956 return false;
957 /* -(A + B) -> (-B) - A. */
958 if (negate_expr_p (TREE_OPERAND (t, 1))
959 && reorder_operands_p (TREE_OPERAND (t, 0),
960 TREE_OPERAND (t, 1)))
961 return true;
962 /* -(A + B) -> (-A) - B. */
963 return negate_expr_p (TREE_OPERAND (t, 0));
965 case MINUS_EXPR:
966 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
967 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
968 && reorder_operands_p (TREE_OPERAND (t, 0),
969 TREE_OPERAND (t, 1));
971 case MULT_EXPR:
972 if (TYPE_UNSIGNED (TREE_TYPE (t)))
973 break;
975 /* Fall through. */
977 case RDIV_EXPR:
978 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
979 return negate_expr_p (TREE_OPERAND (t, 1))
980 || negate_expr_p (TREE_OPERAND (t, 0));
981 break;
983 case TRUNC_DIV_EXPR:
984 case ROUND_DIV_EXPR:
985 case FLOOR_DIV_EXPR:
986 case CEIL_DIV_EXPR:
987 case EXACT_DIV_EXPR:
988 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
989 break;
990 return negate_expr_p (TREE_OPERAND (t, 1))
991 || negate_expr_p (TREE_OPERAND (t, 0));
993 case NOP_EXPR:
994 /* Negate -((double)float) as (double)(-float). */
995 if (TREE_CODE (type) == REAL_TYPE)
997 tree tem = strip_float_extensions (t);
998 if (tem != t)
999 return negate_expr_p (tem);
1001 break;
1003 case CALL_EXPR:
1004 /* Negate -f(x) as f(-x). */
1005 if (negate_mathfn_p (builtin_mathfn_code (t)))
1006 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1007 break;
1009 case RSHIFT_EXPR:
1010 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1011 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1013 tree op1 = TREE_OPERAND (t, 1);
1014 if (TREE_INT_CST_HIGH (op1) == 0
1015 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1016 == TREE_INT_CST_LOW (op1))
1017 return true;
1019 break;
1021 default:
1022 break;
1024 return false;
1027 /* Given T, an expression, return the negation of T. Allow for T to be
1028 null, in which case return null. */
1030 static tree
1031 negate_expr (tree t)
1033 tree type;
1034 tree tem;
1036 if (t == 0)
1037 return 0;
1039 type = TREE_TYPE (t);
1040 STRIP_SIGN_NOPS (t);
1042 switch (TREE_CODE (t))
1044 /* Convert - (~A) to A + 1. */
1045 case BIT_NOT_EXPR:
1046 if (INTEGRAL_TYPE_P (type))
1047 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1048 build_int_cst (type, 1));
1049 break;
1051 case INTEGER_CST:
1052 tem = fold_negate_const (t, type);
1053 if (! TREE_OVERFLOW (tem)
1054 || TYPE_UNSIGNED (type)
1055 || ! flag_trapv)
1056 return tem;
1057 break;
1059 case REAL_CST:
1060 tem = fold_negate_const (t, type);
1061 /* Two's complement FP formats, such as c4x, may overflow. */
1062 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1063 return fold_convert (type, tem);
1064 break;
1066 case COMPLEX_CST:
1068 tree rpart = negate_expr (TREE_REALPART (t));
1069 tree ipart = negate_expr (TREE_IMAGPART (t));
1071 if ((TREE_CODE (rpart) == REAL_CST
1072 && TREE_CODE (ipart) == REAL_CST)
1073 || (TREE_CODE (rpart) == INTEGER_CST
1074 && TREE_CODE (ipart) == INTEGER_CST))
1075 return build_complex (type, rpart, ipart);
1077 break;
1079 case NEGATE_EXPR:
1080 return fold_convert (type, TREE_OPERAND (t, 0));
1082 case PLUS_EXPR:
1083 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1085 /* -(A + B) -> (-B) - A. */
1086 if (negate_expr_p (TREE_OPERAND (t, 1))
1087 && reorder_operands_p (TREE_OPERAND (t, 0),
1088 TREE_OPERAND (t, 1)))
1090 tem = negate_expr (TREE_OPERAND (t, 1));
1091 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092 tem, TREE_OPERAND (t, 0));
1093 return fold_convert (type, tem);
1096 /* -(A + B) -> (-A) - B. */
1097 if (negate_expr_p (TREE_OPERAND (t, 0)))
1099 tem = negate_expr (TREE_OPERAND (t, 0));
1100 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1101 tem, TREE_OPERAND (t, 1));
1102 return fold_convert (type, tem);
1105 break;
1107 case MINUS_EXPR:
1108 /* - (A - B) -> B - A */
1109 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1110 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1111 return fold_convert (type,
1112 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1113 TREE_OPERAND (t, 1),
1114 TREE_OPERAND (t, 0)));
1115 break;
1117 case MULT_EXPR:
1118 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1119 break;
1121 /* Fall through. */
1123 case RDIV_EXPR:
1124 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1126 tem = TREE_OPERAND (t, 1);
1127 if (negate_expr_p (tem))
1128 return fold_convert (type,
1129 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1130 TREE_OPERAND (t, 0),
1131 negate_expr (tem)));
1132 tem = TREE_OPERAND (t, 0);
1133 if (negate_expr_p (tem))
1134 return fold_convert (type,
1135 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1136 negate_expr (tem),
1137 TREE_OPERAND (t, 1)));
1139 break;
1141 case TRUNC_DIV_EXPR:
1142 case ROUND_DIV_EXPR:
1143 case FLOOR_DIV_EXPR:
1144 case CEIL_DIV_EXPR:
1145 case EXACT_DIV_EXPR:
1146 if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
1148 tem = TREE_OPERAND (t, 1);
1149 if (negate_expr_p (tem))
1150 return fold_convert (type,
1151 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1152 TREE_OPERAND (t, 0),
1153 negate_expr (tem)));
1154 tem = TREE_OPERAND (t, 0);
1155 if (negate_expr_p (tem))
1156 return fold_convert (type,
1157 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1158 negate_expr (tem),
1159 TREE_OPERAND (t, 1)));
1161 break;
1163 case NOP_EXPR:
1164 /* Convert -((double)float) into (double)(-float). */
1165 if (TREE_CODE (type) == REAL_TYPE)
1167 tem = strip_float_extensions (t);
1168 if (tem != t && negate_expr_p (tem))
1169 return fold_convert (type, negate_expr (tem));
1171 break;
1173 case CALL_EXPR:
1174 /* Negate -f(x) as f(-x). */
1175 if (negate_mathfn_p (builtin_mathfn_code (t))
1176 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1178 tree fndecl, arg, arglist;
1180 fndecl = get_callee_fndecl (t);
1181 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1182 arglist = build_tree_list (NULL_TREE, arg);
1183 return build_function_call_expr (fndecl, arglist);
1185 break;
1187 case RSHIFT_EXPR:
1188 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1189 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1191 tree op1 = TREE_OPERAND (t, 1);
1192 if (TREE_INT_CST_HIGH (op1) == 0
1193 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1194 == TREE_INT_CST_LOW (op1))
1196 tree ntype = TYPE_UNSIGNED (type)
1197 ? lang_hooks.types.signed_type (type)
1198 : lang_hooks.types.unsigned_type (type);
1199 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1200 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1201 return fold_convert (type, temp);
1204 break;
1206 default:
1207 break;
1210 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1211 return fold_convert (type, tem);
1214 /* Split a tree IN into constant, literal and variable parts that could be
1215 combined with CODE to make IN. "constant" means an expression with
1216 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1217 commutative arithmetic operation. Store the constant part into *CONP,
1218 the literal in *LITP and return the variable part. If a part isn't
1219 present, set it to null. If the tree does not decompose in this way,
1220 return the entire tree as the variable part and the other parts as null.
1222 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1223 case, we negate an operand that was subtracted. Except if it is a
1224 literal for which we use *MINUS_LITP instead.
1226 If NEGATE_P is true, we are negating all of IN, again except a literal
1227 for which we use *MINUS_LITP instead.
1229 If IN is itself a literal or constant, return it as appropriate.
1231 Note that we do not guarantee that any of the three values will be the
1232 same type as IN, but they will have the same signedness and mode. */
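/* Editorial example: splitting IN = (x + 3) with CODE == PLUS_EXPR yields
   variable part x, *LITP = 3 and *CONP = 0; splitting (x - 3) instead
   places the 3 in *MINUS_LITP.  A TREE_CONSTANT but non-literal operand,
   such as the address of a global, would land in *CONP.  */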
1234 static tree
1235 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1236 tree *minus_litp, int negate_p)
1238 tree var = 0;
1240 *conp = 0;
1241 *litp = 0;
1242 *minus_litp = 0;
1244 /* Strip any conversions that don't change the machine mode or signedness. */
1245 STRIP_SIGN_NOPS (in);
1247 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1248 *litp = in;
1249 else if (TREE_CODE (in) == code
1250 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1251 /* We can associate addition and subtraction together (even
1252 though the C standard doesn't say so) for integers because
1253 the value is not affected. For reals, the value might be
1254 affected, so we can't. */
1255 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1256 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1258 tree op0 = TREE_OPERAND (in, 0);
1259 tree op1 = TREE_OPERAND (in, 1);
1260 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1261 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1263 /* First see if either of the operands is a literal, then a constant. */
1264 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1265 *litp = op0, op0 = 0;
1266 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1267 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1269 if (op0 != 0 && TREE_CONSTANT (op0))
1270 *conp = op0, op0 = 0;
1271 else if (op1 != 0 && TREE_CONSTANT (op1))
1272 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1274 /* If we haven't dealt with either operand, this is not a case we can
1275 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1276 if (op0 != 0 && op1 != 0)
1277 var = in;
1278 else if (op0 != 0)
1279 var = op0;
1280 else
1281 var = op1, neg_var_p = neg1_p;
1283 /* Now do any needed negations. */
1284 if (neg_litp_p)
1285 *minus_litp = *litp, *litp = 0;
1286 if (neg_conp_p)
1287 *conp = negate_expr (*conp);
1288 if (neg_var_p)
1289 var = negate_expr (var);
1291 else if (TREE_CONSTANT (in))
1292 *conp = in;
1293 else
1294 var = in;
1296 if (negate_p)
1298 if (*litp)
1299 *minus_litp = *litp, *litp = 0;
1300 else if (*minus_litp)
1301 *litp = *minus_litp, *minus_litp = 0;
1302 *conp = negate_expr (*conp);
1303 var = negate_expr (var);
1306 return var;
1309 /* Re-associate trees split by the above function. T1 and T2 are either
1310 expressions to associate or null. Return the new expression, if any. If
1311 we build an operation, do it in TYPE and with CODE. */
1313 static tree
1314 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1316 if (t1 == 0)
1317 return t2;
1318 else if (t2 == 0)
1319 return t1;
1321 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1322 try to fold this since we will have infinite recursion. But do
1323 deal with any NEGATE_EXPRs. */
1324 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1325 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1327 if (code == PLUS_EXPR)
1329 if (TREE_CODE (t1) == NEGATE_EXPR)
1330 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1331 fold_convert (type, TREE_OPERAND (t1, 0)));
1332 else if (TREE_CODE (t2) == NEGATE_EXPR)
1333 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1334 fold_convert (type, TREE_OPERAND (t2, 0)));
1335 else if (integer_zerop (t2))
1336 return fold_convert (type, t1);
1338 else if (code == MINUS_EXPR)
1340 if (integer_zerop (t2))
1341 return fold_convert (type, t1);
1344 return build2 (code, type, fold_convert (type, t1),
1345 fold_convert (type, t2));
1348 return fold_build2 (code, type, fold_convert (type, t1),
1349 fold_convert (type, t2));
1352 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1353 to produce a new constant. Return NULL_TREE if we don't know how
1354 to evaluate CODE at compile-time.
1356 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1358 tree
1359 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1361 unsigned HOST_WIDE_INT int1l, int2l;
1362 HOST_WIDE_INT int1h, int2h;
1363 unsigned HOST_WIDE_INT low;
1364 HOST_WIDE_INT hi;
1365 unsigned HOST_WIDE_INT garbagel;
1366 HOST_WIDE_INT garbageh;
1367 tree t;
1368 tree type = TREE_TYPE (arg1);
1369 int uns = TYPE_UNSIGNED (type);
1370 int is_sizetype
1371 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1372 int overflow = 0;
1374 int1l = TREE_INT_CST_LOW (arg1);
1375 int1h = TREE_INT_CST_HIGH (arg1);
1376 int2l = TREE_INT_CST_LOW (arg2);
1377 int2h = TREE_INT_CST_HIGH (arg2);
1379 switch (code)
1381 case BIT_IOR_EXPR:
1382 low = int1l | int2l, hi = int1h | int2h;
1383 break;
1385 case BIT_XOR_EXPR:
1386 low = int1l ^ int2l, hi = int1h ^ int2h;
1387 break;
1389 case BIT_AND_EXPR:
1390 low = int1l & int2l, hi = int1h & int2h;
1391 break;
1393 case RSHIFT_EXPR:
1394 int2l = -int2l;
1395 case LSHIFT_EXPR:
1396 /* It's unclear from the C standard whether shifts can overflow.
1397 The following code ignores overflow; perhaps a C standard
1398 interpretation ruling is needed. */
1399 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1400 &low, &hi, !uns);
1401 break;
1403 case RROTATE_EXPR:
1404 int2l = - int2l;
1405 case LROTATE_EXPR:
1406 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1407 &low, &hi);
1408 break;
1410 case PLUS_EXPR:
1411 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1412 break;
1414 case MINUS_EXPR:
1415 neg_double (int2l, int2h, &low, &hi);
1416 add_double (int1l, int1h, low, hi, &low, &hi);
1417 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1418 break;
1420 case MULT_EXPR:
1421 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1422 break;
1424 case TRUNC_DIV_EXPR:
1425 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1426 case EXACT_DIV_EXPR:
1427 /* This is a shortcut for a common special case. */
1428 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1429 && ! TREE_CONSTANT_OVERFLOW (arg1)
1430 && ! TREE_CONSTANT_OVERFLOW (arg2)
1431 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1433 if (code == CEIL_DIV_EXPR)
1434 int1l += int2l - 1;
1436 low = int1l / int2l, hi = 0;
1437 break;
1440 /* ... fall through ... */
1442 case ROUND_DIV_EXPR:
1443 if (int2h == 0 && int2l == 0)
1444 return NULL_TREE;
1445 if (int2h == 0 && int2l == 1)
1447 low = int1l, hi = int1h;
1448 break;
1450 if (int1l == int2l && int1h == int2h
1451 && ! (int1l == 0 && int1h == 0))
1453 low = 1, hi = 0;
1454 break;
1456 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1457 &low, &hi, &garbagel, &garbageh);
1458 break;
1460 case TRUNC_MOD_EXPR:
1461 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1462 /* This is a shortcut for a common special case. */
1463 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1464 && ! TREE_CONSTANT_OVERFLOW (arg1)
1465 && ! TREE_CONSTANT_OVERFLOW (arg2)
1466 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1468 if (code == CEIL_MOD_EXPR)
1469 int1l += int2l - 1;
1470 low = int1l % int2l, hi = 0;
1471 break;
1474 /* ... fall through ... */
1476 case ROUND_MOD_EXPR:
1477 if (int2h == 0 && int2l == 0)
1478 return NULL_TREE;
1479 overflow = div_and_round_double (code, uns,
1480 int1l, int1h, int2l, int2h,
1481 &garbagel, &garbageh, &low, &hi);
1482 break;
1484 case MIN_EXPR:
1485 case MAX_EXPR:
1486 if (uns)
1487 low = (((unsigned HOST_WIDE_INT) int1h
1488 < (unsigned HOST_WIDE_INT) int2h)
1489 || (((unsigned HOST_WIDE_INT) int1h
1490 == (unsigned HOST_WIDE_INT) int2h)
1491 && int1l < int2l));
1492 else
1493 low = (int1h < int2h
1494 || (int1h == int2h && int1l < int2l));
1496 if (low == (code == MIN_EXPR))
1497 low = int1l, hi = int1h;
1498 else
1499 low = int2l, hi = int2h;
1500 break;
1502 default:
1503 return NULL_TREE;
1506 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1508 if (notrunc)
1510 /* Propagate overflow flags ourselves. */
1511 if (((!uns || is_sizetype) && overflow)
1512 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1514 t = copy_node (t);
1515 TREE_OVERFLOW (t) = 1;
1516 TREE_CONSTANT_OVERFLOW (t) = 1;
1518 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1520 t = copy_node (t);
1521 TREE_CONSTANT_OVERFLOW (t) = 1;
1524 else
1525 t = force_fit_type (t, 1,
1526 ((!uns || is_sizetype) && overflow)
1527 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1528 TREE_CONSTANT_OVERFLOW (arg1)
1529 | TREE_CONSTANT_OVERFLOW (arg2));
1531 return t;
1534 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1535 constant. We assume ARG1 and ARG2 have the same data type, or at least
1536 are the same kind of constant and the same machine mode.
1538 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1540 static tree
1541 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1543 STRIP_NOPS (arg1);
1544 STRIP_NOPS (arg2);
1546 if (TREE_CODE (arg1) == INTEGER_CST)
1547 return int_const_binop (code, arg1, arg2, notrunc);
1549 if (TREE_CODE (arg1) == REAL_CST)
1551 enum machine_mode mode;
1552 REAL_VALUE_TYPE d1;
1553 REAL_VALUE_TYPE d2;
1554 REAL_VALUE_TYPE value;
1555 REAL_VALUE_TYPE result;
1556 bool inexact;
1557 tree t, type;
1559 /* The following codes are handled by real_arithmetic. */
1560 switch (code)
1562 case PLUS_EXPR:
1563 case MINUS_EXPR:
1564 case MULT_EXPR:
1565 case RDIV_EXPR:
1566 case MIN_EXPR:
1567 case MAX_EXPR:
1568 break;
1570 default:
1571 return NULL_TREE;
1574 d1 = TREE_REAL_CST (arg1);
1575 d2 = TREE_REAL_CST (arg2);
1577 type = TREE_TYPE (arg1);
1578 mode = TYPE_MODE (type);
1580 /* Don't perform operation if we honor signaling NaNs and
1581 either operand is a NaN. */
1582 if (HONOR_SNANS (mode)
1583 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1584 return NULL_TREE;
1586 /* Don't perform operation if it would raise a division
1587 by zero exception. */
1588 if (code == RDIV_EXPR
1589 && REAL_VALUES_EQUAL (d2, dconst0)
1590 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1591 return NULL_TREE;
1593 /* If either operand is a NaN, just return it. Otherwise, set up
1594 for floating-point trap; we return an overflow. */
1595 if (REAL_VALUE_ISNAN (d1))
1596 return arg1;
1597 else if (REAL_VALUE_ISNAN (d2))
1598 return arg2;
1600 inexact = real_arithmetic (&value, code, &d1, &d2);
1601 real_convert (&result, mode, &value);
1603 /* Don't constant fold this floating point operation if
1604 	 the result has overflowed and flag_trapping_math is set.  */
1606 if (flag_trapping_math
1607 && MODE_HAS_INFINITIES (mode)
1608 && REAL_VALUE_ISINF (result)
1609 && !REAL_VALUE_ISINF (d1)
1610 && !REAL_VALUE_ISINF (d2))
1611 return NULL_TREE;
1613 /* Don't constant fold this floating point operation if the
1614 	 result may depend upon the run-time rounding mode and
1615 flag_rounding_math is set, or if GCC's software emulation
1616 is unable to accurately represent the result. */
1618 if ((flag_rounding_math
1619 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1620 && !flag_unsafe_math_optimizations))
1621 && (inexact || !real_identical (&result, &value)))
1622 return NULL_TREE;
1624 t = build_real (type, result);
1626 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1627 TREE_CONSTANT_OVERFLOW (t)
1628 = TREE_OVERFLOW (t)
1629 | TREE_CONSTANT_OVERFLOW (arg1)
1630 | TREE_CONSTANT_OVERFLOW (arg2);
1631 return t;
1634 if (TREE_CODE (arg1) == COMPLEX_CST)
1636 tree type = TREE_TYPE (arg1);
1637 tree r1 = TREE_REALPART (arg1);
1638 tree i1 = TREE_IMAGPART (arg1);
1639 tree r2 = TREE_REALPART (arg2);
1640 tree i2 = TREE_IMAGPART (arg2);
1641 tree t;
1643 switch (code)
1645 case PLUS_EXPR:
1646 t = build_complex (type,
1647 const_binop (PLUS_EXPR, r1, r2, notrunc),
1648 const_binop (PLUS_EXPR, i1, i2, notrunc));
1649 break;
1651 case MINUS_EXPR:
1652 t = build_complex (type,
1653 const_binop (MINUS_EXPR, r1, r2, notrunc),
1654 const_binop (MINUS_EXPR, i1, i2, notrunc));
1655 break;
1657 case MULT_EXPR:
1658 t = build_complex (type,
1659 const_binop (MINUS_EXPR,
1660 const_binop (MULT_EXPR,
1661 r1, r2, notrunc),
1662 const_binop (MULT_EXPR,
1663 i1, i2, notrunc),
1664 notrunc),
1665 const_binop (PLUS_EXPR,
1666 const_binop (MULT_EXPR,
1667 r1, i2, notrunc),
1668 const_binop (MULT_EXPR,
1669 i1, r2, notrunc),
1670 notrunc));
1671 break;
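/* Editorial note: the MULT_EXPR case above is the textbook expansion
   (r1 + i1*I) * (r2 + i2*I) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*I,
   with each part folded by const_binop under the same NOTRUNC setting.  */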
1673 case RDIV_EXPR:
1675 tree t1, t2, real, imag;
1676 tree magsquared
1677 = const_binop (PLUS_EXPR,
1678 const_binop (MULT_EXPR, r2, r2, notrunc),
1679 const_binop (MULT_EXPR, i2, i2, notrunc),
1680 notrunc);
1682 t1 = const_binop (PLUS_EXPR,
1683 const_binop (MULT_EXPR, r1, r2, notrunc),
1684 const_binop (MULT_EXPR, i1, i2, notrunc),
1685 notrunc);
1686 t2 = const_binop (MINUS_EXPR,
1687 const_binop (MULT_EXPR, i1, r2, notrunc),
1688 const_binop (MULT_EXPR, r1, i2, notrunc),
1689 notrunc);
1691 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1693 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1694 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1696 else
1698 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1699 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1700 if (!real || !imag)
1701 return NULL_TREE;
1704 t = build_complex (type, real, imag);
1706 break;
1708 default:
1709 return NULL_TREE;
1711 return t;
1713 return NULL_TREE;
1716 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1717 indicates which particular sizetype to create. */
1719 tree
1720 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1722 return build_int_cst (sizetype_tab[(int) kind], number);
1725 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1726 is a tree code. The type of the result is taken from the operands.
1727    Both must be the same integer type, and it must be a size type.
1728 If the operands are constant, so is the result. */
1730 tree
1731 size_binop (enum tree_code code, tree arg0, tree arg1)
1733 tree type = TREE_TYPE (arg0);
1735 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1736 && type == TREE_TYPE (arg1));
1738 /* Handle the special case of two integer constants faster. */
1739 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1741 /* And some specific cases even faster than that. */
1742 if (code == PLUS_EXPR && integer_zerop (arg0))
1743 return arg1;
1744 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1745 && integer_zerop (arg1))
1746 return arg0;
1747 else if (code == MULT_EXPR && integer_onep (arg0))
1748 return arg1;
1750 /* Handle general case of two integer constants. */
1751 return int_const_binop (code, arg0, arg1, 0);
1754 if (arg0 == error_mark_node || arg1 == error_mark_node)
1755 return error_mark_node;
1757 return fold_build2 (code, type, arg0, arg1);
1760 /* Given two values, either both of sizetype or both of bitsizetype,
1761 compute the difference between the two values. Return the value
1762 in signed type corresponding to the type of the operands. */
1764 tree
1765 size_diffop (tree arg0, tree arg1)
1767 tree type = TREE_TYPE (arg0);
1768 tree ctype;
1770 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1771 && type == TREE_TYPE (arg1));
1773 /* If the type is already signed, just do the simple thing. */
1774 if (!TYPE_UNSIGNED (type))
1775 return size_binop (MINUS_EXPR, arg0, arg1);
1777 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1779 /* If either operand is not a constant, do the conversions to the signed
1780 type and subtract. The hardware will do the right thing with any
1781 overflow in the subtraction. */
1782 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1783 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1784 fold_convert (ctype, arg1));
1786 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1787 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1788 overflow) and negate (which can't either). Special-case a result
1789 of zero while we're here. */
1790 if (tree_int_cst_equal (arg0, arg1))
1791 return build_int_cst (ctype, 0);
1792 else if (tree_int_cst_lt (arg1, arg0))
1793 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1794 else
1795 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1796 fold_convert (ctype, size_binop (MINUS_EXPR,
1797 arg1, arg0)));
1800 /* A subroutine of fold_convert_const handling conversions of an
1801 INTEGER_CST to another integer type. */
1803 static tree
1804 fold_convert_const_int_from_int (tree type, tree arg1)
1806 tree t;
1808 /* Given an integer constant, make new constant with new type,
1809 appropriately sign-extended or truncated. */
1810 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1811 TREE_INT_CST_HIGH (arg1));
1813 t = force_fit_type (t,
1814 /* Don't set the overflow when
1815 converting a pointer */
1816 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1817 (TREE_INT_CST_HIGH (arg1) < 0
1818 && (TYPE_UNSIGNED (type)
1819 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1820 | TREE_OVERFLOW (arg1),
1821 TREE_CONSTANT_OVERFLOW (arg1));
1823 return t;
1826 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1827 to an integer type. */
1829 static tree
1830 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1832 int overflow = 0;
1833 tree t;
1835 /* The following code implements the floating point to integer
1836 conversion rules required by the Java Language Specification,
1837 that IEEE NaNs are mapped to zero and values that overflow
1838 the target precision saturate, i.e. values greater than
1839 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1840 are mapped to INT_MIN. These semantics are allowed by the
1841 C and C++ standards that simply state that the behavior of
1842 FP-to-integer conversion is unspecified upon overflow. */
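/* Editorial example of the saturating semantics described above:
   converting the REAL_CST 1e30 to a 32-bit signed type folds to INT_MAX
   (2147483647) with the overflow flag set, and a NaN input folds to 0,
   likewise flagged as an overflow.  */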
1844 HOST_WIDE_INT high, low;
1845 REAL_VALUE_TYPE r;
1846 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1848 switch (code)
1850 case FIX_TRUNC_EXPR:
1851 real_trunc (&r, VOIDmode, &x);
1852 break;
1854 case FIX_CEIL_EXPR:
1855 real_ceil (&r, VOIDmode, &x);
1856 break;
1858 case FIX_FLOOR_EXPR:
1859 real_floor (&r, VOIDmode, &x);
1860 break;
1862 case FIX_ROUND_EXPR:
1863 real_round (&r, VOIDmode, &x);
1864 break;
1866 default:
1867 gcc_unreachable ();
1870 /* If R is NaN, return zero and show we have an overflow. */
1871 if (REAL_VALUE_ISNAN (r))
1873 overflow = 1;
1874 high = 0;
1875 low = 0;
1878 /* See if R is less than the lower bound or greater than the
1879 upper bound. */
1881 if (! overflow)
1883 tree lt = TYPE_MIN_VALUE (type);
1884 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1885 if (REAL_VALUES_LESS (r, l))
1887 overflow = 1;
1888 high = TREE_INT_CST_HIGH (lt);
1889 low = TREE_INT_CST_LOW (lt);
1893 if (! overflow)
1895 tree ut = TYPE_MAX_VALUE (type);
1896 if (ut)
1898 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1899 if (REAL_VALUES_LESS (u, r))
1901 overflow = 1;
1902 high = TREE_INT_CST_HIGH (ut);
1903 low = TREE_INT_CST_LOW (ut);
1908 if (! overflow)
1909 REAL_VALUE_TO_INT (&low, &high, r);
1911 t = build_int_cst_wide (type, low, high);
1913 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1914 TREE_CONSTANT_OVERFLOW (arg1));
1915 return t;
1918 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1919 to another floating point type. */
1921 static tree
1922 fold_convert_const_real_from_real (tree type, tree arg1)
1924 REAL_VALUE_TYPE value;
1925 tree t;
1927 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1928 t = build_real (type, value);
1930 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1931 TREE_CONSTANT_OVERFLOW (t)
1932 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1933 return t;
1936 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1937 type TYPE. If no simplification can be done return NULL_TREE. */
1939 static tree
1940 fold_convert_const (enum tree_code code, tree type, tree arg1)
1942 if (TREE_TYPE (arg1) == type)
1943 return arg1;
1945 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1947 if (TREE_CODE (arg1) == INTEGER_CST)
1948 return fold_convert_const_int_from_int (type, arg1);
1949 else if (TREE_CODE (arg1) == REAL_CST)
1950 return fold_convert_const_int_from_real (code, type, arg1);
1952 else if (TREE_CODE (type) == REAL_TYPE)
1954 if (TREE_CODE (arg1) == INTEGER_CST)
1955 return build_real_from_int_cst (type, arg1);
1956 if (TREE_CODE (arg1) == REAL_CST)
1957 return fold_convert_const_real_from_real (type, arg1);
1959 return NULL_TREE;
1962 /* Construct a vector of zero elements of vector type TYPE. */
1964 static tree
1965 build_zero_vector (tree type)
1967 tree elem, list;
1968 int i, units;
1970 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1971 units = TYPE_VECTOR_SUBPARTS (type);
1973 list = NULL_TREE;
1974 for (i = 0; i < units; i++)
1975 list = tree_cons (NULL_TREE, elem, list);
1976 return build_vector (type, list);
1979 /* Convert expression ARG to type TYPE. Used by the middle-end for
1980 simple conversions in preference to calling the front-end's convert. */
1982 tree
1983 fold_convert (tree type, tree arg)
1985 tree orig = TREE_TYPE (arg);
1986 tree tem;
1988 if (type == orig)
1989 return arg;
1991 if (TREE_CODE (arg) == ERROR_MARK
1992 || TREE_CODE (type) == ERROR_MARK
1993 || TREE_CODE (orig) == ERROR_MARK)
1994 return error_mark_node;
1996 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1997 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1998 TYPE_MAIN_VARIANT (orig)))
1999 return fold_build1 (NOP_EXPR, type, arg);
2001 switch (TREE_CODE (type))
2003 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2004 case POINTER_TYPE: case REFERENCE_TYPE:
2005 case OFFSET_TYPE:
2006 if (TREE_CODE (arg) == INTEGER_CST)
2008 tem = fold_convert_const (NOP_EXPR, type, arg);
2009 if (tem != NULL_TREE)
2010 return tem;
2012 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2013 || TREE_CODE (orig) == OFFSET_TYPE)
2014 return fold_build1 (NOP_EXPR, type, arg);
2015 if (TREE_CODE (orig) == COMPLEX_TYPE)
2017 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2018 return fold_convert (type, tem);
2020 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2021 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2022 return fold_build1 (NOP_EXPR, type, arg);
2024 case REAL_TYPE:
2025 if (TREE_CODE (arg) == INTEGER_CST)
2027 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2028 if (tem != NULL_TREE)
2029 return tem;
2031 else if (TREE_CODE (arg) == REAL_CST)
2033 tem = fold_convert_const (NOP_EXPR, type, arg);
2034 if (tem != NULL_TREE)
2035 return tem;
2038 switch (TREE_CODE (orig))
2040 case INTEGER_TYPE:
2041 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2042 case POINTER_TYPE: case REFERENCE_TYPE:
2043 return fold_build1 (FLOAT_EXPR, type, arg);
2045 case REAL_TYPE:
2046 return fold_build1 (NOP_EXPR, type, arg);
2048 case COMPLEX_TYPE:
2049 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2050 return fold_convert (type, tem);
2052 default:
2053 gcc_unreachable ();
2056 case COMPLEX_TYPE:
2057 switch (TREE_CODE (orig))
2059 case INTEGER_TYPE:
2060 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2061 case POINTER_TYPE: case REFERENCE_TYPE:
2062 case REAL_TYPE:
2063 return build2 (COMPLEX_EXPR, type,
2064 fold_convert (TREE_TYPE (type), arg),
2065 fold_convert (TREE_TYPE (type), integer_zero_node));
2066 case COMPLEX_TYPE:
2068 tree rpart, ipart;
2070 if (TREE_CODE (arg) == COMPLEX_EXPR)
2072 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2073 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2074 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2077 arg = save_expr (arg);
2078 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2079 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2080 rpart = fold_convert (TREE_TYPE (type), rpart);
2081 ipart = fold_convert (TREE_TYPE (type), ipart);
2082 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2085 default:
2086 gcc_unreachable ();
2089 case VECTOR_TYPE:
2090 if (integer_zerop (arg))
2091 return build_zero_vector (type);
2092 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2093 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2094 || TREE_CODE (orig) == VECTOR_TYPE);
2095 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2097 case VOID_TYPE:
2098 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2100 default:
2101 gcc_unreachable ();
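/* Example of the dispatch above (illustrative only): converting a
   COMPLEX_TYPE value c to a REAL_TYPE such as double takes the real
   part, i.e. fold_convert (type, REALPART_EXPR <c>), while an
   INTEGER_CST converted to a REAL_TYPE goes through fold_convert_const
   with FLOAT_EXPR.  */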
2105 /* Return false if expr can be assumed not to be an lvalue, true
2106 otherwise. */
2108 static bool
2109 maybe_lvalue_p (tree x)
2111 /* We only need to wrap lvalue tree codes. */
2112 switch (TREE_CODE (x))
2114 case VAR_DECL:
2115 case PARM_DECL:
2116 case RESULT_DECL:
2117 case LABEL_DECL:
2118 case FUNCTION_DECL:
2119 case SSA_NAME:
2121 case COMPONENT_REF:
2122 case INDIRECT_REF:
2123 case ALIGN_INDIRECT_REF:
2124 case MISALIGNED_INDIRECT_REF:
2125 case ARRAY_REF:
2126 case ARRAY_RANGE_REF:
2127 case BIT_FIELD_REF:
2128 case OBJ_TYPE_REF:
2130 case REALPART_EXPR:
2131 case IMAGPART_EXPR:
2132 case PREINCREMENT_EXPR:
2133 case PREDECREMENT_EXPR:
2134 case SAVE_EXPR:
2135 case TRY_CATCH_EXPR:
2136 case WITH_CLEANUP_EXPR:
2137 case COMPOUND_EXPR:
2138 case MODIFY_EXPR:
2139 case TARGET_EXPR:
2140 case COND_EXPR:
2141 case BIND_EXPR:
2142 case MIN_EXPR:
2143 case MAX_EXPR:
2144 break;
2146 default:
2147 /* Assume the worst for front-end tree codes. */
2148 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2149 break;
2150 return false;
2153 return true;
2156 /* Return an expr equal to X but certainly not valid as an lvalue. */
2158 tree
2159 non_lvalue (tree x)
2161 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2162 us. */
2163 if (in_gimple_form)
2164 return x;
2166 if (! maybe_lvalue_p (x))
2167 return x;
2168 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2171 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2172 Zero means allow extended lvalues. */
2174 int pedantic_lvalues;
2176 /* When pedantic, return an expr equal to X but certainly not valid as a
2177 pedantic lvalue. Otherwise, return X. */
2179 static tree
2180 pedantic_non_lvalue (tree x)
2182 if (pedantic_lvalues)
2183 return non_lvalue (x);
2184 else
2185 return x;
2188 /* Given a tree comparison code, return the code that is the logical inverse
2189 of the given code. It is not safe to do this for floating-point
2190 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a flag
2191 saying whether NaNs must be honored: if reversing is unsafe, return ERROR_MARK. */
2193 enum tree_code
2194 invert_tree_comparison (enum tree_code code, bool honor_nans)
2196 if (honor_nans && flag_trapping_math)
2197 return ERROR_MARK;
2199 switch (code)
2201 case EQ_EXPR:
2202 return NE_EXPR;
2203 case NE_EXPR:
2204 return EQ_EXPR;
2205 case GT_EXPR:
2206 return honor_nans ? UNLE_EXPR : LE_EXPR;
2207 case GE_EXPR:
2208 return honor_nans ? UNLT_EXPR : LT_EXPR;
2209 case LT_EXPR:
2210 return honor_nans ? UNGE_EXPR : GE_EXPR;
2211 case LE_EXPR:
2212 return honor_nans ? UNGT_EXPR : GT_EXPR;
2213 case LTGT_EXPR:
2214 return UNEQ_EXPR;
2215 case UNEQ_EXPR:
2216 return LTGT_EXPR;
2217 case UNGT_EXPR:
2218 return LE_EXPR;
2219 case UNGE_EXPR:
2220 return LT_EXPR;
2221 case UNLT_EXPR:
2222 return GE_EXPR;
2223 case UNLE_EXPR:
2224 return GT_EXPR;
2225 case ORDERED_EXPR:
2226 return UNORDERED_EXPR;
2227 case UNORDERED_EXPR:
2228 return ORDERED_EXPR;
2229 default:
2230 gcc_unreachable ();
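/* A worked example of the table above: !(a < b) becomes a >= b when
   NaNs cannot occur, but a UNGE b when they can; and when NaNs are
   honored together with -ftrapping-math we return ERROR_MARK, since
   inverting could change trapping behavior.  */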
2234 /* Similar, but return the comparison that results if the operands are
2235 swapped. This is safe for floating-point. */
2237 enum tree_code
2238 swap_tree_comparison (enum tree_code code)
2240 switch (code)
2242 case EQ_EXPR:
2243 case NE_EXPR:
2244 case ORDERED_EXPR:
2245 case UNORDERED_EXPR:
2246 case LTGT_EXPR:
2247 case UNEQ_EXPR:
2248 return code;
2249 case GT_EXPR:
2250 return LT_EXPR;
2251 case GE_EXPR:
2252 return LE_EXPR;
2253 case LT_EXPR:
2254 return GT_EXPR;
2255 case LE_EXPR:
2256 return GE_EXPR;
2257 case UNGT_EXPR:
2258 return UNLT_EXPR;
2259 case UNGE_EXPR:
2260 return UNLE_EXPR;
2261 case UNLT_EXPR:
2262 return UNGT_EXPR;
2263 case UNLE_EXPR:
2264 return UNGE_EXPR;
2265 default:
2266 gcc_unreachable ();
2271 /* Convert a comparison tree code from an enum tree_code representation
2272 into a compcode bit-based encoding. This function is the inverse of
2273 compcode_to_comparison. */
2275 static enum comparison_code
2276 comparison_to_compcode (enum tree_code code)
2278 switch (code)
2280 case LT_EXPR:
2281 return COMPCODE_LT;
2282 case EQ_EXPR:
2283 return COMPCODE_EQ;
2284 case LE_EXPR:
2285 return COMPCODE_LE;
2286 case GT_EXPR:
2287 return COMPCODE_GT;
2288 case NE_EXPR:
2289 return COMPCODE_NE;
2290 case GE_EXPR:
2291 return COMPCODE_GE;
2292 case ORDERED_EXPR:
2293 return COMPCODE_ORD;
2294 case UNORDERED_EXPR:
2295 return COMPCODE_UNORD;
2296 case UNLT_EXPR:
2297 return COMPCODE_UNLT;
2298 case UNEQ_EXPR:
2299 return COMPCODE_UNEQ;
2300 case UNLE_EXPR:
2301 return COMPCODE_UNLE;
2302 case UNGT_EXPR:
2303 return COMPCODE_UNGT;
2304 case LTGT_EXPR:
2305 return COMPCODE_LTGT;
2306 case UNGE_EXPR:
2307 return COMPCODE_UNGE;
2308 default:
2309 gcc_unreachable ();
2313 /* Convert a compcode bit-based encoding of a comparison operator back
2314 to GCC's enum tree_code representation. This function is the
2315 inverse of comparison_to_compcode. */
2317 static enum tree_code
2318 compcode_to_comparison (enum comparison_code code)
2320 switch (code)
2322 case COMPCODE_LT:
2323 return LT_EXPR;
2324 case COMPCODE_EQ:
2325 return EQ_EXPR;
2326 case COMPCODE_LE:
2327 return LE_EXPR;
2328 case COMPCODE_GT:
2329 return GT_EXPR;
2330 case COMPCODE_NE:
2331 return NE_EXPR;
2332 case COMPCODE_GE:
2333 return GE_EXPR;
2334 case COMPCODE_ORD:
2335 return ORDERED_EXPR;
2336 case COMPCODE_UNORD:
2337 return UNORDERED_EXPR;
2338 case COMPCODE_UNLT:
2339 return UNLT_EXPR;
2340 case COMPCODE_UNEQ:
2341 return UNEQ_EXPR;
2342 case COMPCODE_UNLE:
2343 return UNLE_EXPR;
2344 case COMPCODE_UNGT:
2345 return UNGT_EXPR;
2346 case COMPCODE_LTGT:
2347 return LTGT_EXPR;
2348 case COMPCODE_UNGE:
2349 return UNGE_EXPR;
2350 default:
2351 gcc_unreachable ();
2355 /* Return a tree for the comparison which is the combination of
2356 doing the AND or OR (depending on CODE) of the two operations LCODE
2357 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2358 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2359 if this makes the transformation invalid. */
2361 tree
2362 combine_comparisons (enum tree_code code, enum tree_code lcode,
2363 enum tree_code rcode, tree truth_type,
2364 tree ll_arg, tree lr_arg)
2366 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2367 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2368 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2369 enum comparison_code compcode;
2371 switch (code)
2373 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2374 compcode = lcompcode & rcompcode;
2375 break;
2377 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2378 compcode = lcompcode | rcompcode;
2379 break;
2381 default:
2382 return NULL_TREE;
2385 if (!honor_nans)
2387 /* Eliminate unordered comparisons, as well as LTGT and ORD
2388 which are not used unless the mode has NaNs. */
2389 compcode &= ~COMPCODE_UNORD;
2390 if (compcode == COMPCODE_LTGT)
2391 compcode = COMPCODE_NE;
2392 else if (compcode == COMPCODE_ORD)
2393 compcode = COMPCODE_TRUE;
2395 else if (flag_trapping_math)
2397 /* Check that the original operation and the optimized ones will trap
2398 under the same condition. */
2399 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2400 && (lcompcode != COMPCODE_EQ)
2401 && (lcompcode != COMPCODE_ORD);
2402 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2403 && (rcompcode != COMPCODE_EQ)
2404 && (rcompcode != COMPCODE_ORD);
2405 bool trap = (compcode & COMPCODE_UNORD) == 0
2406 && (compcode != COMPCODE_EQ)
2407 && (compcode != COMPCODE_ORD);
2409 /* In a short-circuited boolean expression the LHS might be
2410 such that the RHS, if evaluated, will never trap. For
2411 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2412 if neither x nor y is NaN. (This is a mixed blessing: for
2413 example, the expression above will never trap, hence
2414 optimizing it to x < y would be invalid). */
2415 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2416 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2417 rtrap = false;
2419 /* If the comparison was short-circuited, and only the RHS
2420 trapped, we may now generate a spurious trap. */
2421 if (rtrap && !ltrap
2422 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2423 return NULL_TREE;
2425 /* If we changed the conditions that cause a trap, we lose. */
2426 if ((ltrap || rtrap) != trap)
2427 return NULL_TREE;
2430 if (compcode == COMPCODE_TRUE)
2431 return constant_boolean_node (true, truth_type);
2432 else if (compcode == COMPCODE_FALSE)
2433 return constant_boolean_node (false, truth_type);
2434 else
2435 return fold_build2 (compcode_to_comparison (compcode),
2436 truth_type, ll_arg, lr_arg);
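/* A worked example of the compcode arithmetic above, assuming integer
   operands so that HONOR_NANS is false: for (a < b) || (a == b) we OR
   COMPCODE_LT with COMPCODE_EQ, giving COMPCODE_LE, and build a <= b;
   for (a < b) && (a > b) the AND yields COMPCODE_FALSE and we return
   constant false.  */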
2439 /* Return nonzero if CODE is a tree code that represents a truth value. */
2441 static int
2442 truth_value_p (enum tree_code code)
2444 return (TREE_CODE_CLASS (code) == tcc_comparison
2445 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2446 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2447 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2450 /* Return nonzero if two operands (typically of the same tree node)
2451 are necessarily equal. If either argument has side-effects this
2452 function returns zero. FLAGS modifies behavior as follows:
2454 If OEP_ONLY_CONST is set, only return nonzero for constants.
2455 This function tests whether the operands are indistinguishable;
2456 it does not test whether they are equal using C's == operation.
2457 The distinction is important for IEEE floating point, because
2458 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2459 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2461 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2462 even though it may hold multiple values during a function.
2463 This is because a GCC tree node guarantees that nothing else is
2464 executed between the evaluation of its "operands" (which may often
2465 be evaluated in arbitrary order). Hence if the operands themselves
2466 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2467 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2468 unset means assuming isochronic (or instantaneous) tree equivalence.
2469 Unless comparing arbitrary expression trees, such as from different
2470 statements, this flag can usually be left unset.
2472 If OEP_PURE_SAME is set, then pure functions with identical arguments
2473 are considered the same. It is used when the caller has other ways
2474 to ensure that global memory is unchanged in between. */
2476 int
2477 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2479 /* If either is ERROR_MARK, they aren't equal. */
2480 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2481 return 0;
2483 /* If the two types don't have the same signedness, then we can't consider
2484 them equal. We must check this before the STRIP_NOPS calls
2485 because they may change the signedness of the arguments. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2487 return 0;
2489 STRIP_NOPS (arg0);
2490 STRIP_NOPS (arg1);
2492 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2493 /* This is needed for conversions and for COMPONENT_REF.
2494 Might as well play it safe and always test this. */
2495 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2496 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2497 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2498 return 0;
2500 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2501 We don't care about side effects in that case because the SAVE_EXPR
2502 takes care of that for us. In all other cases, two expressions are
2503 equal if they have no side effects. If we have two identical
2504 expressions with side effects that should be treated the same due
2505 to the only side effects being identical SAVE_EXPR's, that will
2506 be detected in the recursive calls below. */
2507 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2508 && (TREE_CODE (arg0) == SAVE_EXPR
2509 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2510 return 1;
2512 /* Next handle constant cases, those for which we can return 1 even
2513 if ONLY_CONST is set. */
2514 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2515 switch (TREE_CODE (arg0))
2517 case INTEGER_CST:
2518 return (! TREE_CONSTANT_OVERFLOW (arg0)
2519 && ! TREE_CONSTANT_OVERFLOW (arg1)
2520 && tree_int_cst_equal (arg0, arg1));
2522 case REAL_CST:
2523 return (! TREE_CONSTANT_OVERFLOW (arg0)
2524 && ! TREE_CONSTANT_OVERFLOW (arg1)
2525 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2526 TREE_REAL_CST (arg1)));
2528 case VECTOR_CST:
2530 tree v1, v2;
2532 if (TREE_CONSTANT_OVERFLOW (arg0)
2533 || TREE_CONSTANT_OVERFLOW (arg1))
2534 return 0;
2536 v1 = TREE_VECTOR_CST_ELTS (arg0);
2537 v2 = TREE_VECTOR_CST_ELTS (arg1);
2538 while (v1 && v2)
2540 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2541 flags))
2542 return 0;
2543 v1 = TREE_CHAIN (v1);
2544 v2 = TREE_CHAIN (v2);
2547 return v1 == v2;
2550 case COMPLEX_CST:
2551 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2552 flags)
2553 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2554 flags));
2556 case STRING_CST:
2557 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2558 && ! memcmp (TREE_STRING_POINTER (arg0),
2559 TREE_STRING_POINTER (arg1),
2560 TREE_STRING_LENGTH (arg0)));
2562 case ADDR_EXPR:
2563 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2564 0);
2565 default:
2566 break;
2569 if (flags & OEP_ONLY_CONST)
2570 return 0;
2572 /* Define macros to test an operand from arg0 and arg1 for equality and a
2573 variant that allows null and views null as being different from any
2574 non-null value. In the latter case, if either is null, then both
2575 must be; otherwise, do the normal comparison. */
2576 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2577 TREE_OPERAND (arg1, N), flags)
2579 #define OP_SAME_WITH_NULL(N) \
2580 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2581 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2583 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2585 case tcc_unary:
2586 /* Two conversions are equal only if signedness and modes match. */
2587 switch (TREE_CODE (arg0))
2589 case NOP_EXPR:
2590 case CONVERT_EXPR:
2591 case FIX_CEIL_EXPR:
2592 case FIX_TRUNC_EXPR:
2593 case FIX_FLOOR_EXPR:
2594 case FIX_ROUND_EXPR:
2595 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2596 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2597 return 0;
2598 break;
2599 default:
2600 break;
2603 return OP_SAME (0);
2606 case tcc_comparison:
2607 case tcc_binary:
2608 if (OP_SAME (0) && OP_SAME (1))
2609 return 1;
2611 /* For commutative ops, allow the other order. */
2612 return (commutative_tree_code (TREE_CODE (arg0))
2613 && operand_equal_p (TREE_OPERAND (arg0, 0),
2614 TREE_OPERAND (arg1, 1), flags)
2615 && operand_equal_p (TREE_OPERAND (arg0, 1),
2616 TREE_OPERAND (arg1, 0), flags));
2618 case tcc_reference:
2619 /* If either of the pointer (or reference) expressions we are
2620 dereferencing contains a side effect, these cannot be equal. */
2621 if (TREE_SIDE_EFFECTS (arg0)
2622 || TREE_SIDE_EFFECTS (arg1))
2623 return 0;
2625 switch (TREE_CODE (arg0))
2627 case INDIRECT_REF:
2628 case ALIGN_INDIRECT_REF:
2629 case MISALIGNED_INDIRECT_REF:
2630 case REALPART_EXPR:
2631 case IMAGPART_EXPR:
2632 return OP_SAME (0);
2634 case ARRAY_REF:
2635 case ARRAY_RANGE_REF:
2636 /* Operands 2 and 3 may be null. */
2637 return (OP_SAME (0)
2638 && OP_SAME (1)
2639 && OP_SAME_WITH_NULL (2)
2640 && OP_SAME_WITH_NULL (3));
2642 case COMPONENT_REF:
2643 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2644 may be NULL when we're called to compare MEM_EXPRs. */
2645 return OP_SAME_WITH_NULL (0)
2646 && OP_SAME (1)
2647 && OP_SAME_WITH_NULL (2);
2649 case BIT_FIELD_REF:
2650 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2652 default:
2653 return 0;
2656 case tcc_expression:
2657 switch (TREE_CODE (arg0))
2659 case ADDR_EXPR:
2660 case TRUTH_NOT_EXPR:
2661 return OP_SAME (0);
2663 case TRUTH_ANDIF_EXPR:
2664 case TRUTH_ORIF_EXPR:
2665 return OP_SAME (0) && OP_SAME (1);
2667 case TRUTH_AND_EXPR:
2668 case TRUTH_OR_EXPR:
2669 case TRUTH_XOR_EXPR:
2670 if (OP_SAME (0) && OP_SAME (1))
2671 return 1;
2673 /* Otherwise take into account this is a commutative operation. */
2674 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2675 TREE_OPERAND (arg1, 1), flags)
2676 && operand_equal_p (TREE_OPERAND (arg0, 1),
2677 TREE_OPERAND (arg1, 0), flags));
2679 case CALL_EXPR:
2680 /* If the CALL_EXPRs call different functions, then they
2681 clearly cannot be equal. */
2682 if (!OP_SAME (0))
2683 return 0;
2686 unsigned int cef = call_expr_flags (arg0);
2687 if (flags & OEP_PURE_SAME)
2688 cef &= ECF_CONST | ECF_PURE;
2689 else
2690 cef &= ECF_CONST;
2691 if (!cef)
2692 return 0;
2695 /* Now see if all the arguments are the same. operand_equal_p
2696 does not handle TREE_LIST, so we walk the operands here
2697 feeding them to operand_equal_p. */
2698 arg0 = TREE_OPERAND (arg0, 1);
2699 arg1 = TREE_OPERAND (arg1, 1);
2700 while (arg0 && arg1)
2702 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2703 flags))
2704 return 0;
2706 arg0 = TREE_CHAIN (arg0);
2707 arg1 = TREE_CHAIN (arg1);
2710 /* If we get here and both argument lists are exhausted
2711 then the CALL_EXPRs are equal. */
2712 return ! (arg0 || arg1);
2714 default:
2715 return 0;
2718 case tcc_declaration:
2719 /* Consider __builtin_sqrt equal to sqrt. */
2720 return (TREE_CODE (arg0) == FUNCTION_DECL
2721 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2722 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2723 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2725 default:
2726 return 0;
2729 #undef OP_SAME
2730 #undef OP_SAME_WITH_NULL
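/* Illustrative consequences of the cases above (not additional logic):
   a + b and b + a compare equal because PLUS_EXPR is commutative;
   -0.0 and 0.0 do not, since REAL_VALUES_IDENTICAL distinguishes them;
   and two identical CALL_EXPRs compare equal only when the callee is
   ECF_CONST (or ECF_PURE when OEP_PURE_SAME is set).  */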
2733 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2734 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2736 When in doubt, return 0. */
2738 static int
2739 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2741 int unsignedp1, unsignedpo;
2742 tree primarg0, primarg1, primother;
2743 unsigned int correct_width;
2745 if (operand_equal_p (arg0, arg1, 0))
2746 return 1;
2748 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2749 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2750 return 0;
2752 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2753 and see if the inner values are the same. This removes any
2754 signedness comparison, which doesn't matter here. */
2755 primarg0 = arg0, primarg1 = arg1;
2756 STRIP_NOPS (primarg0);
2757 STRIP_NOPS (primarg1);
2758 if (operand_equal_p (primarg0, primarg1, 0))
2759 return 1;
2761 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2762 actual comparison operand, ARG0.
2764 First throw away any conversions to wider types
2765 already present in the operands. */
2767 primarg1 = get_narrower (arg1, &unsignedp1);
2768 primother = get_narrower (other, &unsignedpo);
2770 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2771 if (unsignedp1 == unsignedpo
2772 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2773 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2775 tree type = TREE_TYPE (arg0);
2777 /* Make sure shorter operand is extended the right way
2778 to match the longer operand. */
2779 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2780 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2782 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2783 return 1;
2786 return 0;
2789 /* See if ARG is an expression that is either a comparison or is performing
2790 arithmetic on comparisons. The comparisons must only be comparing
2791 two different values, which will be stored in *CVAL1 and *CVAL2; if
2792 they are nonzero it means that some operands have already been found.
2793 No variables may be used anywhere else in the expression except in the
2794 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2795 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2797 If this is true, return 1. Otherwise, return zero. */
2799 static int
2800 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2802 enum tree_code code = TREE_CODE (arg);
2803 enum tree_code_class class = TREE_CODE_CLASS (code);
2805 /* We can handle some of the tcc_expression cases here. */
2806 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2807 class = tcc_unary;
2808 else if (class == tcc_expression
2809 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2810 || code == COMPOUND_EXPR))
2811 class = tcc_binary;
2813 else if (class == tcc_expression && code == SAVE_EXPR
2814 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2816 /* If we've already found a CVAL1 or CVAL2, this expression is
2817 too complex to handle. */
2818 if (*cval1 || *cval2)
2819 return 0;
2821 class = tcc_unary;
2822 *save_p = 1;
2825 switch (class)
2827 case tcc_unary:
2828 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2830 case tcc_binary:
2831 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2832 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2833 cval1, cval2, save_p));
2835 case tcc_constant:
2836 return 1;
2838 case tcc_expression:
2839 if (code == COND_EXPR)
2840 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2841 cval1, cval2, save_p)
2842 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2843 cval1, cval2, save_p)
2844 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2845 cval1, cval2, save_p));
2846 return 0;
2848 case tcc_comparison:
2849 /* First see if we can handle the first operand, then the second. For
2850 the second operand, we know *CVAL1 can't be zero. It must be that
2851 one side of the comparison is each of the values; test for the
2852 case where this isn't true by failing if the two operands
2853 are the same. */
2855 if (operand_equal_p (TREE_OPERAND (arg, 0),
2856 TREE_OPERAND (arg, 1), 0))
2857 return 0;
2859 if (*cval1 == 0)
2860 *cval1 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2863 else if (*cval2 == 0)
2864 *cval2 = TREE_OPERAND (arg, 0);
2865 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2867 else
2868 return 0;
2870 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2872 else if (*cval2 == 0)
2873 *cval2 = TREE_OPERAND (arg, 1);
2874 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2876 else
2877 return 0;
2879 return 1;
2881 default:
2882 return 0;
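/* For example (a sketch of the rules above): (x < y) && (x == y)
   satisfies this predicate with *CVAL1 = x and *CVAL2 = y, while
   (x < y) && (y < z) does not, because it mentions three values.  */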
2886 /* ARG is a tree that is known to contain just arithmetic operations and
2887 comparisons. Evaluate the operations in the tree substituting NEW0 for
2888 any occurrence of OLD0 as an operand of a comparison and likewise for
2889 NEW1 and OLD1. */
2891 static tree
2892 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2894 tree type = TREE_TYPE (arg);
2895 enum tree_code code = TREE_CODE (arg);
2896 enum tree_code_class class = TREE_CODE_CLASS (code);
2898 /* We can handle some of the tcc_expression cases here. */
2899 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2900 class = tcc_unary;
2901 else if (class == tcc_expression
2902 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2903 class = tcc_binary;
2905 switch (class)
2907 case tcc_unary:
2908 return fold_build1 (code, type,
2909 eval_subst (TREE_OPERAND (arg, 0),
2910 old0, new0, old1, new1));
2912 case tcc_binary:
2913 return fold_build2 (code, type,
2914 eval_subst (TREE_OPERAND (arg, 0),
2915 old0, new0, old1, new1),
2916 eval_subst (TREE_OPERAND (arg, 1),
2917 old0, new0, old1, new1));
2919 case tcc_expression:
2920 switch (code)
2922 case SAVE_EXPR:
2923 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2925 case COMPOUND_EXPR:
2926 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2928 case COND_EXPR:
2929 return fold_build3 (code, type,
2930 eval_subst (TREE_OPERAND (arg, 0),
2931 old0, new0, old1, new1),
2932 eval_subst (TREE_OPERAND (arg, 1),
2933 old0, new0, old1, new1),
2934 eval_subst (TREE_OPERAND (arg, 2),
2935 old0, new0, old1, new1));
2936 default:
2937 break;
2939 /* Fall through - ??? */
2941 case tcc_comparison:
2943 tree arg0 = TREE_OPERAND (arg, 0);
2944 tree arg1 = TREE_OPERAND (arg, 1);
2946 /* We need to check both for exact equality and tree equality. The
2947 former will be true if the operand has a side-effect. In that
2948 case, we know the operand occurred exactly once. */
2950 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2951 arg0 = new0;
2952 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2953 arg0 = new1;
2955 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2956 arg1 = new0;
2957 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2958 arg1 = new1;
2960 return fold_build2 (code, type, arg0, arg1);
2963 default:
2964 return arg;
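/* Example (illustrative): eval_subst on the tree (a < b) && (a == c)
   with OLD0 = a, NEW0 = x, OLD1 = c, NEW1 = y rebuilds it as
   (x < b) && (x == y); the substitution happens only at comparison
   operands.  */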
2968 /* Return a tree for the case when the result of an expression is RESULT
2969 converted to TYPE and OMITTED was previously an operand of the expression
2970 but is now not needed (e.g., we folded OMITTED * 0).
2972 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2973 the conversion of RESULT to TYPE. */
2975 tree
2976 omit_one_operand (tree type, tree result, tree omitted)
2978 tree t = fold_convert (type, result);
2980 if (TREE_SIDE_EFFECTS (omitted))
2981 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2983 return non_lvalue (t);
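/* For instance, when fold rewrites x * 0 to 0 it calls
   omit_one_operand (type, integer_zero_node, x): if x has side
   effects the result is COMPOUND_EXPR <x, 0>, so x is still
   evaluated; otherwise the result is just the constant.  */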
2986 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2988 static tree
2989 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2991 tree t = fold_convert (type, result);
2993 if (TREE_SIDE_EFFECTS (omitted))
2994 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2996 return pedantic_non_lvalue (t);
2999 /* Return a tree for the case when the result of an expression is RESULT
3000 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3001 of the expression but are now not needed.
3003 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3004 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3005 evaluated before OMITTED2. Otherwise, if neither has side effects,
3006 just do the conversion of RESULT to TYPE. */
3008 tree
3009 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3011 tree t = fold_convert (type, result);
3013 if (TREE_SIDE_EFFECTS (omitted2))
3014 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3015 if (TREE_SIDE_EFFECTS (omitted1))
3016 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3018 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3022 /* Return a simplified tree node for the truth-negation of ARG. This
3023 never alters ARG itself. We assume that ARG is an operation that
3024 returns a truth value (0 or 1).
3026 FIXME: one would think we would fold the result, but it causes
3027 problems with the dominator optimizer. */
3028 tree
3029 invert_truthvalue (tree arg)
3031 tree type = TREE_TYPE (arg);
3032 enum tree_code code = TREE_CODE (arg);
3034 if (code == ERROR_MARK)
3035 return arg;
3037 /* If this is a comparison, we can simply invert it, except for
3038 floating-point non-equality comparisons, in which case we just
3039 enclose a TRUTH_NOT_EXPR around what we have. */
3041 if (TREE_CODE_CLASS (code) == tcc_comparison)
3043 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3044 if (FLOAT_TYPE_P (op_type)
3045 && flag_trapping_math
3046 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3047 && code != NE_EXPR && code != EQ_EXPR)
3048 return build1 (TRUTH_NOT_EXPR, type, arg);
3049 else
3051 code = invert_tree_comparison (code,
3052 HONOR_NANS (TYPE_MODE (op_type)));
3053 if (code == ERROR_MARK)
3054 return build1 (TRUTH_NOT_EXPR, type, arg);
3055 else
3056 return build2 (code, type,
3057 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3061 switch (code)
3063 case INTEGER_CST:
3064 return constant_boolean_node (integer_zerop (arg), type);
3066 case TRUTH_AND_EXPR:
3067 return build2 (TRUTH_OR_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)),
3069 invert_truthvalue (TREE_OPERAND (arg, 1)));
3071 case TRUTH_OR_EXPR:
3072 return build2 (TRUTH_AND_EXPR, type,
3073 invert_truthvalue (TREE_OPERAND (arg, 0)),
3074 invert_truthvalue (TREE_OPERAND (arg, 1)));
3076 case TRUTH_XOR_EXPR:
3077 /* Here we can invert either operand. We invert the first operand
3078 unless the second operand is a TRUTH_NOT_EXPR in which case our
3079 result is the XOR of the first operand with the inside of the
3080 negation of the second operand. */
3082 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3083 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3084 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3085 else
3086 return build2 (TRUTH_XOR_EXPR, type,
3087 invert_truthvalue (TREE_OPERAND (arg, 0)),
3088 TREE_OPERAND (arg, 1));
3090 case TRUTH_ANDIF_EXPR:
3091 return build2 (TRUTH_ORIF_EXPR, type,
3092 invert_truthvalue (TREE_OPERAND (arg, 0)),
3093 invert_truthvalue (TREE_OPERAND (arg, 1)));
3095 case TRUTH_ORIF_EXPR:
3096 return build2 (TRUTH_ANDIF_EXPR, type,
3097 invert_truthvalue (TREE_OPERAND (arg, 0)),
3098 invert_truthvalue (TREE_OPERAND (arg, 1)));
3100 case TRUTH_NOT_EXPR:
3101 return TREE_OPERAND (arg, 0);
3103 case COND_EXPR:
3105 tree arg1 = TREE_OPERAND (arg, 1);
3106 tree arg2 = TREE_OPERAND (arg, 2);
3107 /* A COND_EXPR may have a throw as one operand, which
3108 then has void type. Just leave void operands
3109 as they are. */
3110 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3111 VOID_TYPE_P (TREE_TYPE (arg1))
3112 ? arg1 : invert_truthvalue (arg1),
3113 VOID_TYPE_P (TREE_TYPE (arg2))
3114 ? arg2 : invert_truthvalue (arg2));
3117 case COMPOUND_EXPR:
3118 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3119 invert_truthvalue (TREE_OPERAND (arg, 1)));
3121 case NON_LVALUE_EXPR:
3122 return invert_truthvalue (TREE_OPERAND (arg, 0));
3124 case NOP_EXPR:
3125 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3126 break;
3128 case CONVERT_EXPR:
3129 case FLOAT_EXPR:
3130 return build1 (TREE_CODE (arg), type,
3131 invert_truthvalue (TREE_OPERAND (arg, 0)));
3133 case BIT_AND_EXPR:
3134 if (!integer_onep (TREE_OPERAND (arg, 1)))
3135 break;
3136 return build2 (EQ_EXPR, type, arg,
3137 build_int_cst (type, 0));
3139 case SAVE_EXPR:
3140 return build1 (TRUTH_NOT_EXPR, type, arg);
3142 case CLEANUP_POINT_EXPR:
3143 return build1 (CLEANUP_POINT_EXPR, type,
3144 invert_truthvalue (TREE_OPERAND (arg, 0)));
3146 default:
3147 break;
3149 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3150 return build1 (TRUTH_NOT_EXPR, type, arg);
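/* Some worked inversions from the cases above: !(a && b) becomes
   !a || !b (De Morgan), !(a == b) becomes a != b, and a floating-point
   !(a < b) under -ftrapping-math stays wrapped in a TRUTH_NOT_EXPR
   because the inverted comparison could change trapping behavior.  */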
3153 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3154 operands are another bit-wise operation with a common input. If so,
3155 distribute the bit operations to save an operation and possibly two if
3156 constants are involved. For example, convert
3157 (A | B) & (A | C) into A | (B & C)
3158 Further simplification will occur if B and C are constants.
3160 If this optimization cannot be done, 0 will be returned. */
3162 static tree
3163 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3165 tree common;
3166 tree left, right;
3168 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3169 || TREE_CODE (arg0) == code
3170 || (TREE_CODE (arg0) != BIT_AND_EXPR
3171 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3172 return 0;
3174 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3176 common = TREE_OPERAND (arg0, 0);
3177 left = TREE_OPERAND (arg0, 1);
3178 right = TREE_OPERAND (arg1, 1);
3180 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3182 common = TREE_OPERAND (arg0, 0);
3183 left = TREE_OPERAND (arg0, 1);
3184 right = TREE_OPERAND (arg1, 0);
3186 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3188 common = TREE_OPERAND (arg0, 1);
3189 left = TREE_OPERAND (arg0, 0);
3190 right = TREE_OPERAND (arg1, 1);
3192 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3194 common = TREE_OPERAND (arg0, 1);
3195 left = TREE_OPERAND (arg0, 0);
3196 right = TREE_OPERAND (arg1, 0);
3198 else
3199 return 0;
3201 return fold_build2 (TREE_CODE (arg0), type, common,
3202 fold_build2 (code, type, left, right));
3205 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3206 with code CODE. This optimization is unsafe. */
3207 static tree
3208 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3210 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3211 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3213 /* (A / C) +- (B / C) -> (A +- B) / C. */
3214 if (mul0 == mul1
3215 && operand_equal_p (TREE_OPERAND (arg0, 1),
3216 TREE_OPERAND (arg1, 1), 0))
3217 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3218 fold_build2 (code, type,
3219 TREE_OPERAND (arg0, 0),
3220 TREE_OPERAND (arg1, 0)),
3221 TREE_OPERAND (arg0, 1));
3223 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3224 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3225 TREE_OPERAND (arg1, 0), 0)
3226 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3227 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3229 REAL_VALUE_TYPE r0, r1;
3230 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3231 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3232 if (!mul0)
3233 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3234 if (!mul1)
3235 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3236 real_arithmetic (&r0, code, &r0, &r1);
3237 return fold_build2 (MULT_EXPR, type,
3238 TREE_OPERAND (arg0, 0),
3239 build_real (type, r0));
3242 return NULL_TREE;
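/* A numeric sketch of the second transformation above: x / 2.0 + x / 4.0
   becomes x * 0.75, with 1/2 + 1/4 computed at compile time.  As noted,
   this is unsafe (it can change rounding and exception behavior), so
   callers are expected to guard it, e.g. behind
   flag_unsafe_math_optimizations.  */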
3245 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3246 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3248 static tree
3249 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3250 int unsignedp)
3252 tree result;
3254 if (bitpos == 0)
3256 tree size = TYPE_SIZE (TREE_TYPE (inner));
3257 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3258 || POINTER_TYPE_P (TREE_TYPE (inner)))
3259 && host_integerp (size, 0)
3260 && tree_low_cst (size, 0) == bitsize)
3261 return fold_convert (type, inner);
3264 result = build3 (BIT_FIELD_REF, type, inner,
3265 size_int (bitsize), bitsize_int (bitpos));
3267 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3269 return result;
3272 /* Optimize a bit-field compare.
3274 There are two cases: First is a compare against a constant and the
3275 second is a comparison of two items where the fields are at the same
3276 bit position relative to the start of a chunk (byte, halfword, word)
3277 large enough to contain it. In these cases we can avoid the shift
3278 implicit in bitfield extractions.
3280 For constants, we emit a compare of the shifted constant with the
3281 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3282 compared. For two fields at the same position, we do the ANDs with the
3283 similar mask and compare the result of the ANDs.
3285 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3286 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3287 are the left and right operands of the comparison, respectively.
3289 If the optimization described above can be done, we return the resulting
3290 tree. Otherwise we return zero. */
3292 static tree
3293 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3294 tree lhs, tree rhs)
3296 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3297 tree type = TREE_TYPE (lhs);
3298 tree signed_type, unsigned_type;
3299 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3300 enum machine_mode lmode, rmode, nmode;
3301 int lunsignedp, runsignedp;
3302 int lvolatilep = 0, rvolatilep = 0;
3303 tree linner, rinner = NULL_TREE;
3304 tree mask;
3305 tree offset;
3307 /* Get all the information about the extractions being done. If the bit size
3308 is the same as the size of the underlying object, we aren't doing an
3309 extraction at all and so can do nothing. We also don't want to
3310 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3311 then will no longer be able to replace it. */
3312 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3313 &lunsignedp, &lvolatilep, false);
3314 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3315 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3316 return 0;
3318 if (!const_p)
3320 /* If this is not a constant, we can only do something if bit positions,
3321 sizes, and signedness are the same. */
3322 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3323 &runsignedp, &rvolatilep, false);
3325 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3326 || lunsignedp != runsignedp || offset != 0
3327 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3328 return 0;
3331 /* See if we can find a mode to refer to this field. We should be able to,
3332 but fail if we can't. */
3333 nmode = get_best_mode (lbitsize, lbitpos,
3334 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3335 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3336 TYPE_ALIGN (TREE_TYPE (rinner))),
3337 word_mode, lvolatilep || rvolatilep);
3338 if (nmode == VOIDmode)
3339 return 0;
3341 /* Set signed and unsigned types of the precision of this mode for the
3342 shifts below. */
3343 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3344 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3346 /* Compute the bit position and size for the new reference and our offset
3347 within it. If the new reference is the same size as the original, we
3348 won't optimize anything, so return zero. */
3349 nbitsize = GET_MODE_BITSIZE (nmode);
3350 nbitpos = lbitpos & ~ (nbitsize - 1);
3351 lbitpos -= nbitpos;
3352 if (nbitsize == lbitsize)
3353 return 0;
3355 if (BYTES_BIG_ENDIAN)
3356 lbitpos = nbitsize - lbitsize - lbitpos;
3358 /* Make the mask to be used against the extracted field. */
3359 mask = build_int_cst (unsigned_type, -1);
3360 mask = force_fit_type (mask, 0, false, false);
3361 mask = fold_convert (unsigned_type, mask);
3362 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3363 mask = const_binop (RSHIFT_EXPR, mask,
3364 size_int (nbitsize - lbitsize - lbitpos), 0);
3366 if (! const_p)
3367 /* If not comparing with constant, just rework the comparison
3368 and return. */
3369 return build2 (code, compare_type,
3370 build2 (BIT_AND_EXPR, unsigned_type,
3371 make_bit_field_ref (linner, unsigned_type,
3372 nbitsize, nbitpos, 1),
3373 mask),
3374 build2 (BIT_AND_EXPR, unsigned_type,
3375 make_bit_field_ref (rinner, unsigned_type,
3376 nbitsize, nbitpos, 1),
3377 mask));
3379 /* Otherwise, we are handling the constant case. See if the constant is too
3380 big for the field. Warn and return a tree for 0 (false) if so. We do
3381 this not only for its own sake, but to avoid having to test for this
3382 error case below. If we didn't, we might generate wrong code.
3384 For unsigned fields, the constant shifted right by the field length should
3385 be all zero. For signed fields, the high-order bits should agree with
3386 the sign bit. */
3388 if (lunsignedp)
3390 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3391 fold_convert (unsigned_type, rhs),
3392 size_int (lbitsize), 0)))
3394 warning (0, "comparison is always %d due to width of bit-field",
3395 code == NE_EXPR);
3396 return constant_boolean_node (code == NE_EXPR, compare_type);
3399 else
3401 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3402 size_int (lbitsize - 1), 0);
3403 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3405 warning (0, "comparison is always %d due to width of bit-field",
3406 code == NE_EXPR);
3407 return constant_boolean_node (code == NE_EXPR, compare_type);
3411 /* Single-bit compares should always be against zero. */
3412 if (lbitsize == 1 && ! integer_zerop (rhs))
3414 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3415 rhs = build_int_cst (type, 0);
3418 /* Make a new bitfield reference, shift the constant over the
3419 appropriate number of bits and mask it with the computed mask
3420 (in case this was a signed field). If we changed it, make a new one. */
3421 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3422 if (lvolatilep)
3424 TREE_SIDE_EFFECTS (lhs) = 1;
3425 TREE_THIS_VOLATILE (lhs) = 1;
3428 rhs = const_binop (BIT_AND_EXPR,
3429 const_binop (LSHIFT_EXPR,
3430 fold_convert (unsigned_type, rhs),
3431 size_int (lbitpos), 0),
3432 mask, 0);
3434 return build2 (code, compare_type,
3435 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3436 rhs);
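/* Sketch of the constant case above (the layout is illustrative):
   given struct { unsigned a : 3; } s, the test s.a == 5 becomes
   roughly (w & mask) == ((5 << lbitpos) & mask), where w is a
   mode-sized BIT_FIELD_REF covering the field, avoiding the shift a
   plain bitfield extraction would need.  */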
3439 /* Subroutine for fold_truthop: decode a field reference.
3441 If EXP is a comparison reference, we return the innermost reference.
3443 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3444 set to the starting bit number.
3446 If the innermost field can be completely contained in a mode-sized
3447 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3449 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3450 otherwise it is not changed.
3452 *PUNSIGNEDP is set to the signedness of the field.
3454 *PMASK is set to the mask used. This is either contained in a
3455 BIT_AND_EXPR or derived from the width of the field.
3457 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3459 Return 0 if this is not a component reference or is one that we can't
3460 do anything with. */
3462 static tree
3463 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3464 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3465 int *punsignedp, int *pvolatilep,
3466 tree *pmask, tree *pand_mask)
3468 tree outer_type = 0;
3469 tree and_mask = 0;
3470 tree mask, inner, offset;
3471 tree unsigned_type;
3472 unsigned int precision;
3474 /* All the optimizations using this function assume integer fields.
3475 There are problems with FP fields since the type_for_size call
3476 below can fail for, e.g., XFmode. */
3477 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3478 return 0;
3480 /* We are interested in the bare arrangement of bits, so strip everything
3481 that doesn't affect the machine mode. However, record the type of the
3482 outermost expression if it may matter below. */
3483 if (TREE_CODE (exp) == NOP_EXPR
3484 || TREE_CODE (exp) == CONVERT_EXPR
3485 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3486 outer_type = TREE_TYPE (exp);
3487 STRIP_NOPS (exp);
3489 if (TREE_CODE (exp) == BIT_AND_EXPR)
3491 and_mask = TREE_OPERAND (exp, 1);
3492 exp = TREE_OPERAND (exp, 0);
3493 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3494 if (TREE_CODE (and_mask) != INTEGER_CST)
3495 return 0;
3498 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3499 punsignedp, pvolatilep, false);
3500 if ((inner == exp && and_mask == 0)
3501 || *pbitsize < 0 || offset != 0
3502 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3503 return 0;
3505 /* If the number of bits in the reference is the same as the bitsize of
3506 the outer type, then the outer type gives the signedness. Otherwise
3507 (in case of a small bitfield) the signedness is unchanged. */
3508 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3509 *punsignedp = TYPE_UNSIGNED (outer_type);
3511 /* Compute the mask to access the bitfield. */
3512 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3513 precision = TYPE_PRECISION (unsigned_type);
3515 mask = build_int_cst (unsigned_type, -1);
3516 mask = force_fit_type (mask, 0, false, false);
3518 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3519 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3521 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3522 if (and_mask != 0)
3523 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3524 fold_convert (unsigned_type, and_mask), mask);
3526 *pmask = mask;
3527 *pand_mask = and_mask;
3528 return inner;
3531 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3532 bit positions. */
3534 static int
3535 all_ones_mask_p (tree mask, int size)
3537 tree type = TREE_TYPE (mask);
3538 unsigned int precision = TYPE_PRECISION (type);
3539 tree tmask;
3541 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3542 tmask = force_fit_type (tmask, 0, false, false);
3544 return
3545 tree_int_cst_equal (mask,
3546 const_binop (RSHIFT_EXPR,
3547 const_binop (LSHIFT_EXPR, tmask,
3548 size_int (precision - size),
3549 0),
3550 size_int (precision - size), 0));
3553 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3554 represents the sign bit of EXP's type. If EXP represents a sign
3555 or zero extension, also test VAL against the unextended type.
3556 The return value is the (sub)expression whose sign bit is VAL,
3557 or NULL_TREE otherwise. */
3559 static tree
3560 sign_bit_p (tree exp, tree val)
3562 unsigned HOST_WIDE_INT mask_lo, lo;
3563 HOST_WIDE_INT mask_hi, hi;
3564 int width;
3565 tree t;
3567 /* Tree EXP must have an integral type. */
3568 t = TREE_TYPE (exp);
3569 if (! INTEGRAL_TYPE_P (t))
3570 return NULL_TREE;
3572 /* Tree VAL must be an integer constant. */
3573 if (TREE_CODE (val) != INTEGER_CST
3574 || TREE_CONSTANT_OVERFLOW (val))
3575 return NULL_TREE;
3577 width = TYPE_PRECISION (t);
3578 if (width > HOST_BITS_PER_WIDE_INT)
3580 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3581 lo = 0;
3583 mask_hi = ((unsigned HOST_WIDE_INT) -1
3584 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3585 mask_lo = -1;
3587 else
3589 hi = 0;
3590 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3592 mask_hi = 0;
3593 mask_lo = ((unsigned HOST_WIDE_INT) -1
3594 >> (HOST_BITS_PER_WIDE_INT - width));
3597 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3598 treat VAL as if it were unsigned. */
3599 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3600 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3601 return exp;
3603 /* Handle extension from a narrower type. */
3604 if (TREE_CODE (exp) == NOP_EXPR
3605 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3606 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3608 return NULL_TREE;
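/* Example: for a 32-bit signed EXP, sign_bit_p returns EXP when VAL
   is the constant 0x80000000; and if EXP is an extension such as
   (int) c for a narrower c, it recurses so that VAL is also tested
   against the sign bit of the unextended type.  */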
3611 /* Subroutine for fold_truthop: determine if an operand is simple enough
3612 to be evaluated unconditionally. */
3614 static int
3615 simple_operand_p (tree exp)
3617 /* Strip any conversions that don't change the machine mode. */
3618 STRIP_NOPS (exp);
3620 return (CONSTANT_CLASS_P (exp)
3621 || TREE_CODE (exp) == SSA_NAME
3622 || (DECL_P (exp)
3623 && ! TREE_ADDRESSABLE (exp)
3624 && ! TREE_THIS_VOLATILE (exp)
3625 && ! DECL_NONLOCAL (exp)
3626 /* Don't regard global variables as simple. They may be
3627 allocated in ways unknown to the compiler (shared memory,
3628 #pragma weak, etc). */
3629 && ! TREE_PUBLIC (exp)
3630 && ! DECL_EXTERNAL (exp)
3631 /* Loading a static variable is unduly expensive, but global
3632 registers aren't expensive. */
3633 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3636 /* The following functions are subroutines to fold_range_test and allow it to
3637 try to change a logical combination of comparisons into a range test.
3639 For example, both
3640 X == 2 || X == 3 || X == 4 || X == 5
3641 and
3642 X >= 2 && X <= 5
3643 are converted to
3644 (unsigned) (X - 2) <= 3
3646 We describe each set of comparisons as being either inside or outside
3647 a range, using a variable named like IN_P, and then describe the
3648 range with a lower and upper bound. If one of the bounds is omitted,
3649 it represents either the highest or lowest value of the type.
3651 In the comments below, we represent a range by two numbers in brackets
3652 preceded by a "+" to designate being inside that range, or a "-" to
3653 designate being outside that range, so the condition can be inverted by
3654 flipping the prefix. An omitted bound is represented by a "-". For
3655 example, "- [-, 10]" means being outside the range starting at the lowest
3656 possible value and ending at 10, in other words, being greater than 10.
3657 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3658 always false.
3660 We set up things so that the missing bounds are handled in a consistent
3661 manner so neither a missing bound nor "true" and "false" need to be
3662 handled using a special case. */
3664 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3665 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3666 and UPPER1_P are nonzero if the respective argument is an upper bound
3667 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3668 must be specified for a comparison. ARG1 will be converted to ARG0's
3669 type if both are specified. */
3671 static tree
3672 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3673 tree arg1, int upper1_p)
3675 tree tem;
3676 int result;
3677 int sgn0, sgn1;
3679 /* If neither arg represents infinity, do the normal operation.
3680 Else, if not a comparison, return infinity. Else handle the special
3681 comparison rules. Note that most of the cases below won't occur, but
3682 are handled for consistency. */
3684 if (arg0 != 0 && arg1 != 0)
3686 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3687 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3688 STRIP_NOPS (tem);
3689 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3692 if (TREE_CODE_CLASS (code) != tcc_comparison)
3693 return 0;
3695 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3696 for neither. In real maths, we cannot assume open ended ranges are
3697 the same. But, this is computer arithmetic, where numbers are finite.
3698 We can therefore make the transformation of any unbounded range with
3699 the value Z, Z being greater than any representable number. This permits
3700 us to treat unbounded ranges as equal. */
3701 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3702 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3703 switch (code)
3705 case EQ_EXPR:
3706 result = sgn0 == sgn1;
3707 break;
3708 case NE_EXPR:
3709 result = sgn0 != sgn1;
3710 break;
3711 case LT_EXPR:
3712 result = sgn0 < sgn1;
3713 break;
3714 case LE_EXPR:
3715 result = sgn0 <= sgn1;
3716 break;
3717 case GT_EXPR:
3718 result = sgn0 > sgn1;
3719 break;
3720 case GE_EXPR:
3721 result = sgn0 >= sgn1;
3722 break;
3723 default:
3724 gcc_unreachable ();
3727 return constant_boolean_node (result, type);
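/* Example of the infinity handling above: an omitted bound stands for
   the extreme value of the type, so range_binop (LT_EXPR, type, 0, 0,
   c, 0), where ARG0 is the omitted lower bound, compares a conceptual
   minus infinity against the constant c and yields true for every c.  */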
3730 /* Given EXP, a logical expression, set the range it is testing into
3731 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3732 actually being tested. *PLOW and *PHIGH will be made of the same type
3733 as the returned expression. If EXP is not a comparison, we will most
3734 likely not be returning a useful value and range. */
3736 static tree
3737 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3739 enum tree_code code;
3740 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3741 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3742 int in_p, n_in_p;
3743 tree low, high, n_low, n_high;
3745 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3746 and see if we can refine the range. Some of the cases below may not
3747 happen, but it doesn't seem worth worrying about this. We "continue"
3748 the outer loop when we've changed something; otherwise we "break"
3749 the switch, which will "break" the while. */
3751 in_p = 0;
3752 low = high = build_int_cst (TREE_TYPE (exp), 0);
3754 while (1)
3756 code = TREE_CODE (exp);
3757 exp_type = TREE_TYPE (exp);
3759 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3761 if (TREE_CODE_LENGTH (code) > 0)
3762 arg0 = TREE_OPERAND (exp, 0);
3763 if (TREE_CODE_CLASS (code) == tcc_comparison
3764 || TREE_CODE_CLASS (code) == tcc_unary
3765 || TREE_CODE_CLASS (code) == tcc_binary)
3766 arg0_type = TREE_TYPE (arg0);
3767 if (TREE_CODE_CLASS (code) == tcc_binary
3768 || TREE_CODE_CLASS (code) == tcc_comparison
3769 || (TREE_CODE_CLASS (code) == tcc_expression
3770 && TREE_CODE_LENGTH (code) > 1))
3771 arg1 = TREE_OPERAND (exp, 1);
3774 switch (code)
3776 case TRUTH_NOT_EXPR:
3777 in_p = ! in_p, exp = arg0;
3778 continue;
3780 case EQ_EXPR: case NE_EXPR:
3781 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3782 /* We can only do something if the range is testing for zero
3783 and if the second operand is an integer constant. Note that
3784 saying something is "in" the range we make is done by
3785 complementing IN_P since it will set in the initial case of
3786 being not equal to zero; "out" is leaving it alone. */
3787 if (low == 0 || high == 0
3788 || ! integer_zerop (low) || ! integer_zerop (high)
3789 || TREE_CODE (arg1) != INTEGER_CST)
3790 break;
3792 switch (code)
3794 case NE_EXPR: /* - [c, c] */
3795 low = high = arg1;
3796 break;
3797 case EQ_EXPR: /* + [c, c] */
3798 in_p = ! in_p, low = high = arg1;
3799 break;
3800 case GT_EXPR: /* - [-, c] */
3801 low = 0, high = arg1;
3802 break;
3803 case GE_EXPR: /* + [c, -] */
3804 in_p = ! in_p, low = arg1, high = 0;
3805 break;
3806 case LT_EXPR: /* - [c, -] */
3807 low = arg1, high = 0;
3808 break;
3809 case LE_EXPR: /* + [-, c] */
3810 in_p = ! in_p, low = 0, high = arg1;
3811 break;
3812 default:
3813 gcc_unreachable ();
3816 /* If this is an unsigned comparison, we also know that EXP is
3817 greater than or equal to zero. We base the range tests we make
3818 on that fact, so we record it here so we can parse existing
3819 range tests. We test arg0_type since often the return type
3820 of, e.g. EQ_EXPR, is boolean. */
3821 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3823 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3824 in_p, low, high, 1,
3825 build_int_cst (arg0_type, 0),
3826 NULL_TREE))
3827 break;
3829 in_p = n_in_p, low = n_low, high = n_high;
3831 /* If the high bound is missing, but we have a nonzero low
3832 bound, reverse the range so it goes from zero to the low bound
3833 minus 1. */
3834 if (high == 0 && low && ! integer_zerop (low))
3836 in_p = ! in_p;
3837 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3838 integer_one_node, 0);
3839 low = build_int_cst (arg0_type, 0);
3843 exp = arg0;
3844 continue;
3846 case NEGATE_EXPR:
3847 /* (-x) IN [a,b] -> x in [-b, -a] */
3848 n_low = range_binop (MINUS_EXPR, exp_type,
3849 build_int_cst (exp_type, 0),
3850 0, high, 1);
3851 n_high = range_binop (MINUS_EXPR, exp_type,
3852 build_int_cst (exp_type, 0),
3853 0, low, 0);
3854 low = n_low, high = n_high;
3855 exp = arg0;
3856 continue;
3858 case BIT_NOT_EXPR:
3859 /* ~ X -> -X - 1 */
3860 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3861 build_int_cst (exp_type, 1));
3862 continue;
3864 case PLUS_EXPR: case MINUS_EXPR:
3865 if (TREE_CODE (arg1) != INTEGER_CST)
3866 break;
3868 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3869 move a constant to the other side. */
3870 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3871 break;
3873 /* If EXP is signed, any overflow in the computation is undefined,
3874 so we don't worry about it so long as our computations on
3875 the bounds don't overflow. For unsigned, overflow is defined
3876 and this is exactly the right thing. */
3877 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3878 arg0_type, low, 0, arg1, 0);
3879 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3880 arg0_type, high, 1, arg1, 0);
3881 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3882 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3883 break;
3885 /* Check for an unsigned range which has wrapped around the maximum
3886 value thus making n_high < n_low, and normalize it. */
3887 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3889 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3890 integer_one_node, 0);
3891 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3892 integer_one_node, 0);
3894 /* If the range is of the form +/- [ x+1, x ], we won't
3895 be able to normalize it. But then, it represents the
3896 whole range or the empty set, so make it
3897 +/- [ -, - ]. */
3898 if (tree_int_cst_equal (n_low, low)
3899 && tree_int_cst_equal (n_high, high))
3900 low = high = 0;
3901 else
3902 in_p = ! in_p;
3904 else
3905 low = n_low, high = n_high;
3907 exp = arg0;
3908 continue;
3910 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3911 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3912 break;
3914 if (! INTEGRAL_TYPE_P (arg0_type)
3915 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3916 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3917 break;
3919 n_low = low, n_high = high;
3921 if (n_low != 0)
3922 n_low = fold_convert (arg0_type, n_low);
3924 if (n_high != 0)
3925 n_high = fold_convert (arg0_type, n_high);
3928 /* If we're converting arg0 from an unsigned type to exp,
3929 a signed type, we will be doing the comparison as unsigned.
3930 The tests above have already verified that LOW and HIGH
3931 are both positive.
3933 So we have to ensure that we will handle large unsigned
3934 values the same way that the current signed bounds treat
3935 negative values. */
3937 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3939 tree high_positive;
3940 tree equiv_type = lang_hooks.types.type_for_mode
3941 (TYPE_MODE (arg0_type), 1);
3943 /* A range without an upper bound is, naturally, unbounded.
3944 Since convert would have cropped a very large value, use
3945 the max value for the destination type. */
3946 high_positive
3947 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3948 : TYPE_MAX_VALUE (arg0_type);
3950 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3951 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3952 fold_convert (arg0_type,
3953 high_positive),
3954 fold_convert (arg0_type,
3955 integer_one_node));
3957 /* If the low bound is specified, "and" the range with the
3958 range for which the original unsigned value will be
3959 positive. */
3960 if (low != 0)
3962 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3963 1, n_low, n_high, 1,
3964 fold_convert (arg0_type,
3965 integer_zero_node),
3966 high_positive))
3967 break;
3969 in_p = (n_in_p == in_p);
3971 else
3973 /* Otherwise, "or" the range with the range of the input
3974 that will be interpreted as negative. */
3975 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3976 0, n_low, n_high, 1,
3977 fold_convert (arg0_type,
3978 integer_zero_node),
3979 high_positive))
3980 break;
3982 in_p = (in_p != n_in_p);
3986 exp = arg0;
3987 low = n_low, high = n_high;
3988 continue;
3990 default:
3991 break;
3994 break;
3997 /* If EXP is a constant, we can evaluate whether this is true or false. */
3998 if (TREE_CODE (exp) == INTEGER_CST)
4000 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4001 exp, 0, low, 0))
4002 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4003 exp, 1, high, 1)));
4004 low = high = 0;
4005 exp = 0;
4008 *pin_p = in_p, *plow = low, *phigh = high;
4009 return exp;
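/* Editor's note: a standalone sketch (not part of GCC) of two of the
   rewrites make_range applies.  For two's complement integers, ~X is
   -X - 1, and (-X) in [A, B] holds exactly when X is in [-B, -A].  */

static int
example_in_range (long x, long lo, long hi)
{
  return lo <= x && x <= hi;
}

static int
example_make_range_rules (long x, long lo, long hi)
{
  /* Both equalities below hold for all values away from overflow.  */
  return (~x == -x - 1)
	 && (example_in_range (-x, lo, hi)
	     == example_in_range (x, -hi, -lo));
}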
4012 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4013 type, TYPE, return an expression to test if EXP is in (or out of, depending
4014 on IN_P) the range. Return 0 if the test couldn't be created. */
4016 static tree
4017 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4019 tree etype = TREE_TYPE (exp);
4020 tree value;
4022 #ifdef HAVE_canonicalize_funcptr_for_compare
4023 /* Disable this optimization for function pointer expressions
4024 on targets that require function pointer canonicalization. */
4025 if (HAVE_canonicalize_funcptr_for_compare
4026 && TREE_CODE (etype) == POINTER_TYPE
4027 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4028 return NULL_TREE;
4029 #endif
4031 if (! in_p)
4033 value = build_range_check (type, exp, 1, low, high);
4034 if (value != 0)
4035 return invert_truthvalue (value);
4037 return 0;
4040 if (low == 0 && high == 0)
4041 return build_int_cst (type, 1);
4043 if (low == 0)
4044 return fold_build2 (LE_EXPR, type, exp,
4045 fold_convert (etype, high));
4047 if (high == 0)
4048 return fold_build2 (GE_EXPR, type, exp,
4049 fold_convert (etype, low));
4051 if (operand_equal_p (low, high, 0))
4052 return fold_build2 (EQ_EXPR, type, exp,
4053 fold_convert (etype, low));
4055 if (integer_zerop (low))
4057 if (! TYPE_UNSIGNED (etype))
4059 etype = lang_hooks.types.unsigned_type (etype);
4060 high = fold_convert (etype, high);
4061 exp = fold_convert (etype, exp);
4063 return build_range_check (type, exp, 1, 0, high);
4066 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4067 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4069 unsigned HOST_WIDE_INT lo;
4070 HOST_WIDE_INT hi;
4071 int prec;
4073 prec = TYPE_PRECISION (etype);
4074 if (prec <= HOST_BITS_PER_WIDE_INT)
4076 hi = 0;
4077 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4079 else
4081 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4082 lo = (unsigned HOST_WIDE_INT) -1;
4085 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4087 if (TYPE_UNSIGNED (etype))
4089 etype = lang_hooks.types.signed_type (etype);
4090 exp = fold_convert (etype, exp);
4092 return fold_build2 (GT_EXPR, type, exp,
4093 build_int_cst (etype, 0));
4097 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4098 This requires wrap-around arithmetic for the type of the expression. */
4099 switch (TREE_CODE (etype))
4101 case INTEGER_TYPE:
4102 /* There is no requirement that LOW be within the range of ETYPE
4103 if the latter is a subtype. It must, however, be within the base
4104 type of ETYPE. So be sure we do the subtraction in that type. */
4105 if (TREE_TYPE (etype))
4106 etype = TREE_TYPE (etype);
4107 break;
4109 case ENUMERAL_TYPE:
4110 case BOOLEAN_TYPE:
4111 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4112 TYPE_UNSIGNED (etype));
4113 break;
4115 default:
4116 break;
4119 /* If we don't have wrap-around arithmetic up front, try to force it. */
4120 if (TREE_CODE (etype) == INTEGER_TYPE
4121 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4123 tree utype, minv, maxv;
4125 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4126 for the type in question, as we rely on this here. */
4127 utype = lang_hooks.types.unsigned_type (etype);
4128 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4129 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4130 integer_one_node, 1);
4131 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4133 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4134 minv, 1, maxv, 1)))
4135 etype = utype;
4136 else
4137 return 0;
4140 high = fold_convert (etype, high);
4141 low = fold_convert (etype, low);
4142 exp = fold_convert (etype, exp);
4144 value = const_binop (MINUS_EXPR, high, low, 0);
4146 if (value != 0 && !TREE_OVERFLOW (value))
4147 return build_range_check (type,
4148 fold_build2 (MINUS_EXPR, etype, exp, low),
4149 1, build_int_cst (etype, 0), value);
4151 return 0;
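/* Editor's note: a standalone sketch (not part of GCC) of the final
   transformation above.  With wrap-around (unsigned) arithmetic,
   LOW <= X && X <= HIGH collapses into one comparison of X - LOW
   against HIGH - LOW.  */

static int
example_range_check (int x, int low, int high)
{
  /* Equivalent to low <= x && x <= high whenever low <= high.  */
  return (unsigned) x - (unsigned) low <= (unsigned) high - (unsigned) low;
}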
4154 /* Return the predecessor of VAL in its type, handling the infinite case. */
4156 static tree
4157 range_predecessor (tree val)
4159 tree type = TREE_TYPE (val);
4161 if (INTEGRAL_TYPE_P (type) && val == TYPE_MIN_VALUE (type))
4162 return 0;
4163 else
4164 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4167 /* Return the successor of VAL in its type, handling the infinite case. */
4169 static tree
4170 range_successor (tree val)
4172 tree type = TREE_TYPE (val);
4174 if (INTEGRAL_TYPE_P (type) && val == TYPE_MAX_VALUE (type))
4175 return 0;
4176 else
4177 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
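/* Editor's note: a standalone sketch (not part of GCC) of the
   successor convention above, using a null return where the tree code
   returns 0 for the infinite case.  INT_MAX is written out so the
   sketch needs no headers.  */

static int *
example_successor (int val, int *out)
{
  if (val == 2147483647)	/* INT_MAX has no finite successor.  */
    return 0;
  *out = val + 1;
  return out;
}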
4180 /* Given two ranges, see if we can merge them into one. Return 1 if we
4181 can, 0 if we can't. Set the output range into the specified parameters. */
4183 static int
4184 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4185 tree high0, int in1_p, tree low1, tree high1)
4187 int no_overlap;
4188 int subset;
4189 int temp;
4190 tree tem;
4191 int in_p;
4192 tree low, high;
4193 int lowequal = ((low0 == 0 && low1 == 0)
4194 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4195 low0, 0, low1, 0)));
4196 int highequal = ((high0 == 0 && high1 == 0)
4197 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4198 high0, 1, high1, 1)));
4200 /* Make range 0 be the range that starts first, or ends last if they
4201 start at the same value. Swap them if that is not already so. */
4202 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4203 low0, 0, low1, 0))
4204 || (lowequal
4205 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4206 high1, 1, high0, 1))))
4208 temp = in0_p, in0_p = in1_p, in1_p = temp;
4209 tem = low0, low0 = low1, low1 = tem;
4210 tem = high0, high0 = high1, high1 = tem;
4213 /* Now flag two cases, whether the ranges are disjoint or whether the
4214 second range is totally subsumed in the first. Note that the tests
4215 below are simplified by the ones above. */
4216 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4217 high0, 1, low1, 0));
4218 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4219 high1, 1, high0, 1));
4221 /* We now have four cases, depending on whether we are including or
4222 excluding the two ranges. */
4223 if (in0_p && in1_p)
4225 /* If they don't overlap, the result is false. If the second range
4226 is a subset it is the result. Otherwise, the range is from the start
4227 of the second to the end of the first. */
4228 if (no_overlap)
4229 in_p = 0, low = high = 0;
4230 else if (subset)
4231 in_p = 1, low = low1, high = high1;
4232 else
4233 in_p = 1, low = low1, high = high0;
4236 else if (in0_p && ! in1_p)
4238 /* If they don't overlap, the result is the first range. If they are
4239 equal, the result is false. If the second range is a subset of the
4240 first, and the ranges begin at the same place, we go from just after
4241 the end of the second range to the end of the first. If the second
4242 range is not a subset of the first, or if it is a subset and both
4243 ranges end at the same place, the range starts at the start of the
4244 first range and ends just before the second range.
4245 Otherwise, we can't describe this as a single range. */
4246 if (no_overlap)
4247 in_p = 1, low = low0, high = high0;
4248 else if (lowequal && highequal)
4249 in_p = 0, low = high = 0;
4250 else if (subset && lowequal)
4252 low = range_successor (high1);
4253 high = high0;
4254 in_p = (low != 0);
4256 else if (! subset || highequal)
4258 low = low0;
4259 high = range_predecessor (low1);
4260 in_p = (high != 0);
4262 else
4263 return 0;
4266 else if (! in0_p && in1_p)
4268 /* If they don't overlap, the result is the second range. If the second
4269 is a subset of the first, the result is false. Otherwise,
4270 the range starts just after the first range and ends at the
4271 end of the second. */
4272 if (no_overlap)
4273 in_p = 1, low = low1, high = high1;
4274 else if (subset || highequal)
4275 in_p = 0, low = high = 0;
4276 else
4278 low = range_successor (high0);
4279 high = high1;
4280 in_p = (low != 0);
4284 else
4286 /* The case where we are excluding both ranges. Here the complex case
4287 is if they don't overlap. In that case, the only time we have a
4288 range is if they are adjacent. If the second is a subset of the
4289 first, the result is the first. Otherwise, the range to exclude
4290 starts at the beginning of the first range and ends at the end of the
4291 second. */
4292 if (no_overlap)
4294 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4295 range_successor (high0),
4296 1, low1, 0)))
4297 in_p = 0, low = low0, high = high1;
4298 else
4300 /* Canonicalize - [min, x] into - [-, x]. */
4301 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4302 switch (TREE_CODE (TREE_TYPE (low0)))
4304 case ENUMERAL_TYPE:
4305 if (TYPE_PRECISION (TREE_TYPE (low0))
4306 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4307 break;
4308 /* FALLTHROUGH */
4309 case INTEGER_TYPE:
4310 if (tree_int_cst_equal (low0,
4311 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4312 low0 = 0;
4313 break;
4314 case POINTER_TYPE:
4315 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4316 && integer_zerop (low0))
4317 low0 = 0;
4318 break;
4319 default:
4320 break;
4323 /* Canonicalize - [x, max] into - [x, -]. */
4324 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4325 switch (TREE_CODE (TREE_TYPE (high1)))
4327 case ENUMERAL_TYPE:
4328 if (TYPE_PRECISION (TREE_TYPE (high1))
4329 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4330 break;
4331 /* FALLTHROUGH */
4332 case INTEGER_TYPE:
4333 if (tree_int_cst_equal (high1,
4334 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4335 high1 = 0;
4336 break;
4337 case POINTER_TYPE:
4338 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4339 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4340 high1, 1,
4341 integer_one_node, 1)))
4342 high1 = 0;
4343 break;
4344 default:
4345 break;
4348 /* The ranges might be also adjacent between the maximum and
4349 minimum values of the given type. For
4350 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4351 return + [x + 1, y - 1]. */
4352 if (low0 == 0 && high1 == 0)
4354 low = range_successor (high0);
4355 high = range_predecessor (low1);
4356 if (low == 0 || high == 0)
4357 return 0;
4359 in_p = 1;
4361 else
4362 return 0;
4365 else if (subset)
4366 in_p = 0, low = low0, high = high0;
4367 else
4368 in_p = 0, low = low0, high = high1;
4371 *pin_p = in_p, *plow = low, *phigh = high;
4372 return 1;
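/* Editor's note: a standalone sketch (not part of GCC) of the
   simplest case above, intersecting two included finite ranges (the
   in0_p && in1_p case with all four bounds present).  */

static int
example_intersect (int low0, int high0, int low1, int high1,
		   int *plow, int *phigh)
{
  *plow = low0 > low1 ? low0 : low1;
  *phigh = high0 < high1 ? high0 : high1;
  /* An empty intersection corresponds to the "result is false" case.  */
  return *plow <= *phigh;
}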
4376 /* Subroutine of fold, looking inside expressions of the form
4377 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4378 of the COND_EXPR. This function is being used also to optimize
4379 A op B ? C : A, by reversing the comparison first.
4381 Return a folded expression whose code is not a COND_EXPR
4382 anymore, or NULL_TREE if no folding opportunity is found. */
4384 static tree
4385 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4387 enum tree_code comp_code = TREE_CODE (arg0);
4388 tree arg00 = TREE_OPERAND (arg0, 0);
4389 tree arg01 = TREE_OPERAND (arg0, 1);
4390 tree arg1_type = TREE_TYPE (arg1);
4391 tree tem;
4393 STRIP_NOPS (arg1);
4394 STRIP_NOPS (arg2);
4396 /* If we have A op 0 ? A : -A, consider applying the following
4397 transformations:
4399 A == 0? A : -A same as -A
4400 A != 0? A : -A same as A
4401 A >= 0? A : -A same as abs (A)
4402 A > 0? A : -A same as abs (A)
4403 A <= 0? A : -A same as -abs (A)
4404 A < 0? A : -A same as -abs (A)
4406 None of these transformations work for modes with signed
4407 zeros. If A is +/-0, the first two transformations will
4408 change the sign of the result (from +0 to -0, or vice
4409 versa). The last four will fix the sign of the result,
4410 even though the original expressions could be positive or
4411 negative, depending on the sign of A.
4413 Note that all these transformations are correct if A is
4414 NaN, since the two alternatives (A and -A) are also NaNs. */
4415 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4416 ? real_zerop (arg01)
4417 : integer_zerop (arg01))
4418 && ((TREE_CODE (arg2) == NEGATE_EXPR
4419 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4420 /* In the case that A is of the form X-Y, '-A' (arg2) may
4421 have already been folded to Y-X, check for that. */
4422 || (TREE_CODE (arg1) == MINUS_EXPR
4423 && TREE_CODE (arg2) == MINUS_EXPR
4424 && operand_equal_p (TREE_OPERAND (arg1, 0),
4425 TREE_OPERAND (arg2, 1), 0)
4426 && operand_equal_p (TREE_OPERAND (arg1, 1),
4427 TREE_OPERAND (arg2, 0), 0))))
4428 switch (comp_code)
4430 case EQ_EXPR:
4431 case UNEQ_EXPR:
4432 tem = fold_convert (arg1_type, arg1);
4433 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4434 case NE_EXPR:
4435 case LTGT_EXPR:
4436 return pedantic_non_lvalue (fold_convert (type, arg1));
4437 case UNGE_EXPR:
4438 case UNGT_EXPR:
4439 if (flag_trapping_math)
4440 break;
4441 /* Fall through. */
4442 case GE_EXPR:
4443 case GT_EXPR:
4444 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4445 arg1 = fold_convert (lang_hooks.types.signed_type
4446 (TREE_TYPE (arg1)), arg1);
4447 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4448 return pedantic_non_lvalue (fold_convert (type, tem));
4449 case UNLE_EXPR:
4450 case UNLT_EXPR:
4451 if (flag_trapping_math)
4452 break;
4453 case LE_EXPR:
4454 case LT_EXPR:
4455 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4456 arg1 = fold_convert (lang_hooks.types.signed_type
4457 (TREE_TYPE (arg1)), arg1);
4458 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4459 return negate_expr (fold_convert (type, tem));
4460 default:
4461 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4462 break;
4465 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4466 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4467 both transformations are correct when A is NaN: A != 0
4468 is then true, and A == 0 is false. */
4470 if (integer_zerop (arg01) && integer_zerop (arg2))
4472 if (comp_code == NE_EXPR)
4473 return pedantic_non_lvalue (fold_convert (type, arg1));
4474 else if (comp_code == EQ_EXPR)
4475 return build_int_cst (type, 0);
4478 /* Try some transformations of A op B ? A : B.
4480 A == B? A : B same as B
4481 A != B? A : B same as A
4482 A >= B? A : B same as max (A, B)
4483 A > B? A : B same as max (B, A)
4484 A <= B? A : B same as min (A, B)
4485 A < B? A : B same as min (B, A)
4487 As above, these transformations don't work in the presence
4488 of signed zeros. For example, if A and B are zeros of
4489 opposite sign, the first two transformations will change
4490 the sign of the result. In the last four, the original
4491 expressions give different results for (A=+0, B=-0) and
4492 (A=-0, B=+0), but the transformed expressions do not.
4494 The first two transformations are correct if either A or B
4495 is a NaN. In the first transformation, the condition will
4496 be false, and B will indeed be chosen. In the case of the
4497 second transformation, the condition A != B will be true,
4498 and A will be chosen.
4500 The conversions to max() and min() are not correct if B is
4501 a number and A is not. The conditions in the original
4502 expressions will be false, so all four give B. The min()
4503 and max() versions would give a NaN instead. */
4504 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4505 /* Avoid these transformations if the COND_EXPR may be used
4506 as an lvalue in the C++ front-end. PR c++/19199. */
4507 && (in_gimple_form
4508 || strcmp (lang_hooks.name, "GNU C++") != 0
4509 || ! maybe_lvalue_p (arg1)
4510 || ! maybe_lvalue_p (arg2)))
4512 tree comp_op0 = arg00;
4513 tree comp_op1 = arg01;
4514 tree comp_type = TREE_TYPE (comp_op0);
4516 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4517 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4519 comp_type = type;
4520 comp_op0 = arg1;
4521 comp_op1 = arg2;
4524 switch (comp_code)
4526 case EQ_EXPR:
4527 return pedantic_non_lvalue (fold_convert (type, arg2));
4528 case NE_EXPR:
4529 return pedantic_non_lvalue (fold_convert (type, arg1));
4530 case LE_EXPR:
4531 case LT_EXPR:
4532 case UNLE_EXPR:
4533 case UNLT_EXPR:
4534 /* In C++ a ?: expression can be an lvalue, so put the
4535 operand which will be used if they are equal first
4536 so that we can convert this back to the
4537 corresponding COND_EXPR. */
4538 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4540 comp_op0 = fold_convert (comp_type, comp_op0);
4541 comp_op1 = fold_convert (comp_type, comp_op1);
4542 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4543 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4544 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4545 return pedantic_non_lvalue (fold_convert (type, tem));
4547 break;
4548 case GE_EXPR:
4549 case GT_EXPR:
4550 case UNGE_EXPR:
4551 case UNGT_EXPR:
4552 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4554 comp_op0 = fold_convert (comp_type, comp_op0);
4555 comp_op1 = fold_convert (comp_type, comp_op1);
4556 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4557 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4558 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4559 return pedantic_non_lvalue (fold_convert (type, tem));
4561 break;
4562 case UNEQ_EXPR:
4563 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4564 return pedantic_non_lvalue (fold_convert (type, arg2));
4565 break;
4566 case LTGT_EXPR:
4567 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4568 return pedantic_non_lvalue (fold_convert (type, arg1));
4569 break;
4570 default:
4571 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4572 break;
4576 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4577 we might still be able to simplify this. For example,
4578 if C1 is one less or one more than C2, this might have started
4579 out as a MIN or MAX and been transformed by this function.
4580 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4582 if (INTEGRAL_TYPE_P (type)
4583 && TREE_CODE (arg01) == INTEGER_CST
4584 && TREE_CODE (arg2) == INTEGER_CST)
4585 switch (comp_code)
4587 case EQ_EXPR:
4588 /* We can replace A with C1 in this case. */
4589 arg1 = fold_convert (type, arg01);
4590 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4592 case LT_EXPR:
4593 /* If C1 is C2 + 1, this is min(A, C2). */
4594 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4595 OEP_ONLY_CONST)
4596 && operand_equal_p (arg01,
4597 const_binop (PLUS_EXPR, arg2,
4598 integer_one_node, 0),
4599 OEP_ONLY_CONST))
4600 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4601 type, arg1, arg2));
4602 break;
4604 case LE_EXPR:
4605 /* If C1 is C2 - 1, this is min(A, C2). */
4606 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4607 OEP_ONLY_CONST)
4608 && operand_equal_p (arg01,
4609 const_binop (MINUS_EXPR, arg2,
4610 integer_one_node, 0),
4611 OEP_ONLY_CONST))
4612 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4613 type, arg1, arg2));
4614 break;
4616 case GT_EXPR:
4617 /* If C1 is C2 - 1, this is max(A, C2). */
4618 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4619 OEP_ONLY_CONST)
4620 && operand_equal_p (arg01,
4621 const_binop (MINUS_EXPR, arg2,
4622 integer_one_node, 0),
4623 OEP_ONLY_CONST))
4624 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4625 type, arg1, arg2));
4626 break;
4628 case GE_EXPR:
4629 /* If C1 is C2 + 1, this is max(A, C2). */
4630 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4631 OEP_ONLY_CONST)
4632 && operand_equal_p (arg01,
4633 const_binop (PLUS_EXPR, arg2,
4634 integer_one_node, 0),
4635 OEP_ONLY_CONST))
4636 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4637 type, arg1, arg2));
4638 break;
4639 case NE_EXPR:
4640 break;
4641 default:
4642 gcc_unreachable ();
4645 return NULL_TREE;
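/* Editor's note: a standalone sketch (not part of GCC) of the integer
   shapes matched above.  For floating-point types the signed-zero and
   NaN caveats in the comments apply; for integers the rewrites are
   exact.  */

static int
example_abs (int a)
{
  return a >= 0 ? a : -a;	/* A >= 0 ? A : -A  ==>  ABS_EXPR  */
}

static int
example_min (int a, int b)
{
  return a < b ? a : b;		/* A < B ? A : B  ==>  MIN_EXPR  */
}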
4650 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4651 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4652 #endif
4654 /* EXP is some logical combination of boolean tests. See if we can
4655 merge it into some range test. Return the new tree if so. */
4657 static tree
4658 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4660 int or_op = (code == TRUTH_ORIF_EXPR
4661 || code == TRUTH_OR_EXPR);
4662 int in0_p, in1_p, in_p;
4663 tree low0, low1, low, high0, high1, high;
4664 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4665 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4666 tree tem;
4668 /* If this is an OR operation, invert both sides; we will invert
4669 again at the end. */
4670 if (or_op)
4671 in0_p = ! in0_p, in1_p = ! in1_p;
4673 /* If both expressions are the same, if we can merge the ranges, and we
4674 can build the range test, return it or its inversion. If one of the
4675 ranges is always true or always false, consider it to be the same
4676 expression as the other. */
4677 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4678 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4679 in1_p, low1, high1)
4680 && 0 != (tem = (build_range_check (type,
4681 lhs != 0 ? lhs
4682 : rhs != 0 ? rhs : integer_zero_node,
4683 in_p, low, high))))
4684 return or_op ? invert_truthvalue (tem) : tem;
4686 /* On machines where the branch cost is expensive, if this is a
4687 short-circuited branch and the underlying object on both sides
4688 is the same, make a non-short-circuit operation. */
4689 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4690 && lhs != 0 && rhs != 0
4691 && (code == TRUTH_ANDIF_EXPR
4692 || code == TRUTH_ORIF_EXPR)
4693 && operand_equal_p (lhs, rhs, 0))
4695 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4696 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4697 which case we can't do this. */
4698 if (simple_operand_p (lhs))
4699 return build2 (code == TRUTH_ANDIF_EXPR
4700 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4701 type, op0, op1);
4703 else if (lang_hooks.decls.global_bindings_p () == 0
4704 && ! CONTAINS_PLACEHOLDER_P (lhs))
4706 tree common = save_expr (lhs);
4708 if (0 != (lhs = build_range_check (type, common,
4709 or_op ? ! in0_p : in0_p,
4710 low0, high0))
4711 && (0 != (rhs = build_range_check (type, common,
4712 or_op ? ! in1_p : in1_p,
4713 low1, high1))))
4714 return build2 (code == TRUTH_ANDIF_EXPR
4715 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4716 type, lhs, rhs);
4720 return 0;
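/* Editor's note: two standalone sketches (not part of GCC) of the
   rewrites fold_range_test aims for.  First, tests on the same operand
   merge into one range check; second, on targets where branches are
   expensive, a short-circuit operator over simple operands becomes a
   branch-free bitwise one.  */

static int
example_merged_range (unsigned x)
{
  /* x == 0 || x == 1 merges into the single range test x <= 1.  */
  return x <= 1;
}

static int
example_non_short_circuit (int a, int b)
{
  /* (a != 0) && (b != 0) evaluated without a second branch.  */
  return (a != 0) & (b != 0);
}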
4723 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4724 bit value. Arrange things so the extra bits will be set to zero if and
4725 only if C is sign-extended to its full width. If MASK is nonzero,
4726 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4728 static tree
4729 unextend (tree c, int p, int unsignedp, tree mask)
4731 tree type = TREE_TYPE (c);
4732 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4733 tree temp;
4735 if (p == modesize || unsignedp)
4736 return c;
4738 /* We work by getting just the sign bit into the low-order bit, then
4739 into the high-order bit, then sign-extend. We then XOR that value
4740 with C. */
4741 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4742 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4744 /* We must use a signed type in order to get an arithmetic right shift.
4745 However, we must also avoid introducing accidental overflows, so that
4746 a subsequent call to integer_zerop will work. Hence we must
4747 do the type conversion here. At this point, the constant is either
4748 zero or one, and the conversion to a signed type can never overflow.
4749 We could get an overflow if this conversion is done anywhere else. */
4750 if (TYPE_UNSIGNED (type))
4751 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4753 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4754 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4755 if (mask != 0)
4756 temp = const_binop (BIT_AND_EXPR, temp,
4757 fold_convert (TREE_TYPE (c), mask), 0);
4758 /* If necessary, convert the type back to match the type of C. */
4759 if (TYPE_UNSIGNED (type))
4760 temp = fold_convert (type, temp);
4762 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
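/* Editor's note: a standalone sketch (not part of GCC) of the same
   sign-extension idea unextend builds out of tree constants: isolate
   the sign bit of a P-bit field, then fold it back in.  The
   xor-and-subtract form below is the classic branch-free equivalent of
   the shift-up, arithmetic-shift-down sequence described above.  */

static long long
example_sign_extend (unsigned long long c, int p)
{
  unsigned long long m = 1ULL << (p - 1);	/* sign bit of the field */
  return (long long) ((c ^ m) - m);
}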
4765 /* Find ways of folding logical expressions of LHS and RHS:
4766 Try to merge two comparisons to the same innermost item.
4767 Look for range tests like "ch >= '0' && ch <= '9'".
4768 Look for combinations of simple terms on machines with expensive branches
4769 and evaluate the RHS unconditionally.
4771 For example, if we have p->a == 2 && p->b == 4 and we can make an
4772 object large enough to span both A and B, we can do this with a comparison
4773 against the object ANDed with the appropriate mask.
4775 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4776 operations to do this with one comparison.
4778 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4779 function and the one above.
4781 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4782 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4784 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4785 two operands.
4787 We return the simplified tree or 0 if no optimization is possible. */
4789 static tree
4790 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4792 /* If this is the "or" of two comparisons, we can do something if
4793 the comparisons are NE_EXPR. If this is the "and", we can do something
4794 if the comparisons are EQ_EXPR. I.e.,
4795 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4797 WANTED_CODE is this operation code. For single bit fields, we can
4798 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4799 comparison for one-bit fields. */
4801 enum tree_code wanted_code;
4802 enum tree_code lcode, rcode;
4803 tree ll_arg, lr_arg, rl_arg, rr_arg;
4804 tree ll_inner, lr_inner, rl_inner, rr_inner;
4805 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4806 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4807 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4808 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4809 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4810 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4811 enum machine_mode lnmode, rnmode;
4812 tree ll_mask, lr_mask, rl_mask, rr_mask;
4813 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4814 tree l_const, r_const;
4815 tree lntype, rntype, result;
4816 int first_bit, end_bit;
4817 int volatilep;
4819 /* Start by getting the comparison codes. Fail if anything is volatile.
4820 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4821 it were surrounded with a NE_EXPR. */
4823 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4824 return 0;
4826 lcode = TREE_CODE (lhs);
4827 rcode = TREE_CODE (rhs);
4829 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4831 lhs = build2 (NE_EXPR, truth_type, lhs,
4832 build_int_cst (TREE_TYPE (lhs), 0));
4833 lcode = NE_EXPR;
4836 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4838 rhs = build2 (NE_EXPR, truth_type, rhs,
4839 build_int_cst (TREE_TYPE (rhs), 0));
4840 rcode = NE_EXPR;
4843 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4844 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4845 return 0;
4847 ll_arg = TREE_OPERAND (lhs, 0);
4848 lr_arg = TREE_OPERAND (lhs, 1);
4849 rl_arg = TREE_OPERAND (rhs, 0);
4850 rr_arg = TREE_OPERAND (rhs, 1);
4852 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4853 if (simple_operand_p (ll_arg)
4854 && simple_operand_p (lr_arg))
4856 tree result;
4857 if (operand_equal_p (ll_arg, rl_arg, 0)
4858 && operand_equal_p (lr_arg, rr_arg, 0))
4860 result = combine_comparisons (code, lcode, rcode,
4861 truth_type, ll_arg, lr_arg);
4862 if (result)
4863 return result;
4865 else if (operand_equal_p (ll_arg, rr_arg, 0)
4866 && operand_equal_p (lr_arg, rl_arg, 0))
4868 result = combine_comparisons (code, lcode,
4869 swap_tree_comparison (rcode),
4870 truth_type, ll_arg, lr_arg);
4871 if (result)
4872 return result;
4876 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4877 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4879 /* If the RHS can be evaluated unconditionally and its operands are
4880 simple, it wins to evaluate the RHS unconditionally on machines
4881 with expensive branches. In this case, this isn't a comparison
4882 that can be merged. Avoid doing this if the RHS is a floating-point
4883 comparison since those can trap. */
4885 if (BRANCH_COST >= 2
4886 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4887 && simple_operand_p (rl_arg)
4888 && simple_operand_p (rr_arg))
4890 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4891 if (code == TRUTH_OR_EXPR
4892 && lcode == NE_EXPR && integer_zerop (lr_arg)
4893 && rcode == NE_EXPR && integer_zerop (rr_arg)
4894 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4895 return build2 (NE_EXPR, truth_type,
4896 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4897 ll_arg, rl_arg),
4898 build_int_cst (TREE_TYPE (ll_arg), 0));
4900 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4901 if (code == TRUTH_AND_EXPR
4902 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4903 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4904 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4905 return build2 (EQ_EXPR, truth_type,
4906 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4907 ll_arg, rl_arg),
4908 build_int_cst (TREE_TYPE (ll_arg), 0));
4910 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4911 return build2 (code, truth_type, lhs, rhs);
4914 /* See if the comparisons can be merged. Then get all the parameters for
4915 each side. */
4917 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4918 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4919 return 0;
4921 volatilep = 0;
4922 ll_inner = decode_field_reference (ll_arg,
4923 &ll_bitsize, &ll_bitpos, &ll_mode,
4924 &ll_unsignedp, &volatilep, &ll_mask,
4925 &ll_and_mask);
4926 lr_inner = decode_field_reference (lr_arg,
4927 &lr_bitsize, &lr_bitpos, &lr_mode,
4928 &lr_unsignedp, &volatilep, &lr_mask,
4929 &lr_and_mask);
4930 rl_inner = decode_field_reference (rl_arg,
4931 &rl_bitsize, &rl_bitpos, &rl_mode,
4932 &rl_unsignedp, &volatilep, &rl_mask,
4933 &rl_and_mask);
4934 rr_inner = decode_field_reference (rr_arg,
4935 &rr_bitsize, &rr_bitpos, &rr_mode,
4936 &rr_unsignedp, &volatilep, &rr_mask,
4937 &rr_and_mask);
4939 /* It must be true that the inner operation on the lhs of each
4940 comparison must be the same if we are to be able to do anything.
4941 Then see if we have constants. If not, the same must be true for
4942 the rhs's. */
4943 if (volatilep || ll_inner == 0 || rl_inner == 0
4944 || ! operand_equal_p (ll_inner, rl_inner, 0))
4945 return 0;
4947 if (TREE_CODE (lr_arg) == INTEGER_CST
4948 && TREE_CODE (rr_arg) == INTEGER_CST)
4949 l_const = lr_arg, r_const = rr_arg;
4950 else if (lr_inner == 0 || rr_inner == 0
4951 || ! operand_equal_p (lr_inner, rr_inner, 0))
4952 return 0;
4953 else
4954 l_const = r_const = 0;
4956 /* If either comparison code is not correct for our logical operation,
4957 fail. However, we can convert a one-bit comparison against zero into
4958 the opposite comparison against that bit being set in the field. */
4960 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4961 if (lcode != wanted_code)
4963 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4965 /* Make the left operand unsigned, since we are only interested
4966 in the value of one bit. Otherwise we are doing the wrong
4967 thing below. */
4968 ll_unsignedp = 1;
4969 l_const = ll_mask;
4971 else
4972 return 0;
4975 /* This is analogous to the code for l_const above. */
4976 if (rcode != wanted_code)
4978 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4980 rl_unsignedp = 1;
4981 r_const = rl_mask;
4983 else
4984 return 0;
4987 /* After this point all optimizations will generate bit-field
4988 references, which we might not want. */
4989 if (! lang_hooks.can_use_bit_fields_p ())
4990 return 0;
4992 /* See if we can find a mode that contains both fields being compared on
4993 the left. If we can't, fail. Otherwise, update all constants and masks
4994 to be relative to a field of that size. */
4995 first_bit = MIN (ll_bitpos, rl_bitpos);
4996 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4997 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4998 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4999 volatilep);
5000 if (lnmode == VOIDmode)
5001 return 0;
5003 lnbitsize = GET_MODE_BITSIZE (lnmode);
5004 lnbitpos = first_bit & ~ (lnbitsize - 1);
5005 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5006 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5008 if (BYTES_BIG_ENDIAN)
5010 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5011 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5014 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5015 size_int (xll_bitpos), 0);
5016 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5017 size_int (xrl_bitpos), 0);
5019 if (l_const)
5021 l_const = fold_convert (lntype, l_const);
5022 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5023 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5024 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5025 fold_build1 (BIT_NOT_EXPR,
5026 lntype, ll_mask),
5027 0)))
5029 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5031 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5034 if (r_const)
5036 r_const = fold_convert (lntype, r_const);
5037 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5038 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5039 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5040 fold_build1 (BIT_NOT_EXPR,
5041 lntype, rl_mask),
5042 0)))
5044 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5046 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5050 /* If the right sides are not constant, do the same for them. Also,
5051 disallow this optimization if a size or signedness mismatch occurs
5052 between the left and right sides. */
5053 if (l_const == 0)
5055 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5056 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5057 /* Make sure the two fields on the right
5058 correspond to the left without being swapped. */
5059 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5060 return 0;
5062 first_bit = MIN (lr_bitpos, rr_bitpos);
5063 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5064 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5065 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5066 volatilep);
5067 if (rnmode == VOIDmode)
5068 return 0;
5070 rnbitsize = GET_MODE_BITSIZE (rnmode);
5071 rnbitpos = first_bit & ~ (rnbitsize - 1);
5072 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5073 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5075 if (BYTES_BIG_ENDIAN)
5077 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5078 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5081 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5082 size_int (xlr_bitpos), 0);
5083 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5084 size_int (xrr_bitpos), 0);
5086 /* Make a mask that corresponds to both fields being compared.
5087 Do this for both items being compared. If the operands are the
5088 same size and the bits being compared are in the same position
5089 then we can do this by masking both and comparing the masked
5090 results. */
5091 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5092 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5093 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5095 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5096 ll_unsignedp || rl_unsignedp);
5097 if (! all_ones_mask_p (ll_mask, lnbitsize))
5098 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5100 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5101 lr_unsignedp || rr_unsignedp);
5102 if (! all_ones_mask_p (lr_mask, rnbitsize))
5103 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5105 return build2 (wanted_code, truth_type, lhs, rhs);
5108 /* There is still another way we can do something: If both pairs of
5109 fields being compared are adjacent, we may be able to make a wider
5110 field containing them both.
5112 Note that we still must mask the lhs/rhs expressions. Furthermore,
5113 the mask must be shifted to account for the shift done by
5114 make_bit_field_ref. */
5115 if ((ll_bitsize + ll_bitpos == rl_bitpos
5116 && lr_bitsize + lr_bitpos == rr_bitpos)
5117 || (ll_bitpos == rl_bitpos + rl_bitsize
5118 && lr_bitpos == rr_bitpos + rr_bitsize))
5120 tree type;
5122 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5123 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5124 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5125 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5127 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5128 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5129 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5130 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5132 /* Convert to the smaller type before masking out unwanted bits. */
5133 type = lntype;
5134 if (lntype != rntype)
5136 if (lnbitsize > rnbitsize)
5138 lhs = fold_convert (rntype, lhs);
5139 ll_mask = fold_convert (rntype, ll_mask);
5140 type = rntype;
5142 else if (lnbitsize < rnbitsize)
5144 rhs = fold_convert (lntype, rhs);
5145 lr_mask = fold_convert (lntype, lr_mask);
5146 type = lntype;
5150 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5151 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5153 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5154 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5156 return build2 (wanted_code, truth_type, lhs, rhs);
5159 return 0;
5162 /* Handle the case of comparisons with constants. If there is something in
5163 common between the masks, those bits of the constants must be the same.
5164 If not, the condition is always false. Test for this to avoid generating
5165 incorrect code below. */
5166 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5167 if (! integer_zerop (result)
5168 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5169 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5171 if (wanted_code == NE_EXPR)
5173 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5174 return constant_boolean_node (true, truth_type);
5176 else
5178 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5179 return constant_boolean_node (false, truth_type);
5183 /* Construct the expression we will return. First get the component
5184 reference we will make. Unless the mask is all ones the width of
5185 that field, perform the mask operation. Then compare with the
5186 merged constant. */
5187 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5188 ll_unsignedp || rl_unsignedp);
5190 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5191 if (! all_ones_mask_p (ll_mask, lnbitsize))
5192 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5194 return build2 (wanted_code, truth_type, result,
5195 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
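/* Editor's note: a standalone sketch (not part of GCC) of the effect
   fold_truthop is after.  Two comparisons of adjacent byte-sized
   fields become one wider masked comparison.  The struct, the
   constants, and the little-endian layout are hypothetical.  */

struct example_pair { unsigned char a; unsigned char b; };

static int
example_merged_compare (const struct example_pair *p)
{
  unsigned short word;
  __builtin_memcpy (&word, p, sizeof word);
  /* p->a == 2 && p->b == 4, checked with a single comparison.  */
  return word == (unsigned short) (2 | (4 << 8));
}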
5198 /* Optimize a comparison (CODE, with operands OP0 and OP1) of a MIN_EXPR
5199 or MAX_EXPR against a constant. */
5201 static tree
5202 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5204 tree arg0 = op0;
5205 enum tree_code op_code;
5206 tree comp_const = op1;
5207 tree minmax_const;
5208 int consts_equal, consts_lt;
5209 tree inner;
5211 STRIP_SIGN_NOPS (arg0);
5213 op_code = TREE_CODE (arg0);
5214 minmax_const = TREE_OPERAND (arg0, 1);
5215 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5216 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5217 inner = TREE_OPERAND (arg0, 0);
5219 /* If something does not permit us to optimize, return the original tree. */
5220 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5221 || TREE_CODE (comp_const) != INTEGER_CST
5222 || TREE_CONSTANT_OVERFLOW (comp_const)
5223 || TREE_CODE (minmax_const) != INTEGER_CST
5224 || TREE_CONSTANT_OVERFLOW (minmax_const))
5225 return NULL_TREE;
5227 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5228 and GT_EXPR, doing the rest with recursive calls using logical
5229 simplifications. */
5230 switch (code)
5232 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5234 /* FIXME: We should be able to invert code without building a
5235 scratch tree node, but doing so would require us to
5236 duplicate a part of invert_truthvalue here. */
5237 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5238 tem = optimize_minmax_comparison (TREE_CODE (tem),
5239 TREE_TYPE (tem),
5240 TREE_OPERAND (tem, 0),
5241 TREE_OPERAND (tem, 1));
5242 return invert_truthvalue (tem);
5245 case GE_EXPR:
5246 return
5247 fold_build2 (TRUTH_ORIF_EXPR, type,
5248 optimize_minmax_comparison
5249 (EQ_EXPR, type, arg0, comp_const),
5250 optimize_minmax_comparison
5251 (GT_EXPR, type, arg0, comp_const));
5253 case EQ_EXPR:
5254 if (op_code == MAX_EXPR && consts_equal)
5255 /* MAX (X, 0) == 0 -> X <= 0 */
5256 return fold_build2 (LE_EXPR, type, inner, comp_const);
5258 else if (op_code == MAX_EXPR && consts_lt)
5259 /* MAX (X, 0) == 5 -> X == 5 */
5260 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5262 else if (op_code == MAX_EXPR)
5263 /* MAX (X, 0) == -1 -> false */
5264 return omit_one_operand (type, integer_zero_node, inner);
5266 else if (consts_equal)
5267 /* MIN (X, 0) == 0 -> X >= 0 */
5268 return fold_build2 (GE_EXPR, type, inner, comp_const);
5270 else if (consts_lt)
5271 /* MIN (X, 0) == 5 -> false */
5272 return omit_one_operand (type, integer_zero_node, inner);
5274 else
5275 /* MIN (X, 0) == -1 -> X == -1 */
5276 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5278 case GT_EXPR:
5279 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5280 /* MAX (X, 0) > 0 -> X > 0
5281 MAX (X, 0) > 5 -> X > 5 */
5282 return fold_build2 (GT_EXPR, type, inner, comp_const);
5284 else if (op_code == MAX_EXPR)
5285 /* MAX (X, 0) > -1 -> true */
5286 return omit_one_operand (type, integer_one_node, inner);
5288 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5289 /* MIN (X, 0) > 0 -> false
5290 MIN (X, 0) > 5 -> false */
5291 return omit_one_operand (type, integer_zero_node, inner);
5293 else
5294 /* MIN (X, 0) > -1 -> X > -1 */
5295 return fold_build2 (GT_EXPR, type, inner, comp_const);
5297 default:
5298 return NULL_TREE;
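/* Editor's note: a standalone sketch (not part of GCC) of two of the
   identities used above, in plain C.  */

static int
example_max (int x, int y)
{
  return x > y ? x : y;
}

static int
example_minmax_identities (int x)
{
  /* MAX (X, 0) > 5 is X > 5, and MAX (X, 0) > -1 is always true.  */
  return (example_max (x, 0) > 5) == (x > 5)
	 && (example_max (x, 0) > -1) == 1;
}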
5302 /* T is an integer expression that is being multiplied or divided by, or
5303 reduced modulo, a constant C (CODE says which operation and what kind of
5304 divide or modulus). See if we can eliminate that operation by folding it with
5305 other operations already in T. WIDE_TYPE, if non-null, is a type that
5306 should be used for the computation if wider than our type.
5308 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5309 (X * 2) + (Y * 4). We must, however, be assured that either the original
5310 expression would not overflow or that overflow is undefined for the type
5311 in the language in question.
5313 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5314 the machine has a multiply-accumulate insn or that this is part of an
5315 addressing calculation.
5317 If we return a non-null expression, it is an equivalent form of the
5318 original computation, but need not be in the original type. */
5320 static tree
5321 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5323 /* To avoid exponential search depth, refuse to allow recursion past
5324 three levels. Beyond that (1) it's highly unlikely that we'll find
5325 something interesting and (2) we've probably processed it before
5326 when we built the inner expression. */
5328 static int depth;
5329 tree ret;
5331 if (depth > 3)
5332 return NULL;
5334 depth++;
5335 ret = extract_muldiv_1 (t, c, code, wide_type);
5336 depth--;
5338 return ret;
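/* Editor's note: a standalone sketch (not part of GCC) of the
   arithmetic fact extract_muldiv exploits: when every term of a sum is
   a multiple of the divisor, the exact division distributes over the
   sum.  */

static long
example_extract_muldiv (long x, long y)
{
  /* (x * 8 + y * 16) / 4 == x * 2 + y * 4, exactly, absent overflow
     in the original expression.  */
  return x * 2 + y * 4;
}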
5341 static tree
5342 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5344 tree type = TREE_TYPE (t);
5345 enum tree_code tcode = TREE_CODE (t);
5346 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5347 > GET_MODE_SIZE (TYPE_MODE (type)))
5348 ? wide_type : type);
5349 tree t1, t2;
5350 int same_p = tcode == code;
5351 tree op0 = NULL_TREE, op1 = NULL_TREE;
5353 /* Don't deal with constants of zero here; they confuse the code below. */
5354 if (integer_zerop (c))
5355 return NULL_TREE;
5357 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5358 op0 = TREE_OPERAND (t, 0);
5360 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5361 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5363 /* Note that we need not handle conditional operations here since fold
5364 already handles those cases. So just do arithmetic here. */
5365 switch (tcode)
5367 case INTEGER_CST:
5368 /* For a constant, we can always simplify if we are a multiply
5369 or (for divide and modulus) if it is a multiple of our constant. */
5370 if (code == MULT_EXPR
5371 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5372 return const_binop (code, fold_convert (ctype, t),
5373 fold_convert (ctype, c), 0);
5374 break;
5376 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5377 /* If op0 is an expression ... */
5378 if ((COMPARISON_CLASS_P (op0)
5379 || UNARY_CLASS_P (op0)
5380 || BINARY_CLASS_P (op0)
5381 || EXPRESSION_CLASS_P (op0))
5382 /* ... and is unsigned, and its type is smaller than ctype,
5383 then we cannot pass through as widening. */
5384 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5385 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5386 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5387 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5388 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5389 /* ... or this is a truncation (t is narrower than op0),
5390 then we cannot pass through this narrowing. */
5391 || (GET_MODE_SIZE (TYPE_MODE (type))
5392 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5393 /* ... or signedness changes for division or modulus,
5394 then we cannot pass through this conversion. */
5395 || (code != MULT_EXPR
5396 && (TYPE_UNSIGNED (ctype)
5397 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5398 break;
5400 /* Pass the constant down and see if we can make a simplification. If
5401 we can, replace this expression with the inner simplification for
5402 possible later conversion to our or some other type. */
5403 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5404 && TREE_CODE (t2) == INTEGER_CST
5405 && ! TREE_CONSTANT_OVERFLOW (t2)
5406 && (0 != (t1 = extract_muldiv (op0, t2, code,
5407 code == MULT_EXPR
5408 ? ctype : NULL_TREE))))
5409 return t1;
5410 break;
5412 case ABS_EXPR:
5413 /* If widening the type changes it from signed to unsigned, then we
5414 must avoid building ABS_EXPR itself as unsigned. */
5415 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5417 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5418 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5420 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5421 return fold_convert (ctype, t1);
5423 break;
5425 /* FALLTHROUGH */
5426 case NEGATE_EXPR:
5427 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5428 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5429 break;
5431 case MIN_EXPR: case MAX_EXPR:
5432 /* If widening the type changes the signedness, then we can't perform
5433 this optimization as that changes the result. */
5434 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5435 break;
5437 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5438 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5439 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5441 if (tree_int_cst_sgn (c) < 0)
5442 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5444 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5445 fold_convert (ctype, t2));
5447 break;
5449 case LSHIFT_EXPR: case RSHIFT_EXPR:
5450 /* If the second operand is constant, this is a multiplication
5451 or floor division, by a power of two, so we can treat it that
5452 way unless the multiplier or divisor overflows. Signed
5453 left-shift overflow is implementation-defined rather than
5454 undefined in C90, so do not convert signed left shift into
5455 multiplication. */
5456 if (TREE_CODE (op1) == INTEGER_CST
5457 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5458 /* const_binop may not detect overflow correctly,
5459 so check for it explicitly here. */
5460 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5461 && TREE_INT_CST_HIGH (op1) == 0
5462 && 0 != (t1 = fold_convert (ctype,
5463 const_binop (LSHIFT_EXPR,
5464 size_one_node,
5465 op1, 0)))
5466 && ! TREE_OVERFLOW (t1))
5467 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5468 ? MULT_EXPR : FLOOR_DIV_EXPR,
5469 ctype, fold_convert (ctype, op0), t1),
5470 c, code, wide_type);
5471 break;
5473 case PLUS_EXPR: case MINUS_EXPR:
5474 /* See if we can eliminate the operation on both sides. If we can, we
5475 can return a new PLUS or MINUS. If we can't, the only remaining
5476 cases where we can do anything are if the second operand is a
5477 constant. */
5478 t1 = extract_muldiv (op0, c, code, wide_type);
5479 t2 = extract_muldiv (op1, c, code, wide_type);
5480 if (t1 != 0 && t2 != 0
5481 && (code == MULT_EXPR
5482 /* If not multiplication, we can only do this if both operands
5483 are divisible by c. */
5484 || (multiple_of_p (ctype, op0, c)
5485 && multiple_of_p (ctype, op1, c))))
5486 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5487 fold_convert (ctype, t2));
5489 /* If this was a subtraction, negate OP1 and turn the operation into an addition.
5490 This simplifies the logic below. */
5491 if (tcode == MINUS_EXPR)
5492 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5494 if (TREE_CODE (op1) != INTEGER_CST)
5495 break;
5497 /* If either OP1 or C are negative, this optimization is not safe for
5498 some of the division and remainder types while for others we need
5499 to change the code. */
5500 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5502 if (code == CEIL_DIV_EXPR)
5503 code = FLOOR_DIV_EXPR;
5504 else if (code == FLOOR_DIV_EXPR)
5505 code = CEIL_DIV_EXPR;
5506 else if (code != MULT_EXPR
5507 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5508 break;
5511 /* If it's a multiply or a division/modulus operation of a multiple
5512 of our constant, do the operation and verify it doesn't overflow. */
5513 if (code == MULT_EXPR
5514 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5516 op1 = const_binop (code, fold_convert (ctype, op1),
5517 fold_convert (ctype, c), 0);
5518 /* We allow the constant to overflow with wrapping semantics. */
5519 if (op1 == 0
5520 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5521 break;
5523 else
5524 break;
5526 /* If we have an unsigned type that is not a sizetype, we cannot widen
5527 the operation since it will change the result if the original
5528 computation overflowed. */
5529 if (TYPE_UNSIGNED (ctype)
5530 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5531 && ctype != type)
5532 break;
5534 /* If we were able to eliminate our operation from the first side,
5535 apply our operation to the second side and reform the PLUS. */
5536 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5537 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5539 /* The last case is if we are a multiply. In that case, we can
5540 apply the distributive law to commute the multiply and addition
5541 if the multiplication of the constants doesn't overflow. */
5542 if (code == MULT_EXPR)
5543 return fold_build2 (tcode, ctype,
5544 fold_build2 (code, ctype,
5545 fold_convert (ctype, op0),
5546 fold_convert (ctype, c)),
5547 op1);
5549 break;
5551 case MULT_EXPR:
5552 /* We have a special case here if we are doing something like
5553 (C * 8) % 4 since we know that's zero. */
5554 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5555 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5556 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5557 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5558 return omit_one_operand (type, integer_zero_node, op0);
5560 /* ... fall through ... */
5562 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5563 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5564 /* If we can extract our operation from the LHS, do so and return a
5565 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5566 do something only if the second operand is a constant. */
5567 if (same_p
5568 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5569 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5570 fold_convert (ctype, op1));
5571 else if (tcode == MULT_EXPR && code == MULT_EXPR
5572 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5573 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5574 fold_convert (ctype, t1));
5575 else if (TREE_CODE (op1) != INTEGER_CST)
5576 return 0;
5578 /* If these are the same operation types, we can associate them
5579 assuming no overflow. */
5580 if (tcode == code
5581 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5582 fold_convert (ctype, c), 0))
5583 && ! TREE_OVERFLOW (t1))
5584 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5586 /* If these operations "cancel" each other, we have the main
5587 optimizations of this pass, which occur when either constant is a
5588 multiple of the other, in which case we replace this with either an
5589 operation of CODE or TCODE.
5591 If we have an unsigned type that is not a sizetype, we cannot do
5592 this since it will change the result if the original computation
5593 overflowed. */
5594 if ((! TYPE_UNSIGNED (ctype)
5595 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5596 && ! flag_wrapv
5597 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5598 || (tcode == MULT_EXPR
5599 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5600 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5602 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5603 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5604 fold_convert (ctype,
5605 const_binop (TRUNC_DIV_EXPR,
5606 op1, c, 0)));
5607 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5608 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5609 fold_convert (ctype,
5610 const_binop (TRUNC_DIV_EXPR,
5611 c, op1, 0)));
5613 break;
5615 default:
5616 break;
5619 return 0;
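/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   What the MIN_EXPR/MAX_EXPR case above means at the source level:
   a division distributes over MIN/MAX, and a negative divisor flips
   MIN into MAX.  A standalone check, assuming a C99 host with
   truncating integer division:  */

#include <assert.h>

static int ex_min (int a, int b) { return a < b ? a : b; }
static int ex_max (int a, int b) { return a > b ? a : b; }

static void
example_extract_muldiv_minmax (void)
{
  int a = 20, b = -35;
  /* MIN (a, b) / 5 -> MIN (a / 5, b / 5).  */
  assert (ex_min (a, b) / 5 == ex_min (a / 5, b / 5));
  /* With c < 0, tcode flips: MIN (a, b) / -5 -> MAX (a / -5, b / -5).  */
  assert (ex_min (a, b) / -5 == ex_max (a / -5, b / -5));
}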
5622 /* Return a node which has the indicated constant VALUE (either 0 or
5623 1), and is of the indicated TYPE. */
5625 tree
5626 constant_boolean_node (int value, tree type)
5628 if (type == integer_type_node)
5629 return value ? integer_one_node : integer_zero_node;
5630 else if (type == boolean_type_node)
5631 return value ? boolean_true_node : boolean_false_node;
5632 else
5633 return build_int_cst (type, value);
5637 /* Return true if expr looks like an ARRAY_REF and set base and
5638 offset to the appropriate trees. If there is no offset,
5639 offset is set to NULL_TREE. Base will be canonicalized to
5640 something you can get the element type from using
5641 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5642 in bytes to the base. */
5644 static bool
5645 extract_array_ref (tree expr, tree *base, tree *offset)
5647 /* One canonical form is a PLUS_EXPR with the first
5648 argument being an ADDR_EXPR with a possible NOP_EXPR
5649 attached. */
5650 if (TREE_CODE (expr) == PLUS_EXPR)
5652 tree op0 = TREE_OPERAND (expr, 0);
5653 tree inner_base, dummy1;
5654 /* Strip NOP_EXPRs here because the C frontends and/or
5655 folders may present us with (int *)&x.a + 4B. */
5656 STRIP_NOPS (op0);
5657 if (extract_array_ref (op0, &inner_base, &dummy1))
5659 *base = inner_base;
5660 if (dummy1 == NULL_TREE)
5661 *offset = TREE_OPERAND (expr, 1);
5662 else
5663 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5664 dummy1, TREE_OPERAND (expr, 1));
5665 return true;
5668 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5669 which we transform into an ADDR_EXPR with appropriate
5670 offset. For other arguments to the ADDR_EXPR we assume
5671 zero offset and as such do not care about the ADDR_EXPR
5672 type and strip possible nops from it. */
5673 else if (TREE_CODE (expr) == ADDR_EXPR)
5675 tree op0 = TREE_OPERAND (expr, 0);
5676 if (TREE_CODE (op0) == ARRAY_REF)
5678 tree idx = TREE_OPERAND (op0, 1);
5679 *base = TREE_OPERAND (op0, 0);
5680 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5681 array_ref_element_size (op0));
5683 else
5685 /* Handle array-to-pointer decay as &a. */
5686 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5687 *base = TREE_OPERAND (expr, 0);
5688 else
5689 *base = expr;
5690 *offset = NULL_TREE;
5692 return true;
5694 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5695 else if (SSA_VAR_P (expr)
5696 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5698 *base = expr;
5699 *offset = NULL_TREE;
5700 return true;
5703 return false;
5707 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5708 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5709 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5710 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5711 COND is the first argument to CODE; otherwise (as in the example
5712 given here), it is the second argument. TYPE is the type of the
5713 original expression. Return NULL_TREE if no simplification is
5714 possible. */
5716 static tree
5717 fold_binary_op_with_conditional_arg (enum tree_code code,
5718 tree type, tree op0, tree op1,
5719 tree cond, tree arg, int cond_first_p)
5721 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5722 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5723 tree test, true_value, false_value;
5724 tree lhs = NULL_TREE;
5725 tree rhs = NULL_TREE;
5727 /* This transformation is only worthwhile if we don't have to wrap
5728 arg in a SAVE_EXPR, and the operation can be simplified on at least
5729 one of the branches once it's pushed inside the COND_EXPR. */
5730 if (!TREE_CONSTANT (arg))
5731 return NULL_TREE;
5733 if (TREE_CODE (cond) == COND_EXPR)
5735 test = TREE_OPERAND (cond, 0);
5736 true_value = TREE_OPERAND (cond, 1);
5737 false_value = TREE_OPERAND (cond, 2);
5738 /* If a branch is a void expression (such as a throw), it does
5739 not make sense to try to perform a logical or arithmetic
5740 operation involving it. */
5741 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5742 lhs = true_value;
5743 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5744 rhs = false_value;
5746 else
5748 tree testtype = TREE_TYPE (cond);
5749 test = cond;
5750 true_value = constant_boolean_node (true, testtype);
5751 false_value = constant_boolean_node (false, testtype);
5754 arg = fold_convert (arg_type, arg);
5755 if (lhs == 0)
5757 true_value = fold_convert (cond_type, true_value);
5758 if (cond_first_p)
5759 lhs = fold_build2 (code, type, true_value, arg);
5760 else
5761 lhs = fold_build2 (code, type, arg, true_value);
5763 if (rhs == 0)
5765 false_value = fold_convert (cond_type, false_value);
5766 if (cond_first_p)
5767 rhs = fold_build2 (code, type, false_value, arg);
5768 else
5769 rhs = fold_build2 (code, type, arg, false_value);
5772 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5773 return fold_convert (type, test);
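/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The transformation above in source terms, assuming ARG is constant
   so no SAVE_EXPR is required: `a + (b ? x : y)' is rewritten as
   `b ? (a + x) : (a + y)', giving each arm a chance to fold.  */

#include <assert.h>

static void
example_cond_arg (int b, int x, int y)
{
  int before = 10 + (b ? x : y);
  int after = b ? (10 + x) : (10 + y);
  assert (before == after);
}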
5777 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5779 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5780 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5781 ADDEND is the same as X.
5783 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5784 and finite. The problematic cases are when X is zero, and its mode
5785 has signed zeros. In the case of rounding towards -infinity,
5786 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5787 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5789 static bool
5790 fold_real_zero_addition_p (tree type, tree addend, int negate)
5792 if (!real_zerop (addend))
5793 return false;
5795 /* Don't allow the fold with -fsignaling-nans. */
5796 if (HONOR_SNANS (TYPE_MODE (type)))
5797 return false;
5799 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5800 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5801 return true;
5803 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5804 if (TREE_CODE (addend) == REAL_CST
5805 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5806 negate = !negate;
5808 /* The mode has signed zeros, and we have to honor their sign.
5809 In this situation, there is only one case we can return true for.
5810 X - 0 is the same as X unless rounding towards -infinity is
5811 supported. */
5812 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
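/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   Why X + 0.0 cannot be folded to X when signed zeros are honored:
   under the default round-to-nearest mode, -0.0 + 0.0 yields +0.0,
   so the addition is observable when X is -0.0, whereas X - 0.0
   preserves the sign.  Assumes an IEEE 754 host with C99 <math.h>:  */

#include <assert.h>
#include <math.h>

static void
example_signed_zero (void)
{
  double nz = -0.0;
  assert (signbit (nz));
  assert (!signbit (nz + 0.0));	/* -0.0 + 0.0 == +0.0, sign lost.  */
  assert (signbit (nz - 0.0));	/* -0.0 - 0.0 == -0.0, sign kept.  */
}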
5815 /* Subroutine of fold() that checks comparisons of built-in math
5816 functions against real constants.
5818 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5819 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5820 is the type of the result and ARG0 and ARG1 are the operands of the
5821 comparison. ARG1 must be a TREE_REAL_CST.
5823 The function returns the constant folded tree if a simplification
5824 can be made, and NULL_TREE otherwise. */
5826 static tree
5827 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5828 tree type, tree arg0, tree arg1)
5830 REAL_VALUE_TYPE c;
5832 if (BUILTIN_SQRT_P (fcode))
5834 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5835 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5837 c = TREE_REAL_CST (arg1);
5838 if (REAL_VALUE_NEGATIVE (c))
5840 /* sqrt(x) < y is always false, if y is negative. */
5841 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5842 return omit_one_operand (type, integer_zero_node, arg);
5844 /* sqrt(x) > y is always true, if y is negative and we
5845 don't care about NaNs, i.e. negative values of x. */
5846 if (code == NE_EXPR || !HONOR_NANS (mode))
5847 return omit_one_operand (type, integer_one_node, arg);
5849 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5850 return fold_build2 (GE_EXPR, type, arg,
5851 build_real (TREE_TYPE (arg), dconst0));
5853 else if (code == GT_EXPR || code == GE_EXPR)
5855 REAL_VALUE_TYPE c2;
5857 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5858 real_convert (&c2, mode, &c2);
5860 if (REAL_VALUE_ISINF (c2))
5862 /* sqrt(x) > y is x == +Inf, when y is very large. */
5863 if (HONOR_INFINITIES (mode))
5864 return fold_build2 (EQ_EXPR, type, arg,
5865 build_real (TREE_TYPE (arg), c2));
5867 /* sqrt(x) > y is always false, when y is very large
5868 and we don't care about infinities. */
5869 return omit_one_operand (type, integer_zero_node, arg);
5872 /* sqrt(x) > c is the same as x > c*c. */
5873 return fold_build2 (code, type, arg,
5874 build_real (TREE_TYPE (arg), c2));
5876 else if (code == LT_EXPR || code == LE_EXPR)
5878 REAL_VALUE_TYPE c2;
5880 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5881 real_convert (&c2, mode, &c2);
5883 if (REAL_VALUE_ISINF (c2))
5885 /* sqrt(x) < y is always true, when y is a very large
5886 value and we don't care about NaNs or Infinities. */
5887 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5888 return omit_one_operand (type, integer_one_node, arg);
5890 /* sqrt(x) < y is x != +Inf when y is very large and we
5891 don't care about NaNs. */
5892 if (! HONOR_NANS (mode))
5893 return fold_build2 (NE_EXPR, type, arg,
5894 build_real (TREE_TYPE (arg), c2));
5896 /* sqrt(x) < y is x >= 0 when y is very large and we
5897 don't care about Infinities. */
5898 if (! HONOR_INFINITIES (mode))
5899 return fold_build2 (GE_EXPR, type, arg,
5900 build_real (TREE_TYPE (arg), dconst0));
5902 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5903 if (lang_hooks.decls.global_bindings_p () != 0
5904 || CONTAINS_PLACEHOLDER_P (arg))
5905 return NULL_TREE;
5907 arg = save_expr (arg);
5908 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5909 fold_build2 (GE_EXPR, type, arg,
5910 build_real (TREE_TYPE (arg),
5911 dconst0)),
5912 fold_build2 (NE_EXPR, type, arg,
5913 build_real (TREE_TYPE (arg),
5914 c2)));
5917 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5918 if (! HONOR_NANS (mode))
5919 return fold_build2 (code, type, arg,
5920 build_real (TREE_TYPE (arg), c2));
5922 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5923 if (lang_hooks.decls.global_bindings_p () == 0
5924 && ! CONTAINS_PLACEHOLDER_P (arg))
5926 arg = save_expr (arg);
5927 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5928 fold_build2 (GE_EXPR, type, arg,
5929 build_real (TREE_TYPE (arg),
5930 dconst0)),
5931 fold_build2 (code, type, arg,
5932 build_real (TREE_TYPE (arg),
5933 c2)));
5938 return NULL_TREE;
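/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The shape of the sqrt folds above, written as plain C.  GCC only
   performs them under -funsafe-math-optimizations, since sqrt's
   rounding can flip a comparison for values right at the boundary.  */

static int
example_sqrt_gt (double x)
{
  /* Before folding: sqrt (x) > 2.0.  After: x > 4.0.  Both sides
     are false for NaN and for negative x.  */
  return x > 4.0;
}

static int
example_sqrt_cmp_negative (double x)
{
  /* sqrt (x) > c with c negative reduces to the sign test x >= 0.0.  */
  return x >= 0.0;
}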
5941 /* Subroutine of fold() that optimizes comparisons against Infinities,
5942 either +Inf or -Inf.
5944 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5945 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5946 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5948 The function returns the constant folded tree if a simplification
5949 can be made, and NULL_TREE otherwise. */
5951 static tree
5952 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5954 enum machine_mode mode;
5955 REAL_VALUE_TYPE max;
5956 tree temp;
5957 bool neg;
5959 mode = TYPE_MODE (TREE_TYPE (arg0));
5961 /* For negative infinity swap the sense of the comparison. */
5962 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5963 if (neg)
5964 code = swap_tree_comparison (code);
5966 switch (code)
5968 case GT_EXPR:
5969 /* x > +Inf is always false, if we ignore sNaNs. */
5970 if (HONOR_SNANS (mode))
5971 return NULL_TREE;
5972 return omit_one_operand (type, integer_zero_node, arg0);
5974 case LE_EXPR:
5975 /* x <= +Inf is always true, if we don't care about NaNs. */
5976 if (! HONOR_NANS (mode))
5977 return omit_one_operand (type, integer_one_node, arg0);
5979 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5980 if (lang_hooks.decls.global_bindings_p () == 0
5981 && ! CONTAINS_PLACEHOLDER_P (arg0))
5983 arg0 = save_expr (arg0);
5984 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5986 break;
5988 case EQ_EXPR:
5989 case GE_EXPR:
5990 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5991 real_maxval (&max, neg, mode);
5992 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5993 arg0, build_real (TREE_TYPE (arg0), max));
5995 case LT_EXPR:
5996 /* x < +Inf is always equal to x <= DBL_MAX. */
5997 real_maxval (&max, neg, mode);
5998 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5999 arg0, build_real (TREE_TYPE (arg0), max));
6001 case NE_EXPR:
6002 /* x != +Inf is always equal to !(x > DBL_MAX). */
6003 real_maxval (&max, neg, mode);
6004 if (! HONOR_NANS (mode))
6005 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6006 arg0, build_real (TREE_TYPE (arg0), max));
6008 /* The transformation below creates non-gimple code and thus is
6009 not appropriate if we are in gimple form. */
6010 if (in_gimple_form)
6011 return NULL_TREE;
6013 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6014 arg0, build_real (TREE_TYPE (arg0), max));
6015 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6017 default:
6018 break;
6021 return NULL_TREE;
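/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The infinity folds above as a standalone check: x < +Inf is exactly
   x <= DBL_MAX, and x >= +Inf is exactly x > DBL_MAX, for every input
   including NaN and the infinities.  Assumes a C99 host:  */

#include <assert.h>
#include <float.h>
#include <math.h>

static void
example_inf_compare (double x)
{
  assert ((x < INFINITY) == (x <= DBL_MAX));
  assert ((x >= INFINITY) == (x > DBL_MAX));
}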
6024 /* Subroutine of fold() that optimizes comparisons of a division by
6025 a nonzero integer constant against an integer constant, i.e.
6026 X/C1 op C2.
6028 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6029 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6030 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6032 The function returns the constant folded tree if a simplification
6033 can be made, and NULL_TREE otherwise. */
6035 static tree
6036 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6038 tree prod, tmp, hi, lo;
6039 tree arg00 = TREE_OPERAND (arg0, 0);
6040 tree arg01 = TREE_OPERAND (arg0, 1);
6041 unsigned HOST_WIDE_INT lpart;
6042 HOST_WIDE_INT hpart;
6043 bool neg_overflow;
6044 int overflow;
6046 /* We have to do this the hard way to detect unsigned overflow.
6047 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6048 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6049 TREE_INT_CST_HIGH (arg01),
6050 TREE_INT_CST_LOW (arg1),
6051 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6052 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6053 prod = force_fit_type (prod, -1, overflow, false);
6054 neg_overflow = false;
6056 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6058 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6059 lo = prod;
6061 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6062 overflow = add_double (TREE_INT_CST_LOW (prod),
6063 TREE_INT_CST_HIGH (prod),
6064 TREE_INT_CST_LOW (tmp),
6065 TREE_INT_CST_HIGH (tmp),
6066 &lpart, &hpart);
6067 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6068 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6069 TREE_CONSTANT_OVERFLOW (prod));
6071 else if (tree_int_cst_sgn (arg01) >= 0)
6073 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6074 switch (tree_int_cst_sgn (arg1))
6076 case -1:
6077 neg_overflow = true;
6078 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6079 hi = prod;
6080 break;
6082 case 0:
6083 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6084 hi = tmp;
6085 break;
6087 case 1:
6088 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6089 lo = prod;
6090 break;
6092 default:
6093 gcc_unreachable ();
6096 else
6098 /* A negative divisor reverses the relational operators. */
6099 code = swap_tree_comparison (code);
6101 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6102 switch (tree_int_cst_sgn (arg1))
6104 case -1:
6105 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6106 lo = prod;
6107 break;
6109 case 0:
6110 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6111 lo = tmp;
6112 break;
6114 case 1:
6115 neg_overflow = true;
6116 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6117 hi = prod;
6118 break;
6120 default:
6121 gcc_unreachable ();
6125 switch (code)
6127 case EQ_EXPR:
6128 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6129 return omit_one_operand (type, integer_zero_node, arg00);
6130 if (TREE_OVERFLOW (hi))
6131 return fold_build2 (GE_EXPR, type, arg00, lo);
6132 if (TREE_OVERFLOW (lo))
6133 return fold_build2 (LE_EXPR, type, arg00, hi);
6134 return build_range_check (type, arg00, 1, lo, hi);
6136 case NE_EXPR:
6137 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6138 return omit_one_operand (type, integer_one_node, arg00);
6139 if (TREE_OVERFLOW (hi))
6140 return fold_build2 (LT_EXPR, type, arg00, lo);
6141 if (TREE_OVERFLOW (lo))
6142 return fold_build2 (GT_EXPR, type, arg00, hi);
6143 return build_range_check (type, arg00, 0, lo, hi);
6145 case LT_EXPR:
6146 if (TREE_OVERFLOW (lo))
6148 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6149 return omit_one_operand (type, tmp, arg00);
6151 return fold_build2 (LT_EXPR, type, arg00, lo);
6153 case LE_EXPR:
6154 if (TREE_OVERFLOW (hi))
6156 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6157 return omit_one_operand (type, tmp, arg00);
6159 return fold_build2 (LE_EXPR, type, arg00, hi);
6161 case GT_EXPR:
6162 if (TREE_OVERFLOW (hi))
6164 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6165 return omit_one_operand (type, tmp, arg00);
6167 return fold_build2 (GT_EXPR, type, arg00, hi);
6169 case GE_EXPR:
6170 if (TREE_OVERFLOW (lo))
6172 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6173 return omit_one_operand (type, tmp, arg00);
6175 return fold_build2 (GE_EXPR, type, arg00, lo);
6177 default:
6178 break;
6181 return NULL_TREE;
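/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The effect of fold_div_compare in source terms: a comparison of a
   truncating division against a constant becomes a range check on the
   dividend, eliminating the division.  A standalone check:  */

#include <assert.h>

static void
example_div_compare (int x)
{
  /* x / 3 == 2 folds to the range check 6 <= x && x <= 8.  */
  assert ((x / 3 == 2) == (x >= 6 && x <= 8));
  /* Negative quotients mirror: x / 3 == -2 folds to -8 <= x && x <= -6.  */
  assert ((x / 3 == -2) == (x >= -8 && x <= -6));
}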
6185 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6186 equality/inequality test, then return a simplified form of the test
6187 using a sign test. Otherwise return NULL. TYPE is the desired
6188 result type. */
6190 static tree
6191 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6192 tree result_type)
6194 /* If this is testing a single bit, we can optimize the test. */
6195 if ((code == NE_EXPR || code == EQ_EXPR)
6196 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6197 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6199 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6200 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6201 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6203 if (arg00 != NULL_TREE
6204 /* This is only a win if casting to a signed type is cheap,
6205 i.e. when arg00's type is not a partial mode. */
6206 && TYPE_PRECISION (TREE_TYPE (arg00))
6207 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6209 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6210 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6211 result_type, fold_convert (stype, arg00),
6212 build_int_cst (stype, 0));
6216 return NULL_TREE;
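/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The sign test above at the source level, assuming a 32-bit int:
   when the masked bit is the sign bit, the AND disappears.  */

#include <assert.h>
#include <stdint.h>

static void
example_sign_bit_test (int32_t a)
{
  /* (A & 0x80000000) != 0 folds to A < 0; == 0 folds to A >= 0.  */
  assert ((((uint32_t) a & 0x80000000u) != 0) == (a < 0));
  assert ((((uint32_t) a & 0x80000000u) == 0) == (a >= 0));
}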
6219 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6220 equality/inequality test, then return a simplified form of
6221 the test using shifts and logical operations. Otherwise return
6222 NULL. TYPE is the desired result type. */
6224 tree
6225 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6226 tree result_type)
6228 /* If this is testing a single bit, we can optimize the test. */
6229 if ((code == NE_EXPR || code == EQ_EXPR)
6230 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6231 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6233 tree inner = TREE_OPERAND (arg0, 0);
6234 tree type = TREE_TYPE (arg0);
6235 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6236 enum machine_mode operand_mode = TYPE_MODE (type);
6237 int ops_unsigned;
6238 tree signed_type, unsigned_type, intermediate_type;
6239 tree tem;
6241 /* First, see if we can fold the single bit test into a sign-bit
6242 test. */
6243 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6244 result_type);
6245 if (tem)
6246 return tem;
6248 /* Otherwise we have (A & C) != 0 where C is a single bit,
6249 convert that into ((A >> C2) & 1), where C2 = log2(C).
6250 Similarly for (A & C) == 0. */
6252 /* If INNER is a right shift of a constant and it plus BITNUM does
6253 not overflow, adjust BITNUM and INNER. */
6254 if (TREE_CODE (inner) == RSHIFT_EXPR
6255 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6256 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6257 && bitnum < TYPE_PRECISION (type)
6258 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6259 bitnum - TYPE_PRECISION (type)))
6261 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6262 inner = TREE_OPERAND (inner, 0);
6265 /* If we are going to be able to omit the AND below, we must do our
6266 operations as unsigned. If we must use the AND, we have a choice.
6267 Normally unsigned is faster, but for some machines signed is. */
6268 #ifdef LOAD_EXTEND_OP
6269 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6270 && !flag_syntax_only) ? 0 : 1;
6271 #else
6272 ops_unsigned = 1;
6273 #endif
6275 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6276 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6277 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6278 inner = fold_convert (intermediate_type, inner);
6280 if (bitnum != 0)
6281 inner = build2 (RSHIFT_EXPR, intermediate_type,
6282 inner, size_int (bitnum));
6284 if (code == EQ_EXPR)
6285 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6286 inner, integer_one_node);
6288 /* Put the AND last so it can combine with more things. */
6289 inner = build2 (BIT_AND_EXPR, intermediate_type,
6290 inner, integer_one_node);
6292 /* Make sure to return the proper type. */
6293 inner = fold_convert (result_type, inner);
6295 return inner;
6297 return NULL_TREE;
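/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The shift/AND rewrite above in source terms, for C = 8 = 1 << 3:  */

#include <assert.h>

static void
example_single_bit_test (unsigned a)
{
  /* (A & 8) != 0 becomes (A >> 3) & 1.  */
  assert (((a & 8) != 0) == ((a >> 3) & 1));
  /* For == 0 an XOR with 1 is inserted before the final AND.  */
  assert (((a & 8) == 0) == (((a >> 3) ^ 1) & 1));
}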
6300 /* Check whether we are allowed to reorder operands arg0 and arg1,
6301 such that the evaluation of arg1 occurs before arg0. */
6303 static bool
6304 reorder_operands_p (tree arg0, tree arg1)
6306 if (! flag_evaluation_order)
6307 return true;
6308 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6309 return true;
6310 return ! TREE_SIDE_EFFECTS (arg0)
6311 && ! TREE_SIDE_EFFECTS (arg1);
6314 /* Test whether it is preferable to swap two operands, ARG0 and
6315 ARG1, for example because ARG0 is an integer constant and ARG1
6316 isn't. If REORDER is true, only recommend swapping if we can
6317 evaluate the operands in reverse order. */
6319 bool
6320 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6322 STRIP_SIGN_NOPS (arg0);
6323 STRIP_SIGN_NOPS (arg1);
6325 if (TREE_CODE (arg1) == INTEGER_CST)
6326 return 0;
6327 if (TREE_CODE (arg0) == INTEGER_CST)
6328 return 1;
6330 if (TREE_CODE (arg1) == REAL_CST)
6331 return 0;
6332 if (TREE_CODE (arg0) == REAL_CST)
6333 return 1;
6335 if (TREE_CODE (arg1) == COMPLEX_CST)
6336 return 0;
6337 if (TREE_CODE (arg0) == COMPLEX_CST)
6338 return 1;
6340 if (TREE_CONSTANT (arg1))
6341 return 0;
6342 if (TREE_CONSTANT (arg0))
6343 return 1;
6345 if (optimize_size)
6346 return 0;
6348 if (reorder && flag_evaluation_order
6349 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6350 return 0;
6352 if (DECL_P (arg1))
6353 return 0;
6354 if (DECL_P (arg0))
6355 return 1;
6357 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6358 for commutative and comparison operators. Ensuring a canonical
6359 form allows the optimizers to find additional redundancies without
6360 having to explicitly check for both orderings. */
6361 if (TREE_CODE (arg0) == SSA_NAME
6362 && TREE_CODE (arg1) == SSA_NAME
6363 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6364 return 1;
6366 return 0;
6369 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6370 ARG0 is extended to a wider type. */
6372 static tree
6373 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6375 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6376 tree arg1_unw;
6377 tree shorter_type, outer_type;
6378 tree min, max;
6379 bool above, below;
6381 if (arg0_unw == arg0)
6382 return NULL_TREE;
6383 shorter_type = TREE_TYPE (arg0_unw);
6385 #ifdef HAVE_canonicalize_funcptr_for_compare
6386 /* Disable this optimization if we're casting a function pointer
6387 type on targets that require function pointer canonicalization. */
6388 if (HAVE_canonicalize_funcptr_for_compare
6389 && TREE_CODE (shorter_type) == POINTER_TYPE
6390 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6391 return NULL_TREE;
6392 #endif
6394 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6395 return NULL_TREE;
6397 arg1_unw = get_unwidened (arg1, shorter_type);
6399 /* If possible, express the comparison in the shorter mode. */
6400 if ((code == EQ_EXPR || code == NE_EXPR
6401 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6402 && (TREE_TYPE (arg1_unw) == shorter_type
6403 || (TREE_CODE (arg1_unw) == INTEGER_CST
6404 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6405 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6406 && int_fits_type_p (arg1_unw, shorter_type))))
6407 return fold_build2 (code, type, arg0_unw,
6408 fold_convert (shorter_type, arg1_unw));
6410 if (TREE_CODE (arg1_unw) != INTEGER_CST
6411 || TREE_CODE (shorter_type) != INTEGER_TYPE
6412 || !int_fits_type_p (arg1_unw, shorter_type))
6413 return NULL_TREE;
6415 /* If we are comparing with an integer that does not fit into the range
6416 of the shorter type, the result is known. */
6417 outer_type = TREE_TYPE (arg1_unw);
6418 min = lower_bound_in_type (outer_type, shorter_type);
6419 max = upper_bound_in_type (outer_type, shorter_type);
6421 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6422 max, arg1_unw));
6423 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6424 arg1_unw, min));
6426 switch (code)
6428 case EQ_EXPR:
6429 if (above || below)
6430 return omit_one_operand (type, integer_zero_node, arg0);
6431 break;
6433 case NE_EXPR:
6434 if (above || below)
6435 return omit_one_operand (type, integer_one_node, arg0);
6436 break;
6438 case LT_EXPR:
6439 case LE_EXPR:
6440 if (above)
6441 return omit_one_operand (type, integer_one_node, arg0);
6442 else if (below)
6443 return omit_one_operand (type, integer_zero_node, arg0);
6445 case GT_EXPR:
6446 case GE_EXPR:
6447 if (above)
6448 return omit_one_operand (type, integer_zero_node, arg0);
6449 else if (below)
6450 return omit_one_operand (type, integer_one_node, arg0);
6452 default:
6453 break;
6456 return NULL_TREE;
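/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   fold_widened_comparison in source terms, assuming a 16-bit short:
   once the constant is known to lie outside the range of the
   narrower type, the comparison folds to a constant.  */

static int
example_widened_compare (short s)
{
  return (int) s == 100000;	/* folds to 0: 100000 exceeds SHRT_MAX.  */
}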
6459 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6460 conversion on ARG0 changes only the signedness. */
6462 static tree
6463 fold_sign_changed_comparison (enum tree_code code, tree type,
6464 tree arg0, tree arg1)
6466 tree arg0_inner, tmp;
6467 tree inner_type, outer_type;
6469 if (TREE_CODE (arg0) != NOP_EXPR
6470 && TREE_CODE (arg0) != CONVERT_EXPR)
6471 return NULL_TREE;
6473 outer_type = TREE_TYPE (arg0);
6474 arg0_inner = TREE_OPERAND (arg0, 0);
6475 inner_type = TREE_TYPE (arg0_inner);
6477 #ifdef HAVE_canonicalize_funcptr_for_compare
6478 /* Disable this optimization if we're casting a function pointer
6479 type on targets that require function pointer canonicalization. */
6480 if (HAVE_canonicalize_funcptr_for_compare
6481 && TREE_CODE (inner_type) == POINTER_TYPE
6482 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6483 return NULL_TREE;
6484 #endif
6486 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6487 return NULL_TREE;
6489 if (TREE_CODE (arg1) != INTEGER_CST
6490 && !((TREE_CODE (arg1) == NOP_EXPR
6491 || TREE_CODE (arg1) == CONVERT_EXPR)
6492 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6493 return NULL_TREE;
6495 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6496 && code != NE_EXPR
6497 && code != EQ_EXPR)
6498 return NULL_TREE;
6500 if (TREE_CODE (arg1) == INTEGER_CST)
6502 tmp = build_int_cst_wide (inner_type,
6503 TREE_INT_CST_LOW (arg1),
6504 TREE_INT_CST_HIGH (arg1));
6505 arg1 = force_fit_type (tmp, 0,
6506 TREE_OVERFLOW (arg1),
6507 TREE_CONSTANT_OVERFLOW (arg1));
6509 else
6510 arg1 = fold_convert (inner_type, arg1);
6512 return fold_build2 (code, type, arg0_inner, arg1);
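/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   fold_sign_changed_comparison in source terms: equality is
   insensitive to signedness when the precision is unchanged, so the
   cast can be dropped and the constant reinterpreted instead.  */

#include <assert.h>

static void
example_sign_changed_compare (int x)
{
  /* (unsigned) x == 5u folds to x == 5 (same-width int assumed).  */
  assert (((unsigned) x == 5u) == (x == 5));
}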
6515 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6516 the step of the array. Reconstructs s and delta in the case of s * delta
6517 being an integer constant (and thus already folded).
6518 ADDR is the address. OP1 is the multiplicative expression.
6519 If the function succeeds, the new address expression is returned. Otherwise
6520 NULL_TREE is returned. */
6522 static tree
6523 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6525 tree s, delta, step;
6526 tree ref = TREE_OPERAND (addr, 0), pref;
6527 tree ret, pos;
6528 tree itype;
6530 /* Canonicalize op1 into a possibly non-constant delta
6531 and an INTEGER_CST s. */
6532 if (TREE_CODE (op1) == MULT_EXPR)
6534 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6536 STRIP_NOPS (arg0);
6537 STRIP_NOPS (arg1);
6539 if (TREE_CODE (arg0) == INTEGER_CST)
6541 s = arg0;
6542 delta = arg1;
6544 else if (TREE_CODE (arg1) == INTEGER_CST)
6546 s = arg1;
6547 delta = arg0;
6549 else
6550 return NULL_TREE;
6552 else if (TREE_CODE (op1) == INTEGER_CST)
6554 delta = op1;
6555 s = NULL_TREE;
6557 else
6559 /* Treat op1 as delta * 1. */
6560 delta = op1;
6561 s = integer_one_node;
6564 for (;; ref = TREE_OPERAND (ref, 0))
6566 if (TREE_CODE (ref) == ARRAY_REF)
6568 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6569 if (! itype)
6570 continue;
6572 step = array_ref_element_size (ref);
6573 if (TREE_CODE (step) != INTEGER_CST)
6574 continue;
6576 if (s)
6578 if (! tree_int_cst_equal (step, s))
6579 continue;
6581 else
6583 /* Check whether delta is a multiple of step. */
6584 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6585 if (! tmp)
6586 continue;
6587 delta = tmp;
6590 break;
6593 if (!handled_component_p (ref))
6594 return NULL_TREE;
6597 /* We found a suitable array reference. So copy everything up to it,
6598 and replace the index. */
6600 pref = TREE_OPERAND (addr, 0);
6601 ret = copy_node (pref);
6602 pos = ret;
6604 while (pref != ref)
6606 pref = TREE_OPERAND (pref, 0);
6607 TREE_OPERAND (pos, 0) = copy_node (pref);
6608 pos = TREE_OPERAND (pos, 0);
6611 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6612 fold_convert (itype,
6613 TREE_OPERAND (pos, 1)),
6614 fold_convert (itype, delta));
6616 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
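/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   try_move_mult_to_index in source terms: when the byte offset is a
   multiple of the array step, it is absorbed back into the ARRAY_REF
   index, so the address stays in &a[...] form.  */

static int *
example_move_mult_to_index (int a[], int i, int d)
{
  /* At the GENERIC level: &a[i] plus d * sizeof (int) bytes
     becomes &a[i + d].  */
  return &a[i] + d;
}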
6620 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6621 means A >= Y && A != MAX, but in this case we know that
6622 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6624 static tree
6625 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6627 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6629 if (TREE_CODE (bound) == LT_EXPR)
6630 a = TREE_OPERAND (bound, 0);
6631 else if (TREE_CODE (bound) == GT_EXPR)
6632 a = TREE_OPERAND (bound, 1);
6633 else
6634 return NULL_TREE;
6636 typea = TREE_TYPE (a);
6637 if (!INTEGRAL_TYPE_P (typea)
6638 && !POINTER_TYPE_P (typea))
6639 return NULL_TREE;
6641 if (TREE_CODE (ineq) == LT_EXPR)
6643 a1 = TREE_OPERAND (ineq, 1);
6644 y = TREE_OPERAND (ineq, 0);
6646 else if (TREE_CODE (ineq) == GT_EXPR)
6648 a1 = TREE_OPERAND (ineq, 0);
6649 y = TREE_OPERAND (ineq, 1);
6651 else
6652 return NULL_TREE;
6654 if (TREE_TYPE (a1) != typea)
6655 return NULL_TREE;
6657 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6658 if (!integer_onep (diff))
6659 return NULL_TREE;
6661 return fold_build2 (GE_EXPR, type, a, y);
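/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The rewrite above as a standalone check: under the bound a < x,
   a + 1 cannot wrap, so the sharp a + 1 > y may be replaced by the
   non-sharp a >= y.  Unsigned types make the no-wrap claim explicit:  */

#include <assert.h>

static void
example_nonsharp_ineq (unsigned a, unsigned x, unsigned y)
{
  assert ((a < x && a + 1 > y) == (a < x && a >= y));
}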
6664 /* Fold a sum or difference in which at least one operand is a multiplication.
6665 Returns the folded tree or NULL if no simplification could be made. */
6667 static tree
6668 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6670 tree arg00, arg01, arg10, arg11;
6671 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6673 /* (A * C) +- (B * C) -> (A+-B) * C.
6674 (A * C) +- A -> A * (C+-1).
6675 We are most concerned about the case where C is a constant,
6676 but other combinations show up during loop reduction. Since
6677 it is not difficult, try all four possibilities. */
6679 if (TREE_CODE (arg0) == MULT_EXPR)
6681 arg00 = TREE_OPERAND (arg0, 0);
6682 arg01 = TREE_OPERAND (arg0, 1);
6684 else
6686 arg00 = arg0;
6687 if (!FLOAT_TYPE_P (type))
6688 arg01 = build_int_cst (type, 1);
6689 else
6690 arg01 = build_real (type, dconst1);
6692 if (TREE_CODE (arg1) == MULT_EXPR)
6694 arg10 = TREE_OPERAND (arg1, 0);
6695 arg11 = TREE_OPERAND (arg1, 1);
6697 else
6699 arg10 = arg1;
6700 if (!FLOAT_TYPE_P (type))
6701 arg11 = build_int_cst (type, 1);
6702 else
6703 arg11 = build_real (type, dconst1);
6705 same = NULL_TREE;
6707 if (operand_equal_p (arg01, arg11, 0))
6708 same = arg01, alt0 = arg00, alt1 = arg10;
6709 else if (operand_equal_p (arg00, arg10, 0))
6710 same = arg00, alt0 = arg01, alt1 = arg11;
6711 else if (operand_equal_p (arg00, arg11, 0))
6712 same = arg00, alt0 = arg01, alt1 = arg10;
6713 else if (operand_equal_p (arg01, arg10, 0))
6714 same = arg01, alt0 = arg00, alt1 = arg11;
6716 /* No identical multiplicands; see if we can find a common
6717 power-of-two factor in non-power-of-two multiplies. This
6718 can help in multi-dimensional array access. */
6719 else if (host_integerp (arg01, 0)
6720 && host_integerp (arg11, 0))
6722 HOST_WIDE_INT int01, int11, tmp;
6723 bool swap = false;
6724 tree maybe_same;
6725 int01 = TREE_INT_CST_LOW (arg01);
6726 int11 = TREE_INT_CST_LOW (arg11);
6728 /* Move min of absolute values to int11. */
6729 if ((int01 >= 0 ? int01 : -int01)
6730 < (int11 >= 0 ? int11 : -int11))
6732 tmp = int01, int01 = int11, int11 = tmp;
6733 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6734 maybe_same = arg01;
6735 swap = true;
6737 else
6738 maybe_same = arg11;
6740 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6742 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6743 build_int_cst (TREE_TYPE (arg00),
6744 int01 / int11));
6745 alt1 = arg10;
6746 same = maybe_same;
6747 if (swap)
6748 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6752 if (same)
6753 return fold_build2 (MULT_EXPR, type,
6754 fold_build2 (code, type,
6755 fold_convert (type, alt0),
6756 fold_convert (type, alt1)),
6757 fold_convert (type, same));
6759 return NULL_TREE;
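/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The distributive folds above in source terms; unsigned arithmetic
   is used so the identities hold exactly (modulo 2**N):  */

#include <assert.h>

static void
example_plusminus_mult (unsigned a, unsigned b, unsigned c)
{
  /* (A * C) + (B * C) -> (A + B) * C.  */
  assert (a * c + b * c == (a + b) * c);
  /* (A * C) + A -> A * (C + 1).  */
  assert (a * c + a == a * (c + 1));
}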
6762 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6763 specified by EXPR into the buffer PTR of length LEN bytes.
6764 Return the number of bytes placed in the buffer, or zero
6765 upon failure. */
6767 static int
6768 native_encode_int (tree expr, unsigned char *ptr, int len)
6770 tree type = TREE_TYPE (expr);
6771 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6772 int byte, offset, word, words;
6773 unsigned char value;
6775 if (total_bytes > len)
6776 return 0;
6777 words = total_bytes / UNITS_PER_WORD;
6779 for (byte = 0; byte < total_bytes; byte++)
6781 int bitpos = byte * BITS_PER_UNIT;
6782 if (bitpos < HOST_BITS_PER_WIDE_INT)
6783 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6784 else
6785 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6786 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6788 if (total_bytes > UNITS_PER_WORD)
6790 word = byte / UNITS_PER_WORD;
6791 if (WORDS_BIG_ENDIAN)
6792 word = (words - 1) - word;
6793 offset = word * UNITS_PER_WORD;
6794 if (BYTES_BIG_ENDIAN)
6795 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6796 else
6797 offset += byte % UNITS_PER_WORD;
6799 else
6800 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6801 ptr[offset] = value;
6803 return total_bytes;
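/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   The per-byte indexing above, reduced to a host-only analogue for a
   value no wider than one word: each byte is extracted by bit
   position, and the offset is mirrored on a big-endian target.  */

#include <stdint.h>

static void
example_encode_u32 (uint32_t v, unsigned char *ptr, int big_endian)
{
  int byte;
  for (byte = 0; byte < 4; byte++)
    {
      unsigned char value = (unsigned char) (v >> (byte * 8));
      int offset = big_endian ? 3 - byte : byte;
      ptr[offset] = value;
    }
}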
6807 /* Subroutine of native_encode_expr. Encode the REAL_CST
6808 specified by EXPR into the buffer PTR of length LEN bytes.
6809 Return the number of bytes placed in the buffer, or zero
6810 upon failure. */
6812 static int
6813 native_encode_real (tree expr, unsigned char *ptr, int len)
6815 tree type = TREE_TYPE (expr);
6816 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6817 int byte, offset, word, words;
6818 unsigned char value;
6820 /* There are always 32 bits in each long, no matter the size of
6821 the host's long. We handle floating point representations with
6822 up to 192 bits. */
6823 long tmp[6];
6825 if (total_bytes > len)
6826 return 0;
6827 words = total_bytes / UNITS_PER_WORD;
6829 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6831 for (byte = 0; byte < total_bytes; byte++)
6833 int bitpos = byte * BITS_PER_UNIT;
6834 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6836 if (total_bytes > UNITS_PER_WORD)
6838 word = byte / UNITS_PER_WORD;
6839 if (FLOAT_WORDS_BIG_ENDIAN)
6840 word = (words - 1) - word;
6841 offset = word * UNITS_PER_WORD;
6842 if (BYTES_BIG_ENDIAN)
6843 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6844 else
6845 offset += byte % UNITS_PER_WORD;
6847 else
6848 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6849 ptr[offset] = value;
6851 return total_bytes;
6854 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6855 specified by EXPR into the buffer PTR of length LEN bytes.
6856 Return the number of bytes placed in the buffer, or zero
6857 upon failure. */
6859 static int
6860 native_encode_complex (tree expr, unsigned char *ptr, int len)
6862 int rsize, isize;
6863 tree part;
6865 part = TREE_REALPART (expr);
6866 rsize = native_encode_expr (part, ptr, len);
6867 if (rsize == 0)
6868 return 0;
6869 part = TREE_IMAGPART (expr);
6870 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6871 if (isize != rsize)
6872 return 0;
6873 return rsize + isize;
6877 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6878 specified by EXPR into the buffer PTR of length LEN bytes.
6879 Return the number of bytes placed in the buffer, or zero
6880 upon failure. */
6882 static int
6883 native_encode_vector (tree expr, unsigned char *ptr, int len)
6885 int i, size, offset, count;
6886 tree elem, elements;
6888 size = 0;
6889 offset = 0;
6890 elements = TREE_VECTOR_CST_ELTS (expr);
6891 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6892 for (i = 0; i < count; i++)
6894 if (elements)
6896 elem = TREE_VALUE (elements);
6897 elements = TREE_CHAIN (elements);
6899 else
6900 elem = NULL_TREE;
6902 if (elem)
6904 size = native_encode_expr (elem, ptr+offset, len-offset);
6905 if (size == 0)
6906 return 0;
6908 else if (size != 0)
6910 if (offset + size > len)
6911 return 0;
6912 memset (ptr+offset, 0, size);
6914 else
6915 return 0;
6916 offset += size;
6918 return offset;
6922 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6923 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6924 buffer PTR of length LEN bytes. Return the number of bytes
6925 placed in the buffer, or zero upon failure. */
6927 static int
6928 native_encode_expr (tree expr, unsigned char *ptr, int len)
6930 switch (TREE_CODE (expr))
6932 case INTEGER_CST:
6933 return native_encode_int (expr, ptr, len);
6935 case REAL_CST:
6936 return native_encode_real (expr, ptr, len);
6938 case COMPLEX_CST:
6939 return native_encode_complex (expr, ptr, len);
6941 case VECTOR_CST:
6942 return native_encode_vector (expr, ptr, len);
6944 default:
6945 return 0;
6950 /* Subroutine of native_interpret_expr. Interpret the contents of
6951 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6952 If the buffer cannot be interpreted, return NULL_TREE. */
6954 static tree
6955 native_interpret_int (tree type, unsigned char *ptr, int len)
6957 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6958 int byte, offset, word, words;
6959 unsigned char value;
6960 unsigned HOST_WIDE_INT lo = 0;
6961 HOST_WIDE_INT hi = 0;
6963 if (total_bytes > len)
6964 return NULL_TREE;
6965 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
6966 return NULL_TREE;
6967 words = total_bytes / UNITS_PER_WORD;
6969 for (byte = 0; byte < total_bytes; byte++)
6971 int bitpos = byte * BITS_PER_UNIT;
6972 if (total_bytes > UNITS_PER_WORD)
6974 word = byte / UNITS_PER_WORD;
6975 if (WORDS_BIG_ENDIAN)
6976 word = (words - 1) - word;
6977 offset = word * UNITS_PER_WORD;
6978 if (BYTES_BIG_ENDIAN)
6979 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6980 else
6981 offset += byte % UNITS_PER_WORD;
6983 else
6984 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6985 value = ptr[offset];
6987 if (bitpos < HOST_BITS_PER_WIDE_INT)
6988 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
6989 else
6990 hi |= (unsigned HOST_WIDE_INT) value
6991 << (bitpos - HOST_BITS_PER_WIDE_INT);
6994 return force_fit_type (build_int_cst_wide (type, lo, hi),
6995 0, false, false);
6999 /* Subroutine of native_interpret_expr. Interpret the contents of
7000 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7001 If the buffer cannot be interpreted, return NULL_TREE. */
7003 static tree
7004 native_interpret_real (tree type, unsigned char *ptr, int len)
7006 enum machine_mode mode = TYPE_MODE (type);
7007 int total_bytes = GET_MODE_SIZE (mode);
7008 int byte, offset, word, words;
7009 unsigned char value;
7010 /* There are always 32 bits in each long, no matter the size of
7011 the host's long. We handle floating point representations with
7012 up to 192 bits. */
7013 REAL_VALUE_TYPE r;
7014 long tmp[6];
7016 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7017 if (total_bytes > len || total_bytes > 24)
7018 return NULL_TREE;
7019 words = total_bytes / UNITS_PER_WORD;
7021 memset (tmp, 0, sizeof (tmp));
7022 for (byte = 0; byte < total_bytes; byte++)
7024 int bitpos = byte * BITS_PER_UNIT;
7025 if (total_bytes > UNITS_PER_WORD)
7027 word = byte / UNITS_PER_WORD;
7028 if (FLOAT_WORDS_BIG_ENDIAN)
7029 word = (words - 1) - word;
7030 offset = word * UNITS_PER_WORD;
7031 if (BYTES_BIG_ENDIAN)
7032 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7033 else
7034 offset += byte % UNITS_PER_WORD;
7036 else
7037 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7038 value = ptr[offset];
7040 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7043 real_from_target (&r, tmp, mode);
7044 return build_real (type, r);
7048 /* Subroutine of native_interpret_expr. Interpret the contents of
7049 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7050 If the buffer cannot be interpreted, return NULL_TREE. */
7052 static tree
7053 native_interpret_complex (tree type, unsigned char *ptr, int len)
7055 tree etype, rpart, ipart;
7056 int size;
7058 etype = TREE_TYPE (type);
7059 size = GET_MODE_SIZE (TYPE_MODE (etype));
7060 if (size * 2 > len)
7061 return NULL_TREE;
7062 rpart = native_interpret_expr (etype, ptr, size);
7063 if (!rpart)
7064 return NULL_TREE;
7065 ipart = native_interpret_expr (etype, ptr+size, size);
7066 if (!ipart)
7067 return NULL_TREE;
7068 return build_complex (type, rpart, ipart);
7072 /* Subroutine of native_interpret_expr. Interpret the contents of
7073 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7074 If the buffer cannot be interpreted, return NULL_TREE. */
7076 static tree
7077 native_interpret_vector (tree type, unsigned char *ptr, int len)
7079 tree etype, elem, elements;
7080 int i, size, count;
7082 etype = TREE_TYPE (type);
7083 size = GET_MODE_SIZE (TYPE_MODE (etype));
7084 count = TYPE_VECTOR_SUBPARTS (type);
7085 if (size * count > len)
7086 return NULL_TREE;
7088 elements = NULL_TREE;
7089 for (i = count - 1; i >= 0; i--)
7091 elem = native_interpret_expr (etype, ptr+(i*size), size);
7092 if (!elem)
7093 return NULL_TREE;
7094 elements = tree_cons (NULL_TREE, elem, elements);
7096 return build_vector (type, elements);
7100 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7101 the buffer PTR of length LEN as a constant of type TYPE. For
7102 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7103 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7104 return NULL_TREE. */
7106 static tree
7107 native_interpret_expr (tree type, unsigned char *ptr, int len)
7109 switch (TREE_CODE (type))
7111 case INTEGER_TYPE:
7112 case ENUMERAL_TYPE:
7113 case BOOLEAN_TYPE:
7114 return native_interpret_int (type, ptr, len);
7116 case REAL_TYPE:
7117 return native_interpret_real (type, ptr, len);
7119 case COMPLEX_TYPE:
7120 return native_interpret_complex (type, ptr, len);
7122 case VECTOR_TYPE:
7123 return native_interpret_vector (type, ptr, len);
7125 default:
7126 return NULL_TREE;
7131 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7132 TYPE at compile-time. If we're unable to perform the conversion
7133 return NULL_TREE. */
7135 static tree
7136 fold_view_convert_expr (tree type, tree expr)
7138 /* We support up to 512-bit values (for V8DFmode). */
7139 unsigned char buffer[64];
7140 int len;
7142 /* Check that the host and target are sane. */
7143 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7144 return NULL_TREE;
7146 len = native_encode_expr (expr, buffer, sizeof (buffer));
7147 if (len == 0)
7148 return NULL_TREE;
7150 return native_interpret_expr (type, buffer, len);
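/* [Editor's sketch -- illustrative, not part of fold-const.c.]
   What folding a VIEW_CONVERT_EXPR of a constant amounts to: encode
   the value into target bytes, then reinterpret them in the new type.
   A host analogue with memcpy, assuming 32-bit float and matching
   endianness (e.g. 1.0f comes back as 0x3f800000):  */

#include <stdint.h>
#include <string.h>

static uint32_t
example_view_convert (float f)
{
  uint32_t u;
  memcpy (&u, &f, sizeof u);
  return u;
}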
7154 /* Fold a unary expression of code CODE and type TYPE with operand
7155 OP0. Return the folded expression if folding is successful.
7156 Otherwise, return NULL_TREE. */
7158 tree
7159 fold_unary (enum tree_code code, tree type, tree op0)
7161 tree tem;
7162 tree arg0;
7163 enum tree_code_class kind = TREE_CODE_CLASS (code);
7165 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7166 && TREE_CODE_LENGTH (code) == 1);
7168 arg0 = op0;
7169 if (arg0)
7171 if (code == NOP_EXPR || code == CONVERT_EXPR
7172 || code == FLOAT_EXPR || code == ABS_EXPR)
7174 /* Don't use STRIP_NOPS, because signedness of argument type
7175 matters. */
7176 STRIP_SIGN_NOPS (arg0);
7178 else
7180 /* Strip any conversions that don't change the mode. This
7181 is safe for every expression, except for a comparison
7182 expression because its signedness is derived from its
7183 operands.
7185 Note that this is done as an internal manipulation within
7186 the constant folder, in order to find the simplest
7187 representation of the arguments so that their form can be
7188 studied. In any case, the appropriate type conversions
7189 should be put back in the tree that will get out of the
7190 constant folder. */
7191 STRIP_NOPS (arg0);
7195 if (TREE_CODE_CLASS (code) == tcc_unary)
7197 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7198 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7199 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7200 else if (TREE_CODE (arg0) == COND_EXPR)
7202 tree arg01 = TREE_OPERAND (arg0, 1);
7203 tree arg02 = TREE_OPERAND (arg0, 2);
7204 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7205 arg01 = fold_build1 (code, type, arg01);
7206 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7207 arg02 = fold_build1 (code, type, arg02);
7208 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7209 arg01, arg02);
7211 /* If this was a conversion, and all we did was to move it
7212 inside the COND_EXPR, bring it back out. But leave it if
7213 it is a conversion from integer to integer and the
7214 result precision is no wider than a word since such a
7215 conversion is cheap and may be optimized away by combine,
7216 while it couldn't if it were outside the COND_EXPR. Then return
7217 so we don't get into an infinite recursion loop taking the
7218 conversion out and then back in. */
7220 if ((code == NOP_EXPR || code == CONVERT_EXPR
7221 || code == NON_LVALUE_EXPR)
7222 && TREE_CODE (tem) == COND_EXPR
7223 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7224 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7225 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7226 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7227 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7228 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7229 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7230 && (INTEGRAL_TYPE_P
7231 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7232 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7233 || flag_syntax_only))
7234 tem = build1 (code, type,
7235 build3 (COND_EXPR,
7236 TREE_TYPE (TREE_OPERAND
7237 (TREE_OPERAND (tem, 1), 0)),
7238 TREE_OPERAND (tem, 0),
7239 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7240 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7241 return tem;
7243 else if (COMPARISON_CLASS_P (arg0))
7245 if (TREE_CODE (type) == BOOLEAN_TYPE)
7247 arg0 = copy_node (arg0);
7248 TREE_TYPE (arg0) = type;
7249 return arg0;
7251 else if (TREE_CODE (type) != INTEGER_TYPE)
7252 return fold_build3 (COND_EXPR, type, arg0,
7253 fold_build1 (code, type,
7254 integer_one_node),
7255 fold_build1 (code, type,
7256 integer_zero_node));
7260 switch (code)
7262 case NOP_EXPR:
7263 case FLOAT_EXPR:
7264 case CONVERT_EXPR:
7265 case FIX_TRUNC_EXPR:
7266 case FIX_CEIL_EXPR:
7267 case FIX_FLOOR_EXPR:
7268 case FIX_ROUND_EXPR:
7269 if (TREE_TYPE (op0) == type)
7270 return op0;
7272 /* If we have (type) (a CMP b) and type is an integral type, return
7273 a new expression involving the new type. */
7274 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7275 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7276 TREE_OPERAND (op0, 1));
7278 /* Handle cases of two conversions in a row. */
7279 if (TREE_CODE (op0) == NOP_EXPR
7280 || TREE_CODE (op0) == CONVERT_EXPR)
7282 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7283 tree inter_type = TREE_TYPE (op0);
7284 int inside_int = INTEGRAL_TYPE_P (inside_type);
7285 int inside_ptr = POINTER_TYPE_P (inside_type);
7286 int inside_float = FLOAT_TYPE_P (inside_type);
7287 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7288 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7289 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7290 int inter_int = INTEGRAL_TYPE_P (inter_type);
7291 int inter_ptr = POINTER_TYPE_P (inter_type);
7292 int inter_float = FLOAT_TYPE_P (inter_type);
7293 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7294 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7295 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7296 int final_int = INTEGRAL_TYPE_P (type);
7297 int final_ptr = POINTER_TYPE_P (type);
7298 int final_float = FLOAT_TYPE_P (type);
7299 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7300 unsigned int final_prec = TYPE_PRECISION (type);
7301 int final_unsignedp = TYPE_UNSIGNED (type);
7303 /* In addition to the cases of two conversions in a row
7304 handled below, if we are converting something to its own
7305 type via an object of identical or wider precision, neither
7306 conversion is needed. */
7307 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7308 && ((inter_int && final_int) || (inter_float && final_float))
7309 && inter_prec >= final_prec)
7310 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7312 /* Likewise, if the intermediate and final types are either both
7313 float or both integer, we don't need the middle conversion if
7314 it is wider than the final type and doesn't change the signedness
7315 (for integers). Avoid this if the final type is a pointer
7316 since then we sometimes need the inner conversion. Likewise if
7317 the outer has a precision not equal to the size of its mode. */
7318 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7319 || (inter_float && inside_float)
7320 || (inter_vec && inside_vec))
7321 && inter_prec >= inside_prec
7322 && (inter_float || inter_vec
7323 || inter_unsignedp == inside_unsignedp)
7324 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7325 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7326 && ! final_ptr
7327 && (! final_vec || inter_prec == inside_prec))
7328 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7330 /* If we have a sign-extension of a zero-extended value, we can
7331 replace that by a single zero-extension. */
7332 if (inside_int && inter_int && final_int
7333 && inside_prec < inter_prec && inter_prec < final_prec
7334 && inside_unsignedp && !inter_unsignedp)
7335 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7337 /* Two conversions in a row are not needed unless:
7338 - some conversion is floating-point (overstrict for now), or
7339 - some conversion is a vector (overstrict for now), or
7340 - the intermediate type is narrower than both initial and
7341 final, or
7342 - the intermediate type and innermost type differ in signedness,
7343 and the outermost type is wider than the intermediate, or
7344 - the initial type is a pointer type and the precisions of the
7345 intermediate and final types differ, or
7346 - the final type is a pointer type and the precisions of the
7347 initial and intermediate types differ. */
7348 if (! inside_float && ! inter_float && ! final_float
7349 && ! inside_vec && ! inter_vec && ! final_vec
7350 && (inter_prec > inside_prec || inter_prec > final_prec)
7351 && ! (inside_int && inter_int
7352 && inter_unsignedp != inside_unsignedp
7353 && inter_prec < final_prec)
7354 && ((inter_unsignedp && inter_prec > inside_prec)
7355 == (final_unsignedp && final_prec > inter_prec))
7356 && ! (inside_ptr && inter_prec != final_prec)
7357 && ! (final_ptr && inside_prec != inter_prec)
7358 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7359 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7360 && ! final_ptr)
7361 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
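      /* Editorial illustration (not part of the original source), assuming
         a 32-bit int: (short)(int)c for a signed char c drops the
         intermediate widening -- the int holds every signed char value
         exactly, so (short)c computes the same result.  */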
7364 /* Handle (T *)&A.B.C for A being of type T and B and C
7365 living at offset zero. This occurs frequently in
7366 C++ upcasting and then accessing the base. */
7367 if (TREE_CODE (op0) == ADDR_EXPR
7368 && POINTER_TYPE_P (type)
7369 && handled_component_p (TREE_OPERAND (op0, 0)))
7371 HOST_WIDE_INT bitsize, bitpos;
7372 tree offset;
7373 enum machine_mode mode;
7374 int unsignedp, volatilep;
7375 tree base = TREE_OPERAND (op0, 0);
7376 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7377 &mode, &unsignedp, &volatilep, false);
7378 /* If the reference was to a (constant) zero offset, we can use
7379 the address of the base if it has the same base type
7380 as the result type. */
7381 if (! offset && bitpos == 0
7382 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7383 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7384 return fold_convert (type, build_fold_addr_expr (base));
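          /* Editorial illustration (not part of the original source): given
             struct B { int i; }; struct A { struct B b; } a; the cast
             (struct A *)&a.b.i folds to &a, since the reference bottoms out
             at offset zero within an object whose type matches the
             pointed-to type.  */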
7387 if (TREE_CODE (op0) == MODIFY_EXPR
7388 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7389 /* Detect assigning a bitfield. */
7390 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7391 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7393 /* Don't leave an assignment inside a conversion
7394 unless assigning a bitfield. */
7395 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7396 /* First do the assignment, then return converted constant. */
7397 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7398 TREE_NO_WARNING (tem) = 1;
7399 TREE_USED (tem) = 1;
7400 return tem;
7403 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7404 constant (if x has signed type, the sign bit cannot be set
7405 in c). This folds the extension into the BIT_AND_EXPR. */
7406 if (INTEGRAL_TYPE_P (type)
7407 && TREE_CODE (type) != BOOLEAN_TYPE
7408 && TREE_CODE (op0) == BIT_AND_EXPR
7409 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7411 tree and = op0;
7412 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7413 int change = 0;
7415 if (TYPE_UNSIGNED (TREE_TYPE (and))
7416 || (TYPE_PRECISION (type)
7417 <= TYPE_PRECISION (TREE_TYPE (and))))
7418 change = 1;
7419 else if (TYPE_PRECISION (TREE_TYPE (and1))
7420 <= HOST_BITS_PER_WIDE_INT
7421 && host_integerp (and1, 1))
7423 unsigned HOST_WIDE_INT cst;
7425 cst = tree_low_cst (and1, 1);
7426 cst &= (HOST_WIDE_INT) -1
7427 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7428 change = (cst == 0);
7429 #ifdef LOAD_EXTEND_OP
7430 if (change
7431 && !flag_syntax_only
7432 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7433 == ZERO_EXTEND))
7435 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7436 and0 = fold_convert (uns, and0);
7437 and1 = fold_convert (uns, and1);
7439 #endif
7441 if (change)
7443 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7444 TREE_INT_CST_HIGH (and1));
7445 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7446 TREE_CONSTANT_OVERFLOW (and1));
7447 return fold_build2 (BIT_AND_EXPR, type,
7448 fold_convert (type, and0), tem);
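      /* Editorial illustration (not part of the original source): a
         BIT_AND_EXPR computed in unsigned short, say x & 0xff, converted
         to int becomes (int)x & 0xff; since the inner type is unsigned,
         the extension cannot change the masked bits and folds into the
         BIT_AND_EXPR.  */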
7452 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7453 T2 being pointers to types of the same size. */
7454 if (POINTER_TYPE_P (type)
7455 && BINARY_CLASS_P (arg0)
7456 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7457 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7459 tree arg00 = TREE_OPERAND (arg0, 0);
7460 tree t0 = type;
7461 tree t1 = TREE_TYPE (arg00);
7462 tree tt0 = TREE_TYPE (t0);
7463 tree tt1 = TREE_TYPE (t1);
7464 tree s0 = TYPE_SIZE (tt0);
7465 tree s1 = TYPE_SIZE (tt1);
7467 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7468 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7469 TREE_OPERAND (arg0, 1));
7472 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7473 of the same precision, and X is an integer type not narrower than
7474 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7475 if (INTEGRAL_TYPE_P (type)
7476 && TREE_CODE (op0) == BIT_NOT_EXPR
7477 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7478 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7479 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7480 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7482 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7483 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7484 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7485 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
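      /* Editorial illustration (not part of the original source): for
         int x, the tree (int) ~((unsigned) x) simplifies to ~x -- int and
         unsigned have the same precision and the inner cast is not an
         extension, so BIT_NOT_EXPR commutes with the conversions.  */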
7488 tem = fold_convert_const (code, type, arg0);
7489 return tem ? tem : NULL_TREE;
7491 case VIEW_CONVERT_EXPR:
7492 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7493 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7494 return fold_view_convert_expr (type, op0);
7496 case NEGATE_EXPR:
7497 if (negate_expr_p (arg0))
7498 return fold_convert (type, negate_expr (arg0));
7499 return NULL_TREE;
7501 case ABS_EXPR:
7502 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7503 return fold_abs_const (arg0, type);
7504 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7505 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7506 /* Convert fabs((double)float) into (double)fabsf(float). */
7507 else if (TREE_CODE (arg0) == NOP_EXPR
7508 && TREE_CODE (type) == REAL_TYPE)
7510 tree targ0 = strip_float_extensions (arg0);
7511 if (targ0 != arg0)
7512 return fold_convert (type, fold_build1 (ABS_EXPR,
7513 TREE_TYPE (targ0),
7514 targ0));
7516 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7517 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7518 return arg0;
7520 /* Strip sign ops from argument. */
7521 if (TREE_CODE (type) == REAL_TYPE)
7523 tem = fold_strip_sign_ops (arg0);
7524 if (tem)
7525 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7527 return NULL_TREE;
7529 case CONJ_EXPR:
7530 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7531 return fold_convert (type, arg0);
7532 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7533 return build2 (COMPLEX_EXPR, type,
7534 TREE_OPERAND (arg0, 0),
7535 negate_expr (TREE_OPERAND (arg0, 1)));
7536 else if (TREE_CODE (arg0) == COMPLEX_CST)
7537 return build_complex (type, TREE_REALPART (arg0),
7538 negate_expr (TREE_IMAGPART (arg0)));
7539 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7540 return fold_build2 (TREE_CODE (arg0), type,
7541 fold_build1 (CONJ_EXPR, type,
7542 TREE_OPERAND (arg0, 0)),
7543 fold_build1 (CONJ_EXPR, type,
7544 TREE_OPERAND (arg0, 1)));
7545 else if (TREE_CODE (arg0) == CONJ_EXPR)
7546 return TREE_OPERAND (arg0, 0);
7547 return NULL_TREE;
7549 case BIT_NOT_EXPR:
7550 if (TREE_CODE (arg0) == INTEGER_CST)
7551 return fold_not_const (arg0, type);
7552 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7553 return TREE_OPERAND (arg0, 0);
7554 /* Convert ~ (-A) to A - 1. */
7555 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7556 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7557 build_int_cst (type, 1));
7558 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7559 else if (INTEGRAL_TYPE_P (type)
7560 && ((TREE_CODE (arg0) == MINUS_EXPR
7561 && integer_onep (TREE_OPERAND (arg0, 1)))
7562 || (TREE_CODE (arg0) == PLUS_EXPR
7563 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7564 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7565 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7566 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7567 && (tem = fold_unary (BIT_NOT_EXPR, type,
7568 fold_convert (type,
7569 TREE_OPERAND (arg0, 0)))))
7570 return fold_build2 (BIT_XOR_EXPR, type, tem,
7571 fold_convert (type, TREE_OPERAND (arg0, 1)));
7572 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7573 && (tem = fold_unary (BIT_NOT_EXPR, type,
7574 fold_convert (type,
7575 TREE_OPERAND (arg0, 1)))))
7576 return fold_build2 (BIT_XOR_EXPR, type,
7577 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7579 return NULL_TREE;
7581 case TRUTH_NOT_EXPR:
7582 /* The argument to invert_truthvalue must have Boolean type. */
7583 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7584 arg0 = fold_convert (boolean_type_node, arg0);
7586 /* Note that the operand of this must be an int
7587 and its values must be 0 or 1.
7588 ("true" is a fixed value perhaps depending on the language,
7589 but we don't handle values other than 1 correctly yet.) */
7590 tem = invert_truthvalue (arg0);
7591 /* Avoid infinite recursion. */
7592 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7593 return NULL_TREE;
7594 return fold_convert (type, tem);
7596 case REALPART_EXPR:
7597 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7598 return NULL_TREE;
7599 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7600 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7601 TREE_OPERAND (arg0, 1));
7602 else if (TREE_CODE (arg0) == COMPLEX_CST)
7603 return TREE_REALPART (arg0);
7604 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7605 return fold_build2 (TREE_CODE (arg0), type,
7606 fold_build1 (REALPART_EXPR, type,
7607 TREE_OPERAND (arg0, 0)),
7608 fold_build1 (REALPART_EXPR, type,
7609 TREE_OPERAND (arg0, 1)));
7610 return NULL_TREE;
7612 case IMAGPART_EXPR:
7613 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7614 return fold_convert (type, integer_zero_node);
7615 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7616 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7617 TREE_OPERAND (arg0, 0));
7618 else if (TREE_CODE (arg0) == COMPLEX_CST)
7619 return TREE_IMAGPART (arg0);
7620 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7621 return fold_build2 (TREE_CODE (arg0), type,
7622 fold_build1 (IMAGPART_EXPR, type,
7623 TREE_OPERAND (arg0, 0)),
7624 fold_build1 (IMAGPART_EXPR, type,
7625 TREE_OPERAND (arg0, 1)));
7626 return NULL_TREE;
7628 default:
7629 return NULL_TREE;
7630 } /* switch (code) */
7633 /* Fold a binary expression of code CODE and type TYPE with operands
7634 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7635 Return the folded expression if folding is successful. Otherwise,
7636 return NULL_TREE. */
7638 static tree
7639 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7641 enum tree_code compl_code;
7643 if (code == MIN_EXPR)
7644 compl_code = MAX_EXPR;
7645 else if (code == MAX_EXPR)
7646 compl_code = MIN_EXPR;
7647 else
7648 gcc_unreachable ();
7650 /* MIN (MAX (a, b), b) == b.  */
7651 if (TREE_CODE (op0) == compl_code
7652 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7653 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7655 /* MIN (MAX (b, a), b) == b.  */
7656 if (TREE_CODE (op0) == compl_code
7657 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7658 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7659 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7661 /* MIN (a, MAX (a, b)) == a.  */
7662 if (TREE_CODE (op1) == compl_code
7663 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7664 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7665 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7667 /* MIN (a, MAX (b, a)) == a.  */
7668 if (TREE_CODE (op1) == compl_code
7669 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7670 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7671 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7673 return NULL_TREE;
7676 /* Subroutine of fold_binary. This routine performs all of the
7677 transformations that are common to the equality/inequality
7678 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7679 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7680 fold_binary should call fold_binary instead of this function. Fold a comparison with
7681 tree code CODE and type TYPE with operands OP0 and OP1. Return
7682 the folded comparison or NULL_TREE. */
7684 static tree
7685 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7687 tree arg0, arg1, tem;
7689 arg0 = op0;
7690 arg1 = op1;
7692 STRIP_SIGN_NOPS (arg0);
7693 STRIP_SIGN_NOPS (arg1);
7695 tem = fold_relational_const (code, type, arg0, arg1);
7696 if (tem != NULL_TREE)
7697 return tem;
7699 /* If one arg is a real or integer constant, put it last. */
7700 if (tree_swap_operands_p (arg0, arg1, true))
7701 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7703 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7704 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7705 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7706 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7707 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7708 && !(flag_wrapv || flag_trapv))
7709 && (TREE_CODE (arg1) == INTEGER_CST
7710 && !TREE_OVERFLOW (arg1)))
7712 tree const1 = TREE_OPERAND (arg0, 1);
7713 tree const2 = arg1;
7714 tree variable = TREE_OPERAND (arg0, 0);
7715 tree lhs;
7716 int lhs_add;
7717 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7719 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7720 TREE_TYPE (arg1), const2, const1);
7721 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7722 && (TREE_CODE (lhs) != INTEGER_CST
7723 || !TREE_OVERFLOW (lhs)))
7724 return fold_build2 (code, type, variable, lhs);
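      /* Editorial illustration (not part of the original source): for
         signed int x without -fwrapv/-ftrapv, (x + 5) < 10 becomes
         x < 5 -- the constant 10 - 5 is computed on the other side,
         provided that subtraction does not itself overflow.  */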
7727 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7729 tree targ0 = strip_float_extensions (arg0);
7730 tree targ1 = strip_float_extensions (arg1);
7731 tree newtype = TREE_TYPE (targ0);
7733 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7734 newtype = TREE_TYPE (targ1);
7736 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7737 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7738 return fold_build2 (code, type, fold_convert (newtype, targ0),
7739 fold_convert (newtype, targ1));
7741 /* (-a) CMP (-b) -> b CMP a */
7742 if (TREE_CODE (arg0) == NEGATE_EXPR
7743 && TREE_CODE (arg1) == NEGATE_EXPR)
7744 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7745 TREE_OPERAND (arg0, 0));
7747 if (TREE_CODE (arg1) == REAL_CST)
7749 REAL_VALUE_TYPE cst;
7750 cst = TREE_REAL_CST (arg1);
7752 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7753 if (TREE_CODE (arg0) == NEGATE_EXPR)
7754 return fold_build2 (swap_tree_comparison (code), type,
7755 TREE_OPERAND (arg0, 0),
7756 build_real (TREE_TYPE (arg1),
7757 REAL_VALUE_NEGATE (cst)));
7759 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7760 /* a CMP (-0) -> a CMP 0 */
7761 if (REAL_VALUE_MINUS_ZERO (cst))
7762 return fold_build2 (code, type, arg0,
7763 build_real (TREE_TYPE (arg1), dconst0));
7765 /* x != NaN is always true, other ops are always false. */
7766 if (REAL_VALUE_ISNAN (cst)
7767 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7769 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7770 return omit_one_operand (type, tem, arg0);
7773 /* Fold comparisons against infinity. */
7774 if (REAL_VALUE_ISINF (cst))
7776 tem = fold_inf_compare (code, type, arg0, arg1);
7777 if (tem != NULL_TREE)
7778 return tem;
7782 /* If this is a comparison of a real constant with a PLUS_EXPR
7783 or a MINUS_EXPR of a real constant, we can convert it into a
7784 comparison with a revised real constant as long as no overflow
7785 occurs when unsafe_math_optimizations are enabled. */
7786 if (flag_unsafe_math_optimizations
7787 && TREE_CODE (arg1) == REAL_CST
7788 && (TREE_CODE (arg0) == PLUS_EXPR
7789 || TREE_CODE (arg0) == MINUS_EXPR)
7790 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7791 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7792 ? MINUS_EXPR : PLUS_EXPR,
7793 arg1, TREE_OPERAND (arg0, 1), 0))
7794 && ! TREE_CONSTANT_OVERFLOW (tem))
7795 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7797 /* Likewise, we can simplify a comparison of a real constant with
7798 a MINUS_EXPR whose first operand is also a real constant, i.e.
7799 (c1 - x) < c2 becomes x > c1-c2. */
7800 if (flag_unsafe_math_optimizations
7801 && TREE_CODE (arg1) == REAL_CST
7802 && TREE_CODE (arg0) == MINUS_EXPR
7803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7804 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7805 arg1, 0))
7806 && ! TREE_CONSTANT_OVERFLOW (tem))
7807 return fold_build2 (swap_tree_comparison (code), type,
7808 TREE_OPERAND (arg0, 1), tem);
7810 /* Fold comparisons against built-in math functions. */
7811 if (TREE_CODE (arg1) == REAL_CST
7812 && flag_unsafe_math_optimizations
7813 && ! flag_errno_math)
7815 enum built_in_function fcode = builtin_mathfn_code (arg0);
7817 if (fcode != END_BUILTINS)
7819 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7820 if (tem != NULL_TREE)
7821 return tem;
7826 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7827 if (TREE_CONSTANT (arg1)
7828 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7829 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7830 /* This optimization is invalid for ordered comparisons
7831 if CONST+INCR overflows or if foo+incr might overflow.
7832 This optimization is invalid for floating point due to rounding.
7833 For pointer types we assume overflow doesn't happen. */
7834 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7835 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7836 && (code == EQ_EXPR || code == NE_EXPR))))
7838 tree varop, newconst;
7840 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7842 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7843 arg1, TREE_OPERAND (arg0, 1));
7844 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7845 TREE_OPERAND (arg0, 0),
7846 TREE_OPERAND (arg0, 1));
7848 else
7850 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7851 arg1, TREE_OPERAND (arg0, 1));
7852 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7853 TREE_OPERAND (arg0, 0),
7854 TREE_OPERAND (arg0, 1));
7858 /* If VAROP is a reference to a bitfield, we must mask
7859 the constant by the width of the field. */
7860 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7861 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7862 && host_integerp (DECL_SIZE (TREE_OPERAND
7863 (TREE_OPERAND (varop, 0), 1)), 1))
7865 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7866 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7867 tree folded_compare, shift;
7869 /* First check whether the comparison would come out
7870 always the same. If we don't do that we would
7871 change the meaning with the masking. */
7872 folded_compare = fold_build2 (code, type,
7873 TREE_OPERAND (varop, 0), arg1);
7874 if (TREE_CODE (folded_compare) == INTEGER_CST)
7875 return omit_one_operand (type, folded_compare, varop);
7877 shift = build_int_cst (NULL_TREE,
7878 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7879 shift = fold_convert (TREE_TYPE (varop), shift);
7880 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7881 newconst, shift);
7882 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7883 newconst, shift);
7886 return fold_build2 (code, type, varop, newconst);
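          /* Editorial illustration (not part of the original source): for
             int i, the equality i++ == 5 is rewritten as ++i == 6, letting
             the increment happen before the adjusted comparison.  */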
7889 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7890 && (TREE_CODE (arg0) == NOP_EXPR
7891 || TREE_CODE (arg0) == CONVERT_EXPR))
7893 /* If we are widening one operand of an integer comparison,
7894 see if the other operand is similarly being widened. Perhaps we
7895 can do the comparison in the narrower type. */
7896 tem = fold_widened_comparison (code, type, arg0, arg1);
7897 if (tem)
7898 return tem;
7900 /* Or if we are changing signedness. */
7901 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
7902 if (tem)
7903 return tem;
7906 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7907 constant, we can simplify it. */
7908 if (TREE_CODE (arg1) == INTEGER_CST
7909 && (TREE_CODE (arg0) == MIN_EXPR
7910 || TREE_CODE (arg0) == MAX_EXPR)
7911 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7913 tem = optimize_minmax_comparison (code, type, op0, op1);
7914 if (tem)
7915 return tem;
7918 /* Simplify comparison of something with itself. (For IEEE
7919 floating-point, we can only do some of these simplifications.) */
7920 if (operand_equal_p (arg0, arg1, 0))
7922 switch (code)
7924 case EQ_EXPR:
7925 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7926 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7927 return constant_boolean_node (1, type);
7928 break;
7930 case GE_EXPR:
7931 case LE_EXPR:
7932 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7933 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7934 return constant_boolean_node (1, type);
7935 return fold_build2 (EQ_EXPR, type, arg0, arg1);
7937 case NE_EXPR:
7938 /* For NE, we can only do this simplification if integer
7939 or we don't honor IEEE floating point NaNs. */
7940 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7941 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7942 break;
7943 /* ... fall through ... */
7944 case GT_EXPR:
7945 case LT_EXPR:
7946 return constant_boolean_node (0, type);
7947 default:
7948 gcc_unreachable ();
7952 /* If we are comparing an expression that just has comparisons
7953 of two integer values, arithmetic expressions of those comparisons,
7954 and constants, we can simplify it. There are only three cases
7955 to check: the two values can either be equal, the first can be
7956 greater, or the second can be greater. Fold the expression for
7957 those three values. Since each value must be 0 or 1, we have
7958 eight possibilities, each of which corresponds to the constant 0
7959 or 1 or one of the six possible comparisons.
7961 This handles common cases like (a > b) == 0 but also handles
7962 expressions like ((x > y) - (y > x)) > 0, which supposedly
7963 occur in macroized code. */
7965 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7967 tree cval1 = 0, cval2 = 0;
7968 int save_p = 0;
7970 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7971 /* Don't handle degenerate cases here; they should already
7972 have been handled anyway. */
7973 && cval1 != 0 && cval2 != 0
7974 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7975 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7976 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7977 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7978 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7979 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7980 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7982 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7983 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7985 /* We can't just pass T to eval_subst in case cval1 or cval2
7986 was the same as ARG1. */
7988 tree high_result
7989 = fold_build2 (code, type,
7990 eval_subst (arg0, cval1, maxval,
7991 cval2, minval),
7992 arg1);
7993 tree equal_result
7994 = fold_build2 (code, type,
7995 eval_subst (arg0, cval1, maxval,
7996 cval2, maxval),
7997 arg1);
7998 tree low_result
7999 = fold_build2 (code, type,
8000 eval_subst (arg0, cval1, minval,
8001 cval2, maxval),
8002 arg1);
8004 /* All three of these results should be 0 or 1. Confirm they are.
8005 Then use those values to select the proper code to use. */
8007 if (TREE_CODE (high_result) == INTEGER_CST
8008 && TREE_CODE (equal_result) == INTEGER_CST
8009 && TREE_CODE (low_result) == INTEGER_CST)
8011 /* Make a 3-bit mask with the high-order bit being the
8012 value for `>', the next for `=', and the low for `<'. */
8013 switch ((integer_onep (high_result) * 4)
8014 + (integer_onep (equal_result) * 2)
8015 + integer_onep (low_result))
8017 case 0:
8018 /* Always false. */
8019 return omit_one_operand (type, integer_zero_node, arg0);
8020 case 1:
8021 code = LT_EXPR;
8022 break;
8023 case 2:
8024 code = EQ_EXPR;
8025 break;
8026 case 3:
8027 code = LE_EXPR;
8028 break;
8029 case 4:
8030 code = GT_EXPR;
8031 break;
8032 case 5:
8033 code = NE_EXPR;
8034 break;
8035 case 6:
8036 code = GE_EXPR;
8037 break;
8038 case 7:
8039 /* Always true. */
8040 return omit_one_operand (type, integer_one_node, arg0);
8043 if (save_p)
8044 return save_expr (build2 (code, type, cval1, cval2));
8045 return fold_build2 (code, type, cval1, cval2);
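              /* Editorial illustration (not part of the original source):
                 for (a > b) == 0, substituting the three orderings of a
                 and b gives high_result 0, equal_result 1 and low_result 1,
                 i.e. mask 3, so the whole expression folds to a <= b.  */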
8050 /* Fold a comparison of the address of COMPONENT_REFs with the same
8051 type and component to a comparison of the address of the base
8052 object. In short, &x->a OP &y->a becomes x OP y and
8053 &x->a OP &y.a becomes x OP &y. */
8054 if (TREE_CODE (arg0) == ADDR_EXPR
8055 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8056 && TREE_CODE (arg1) == ADDR_EXPR
8057 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8059 tree cref0 = TREE_OPERAND (arg0, 0);
8060 tree cref1 = TREE_OPERAND (arg1, 0);
8061 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8063 tree op0 = TREE_OPERAND (cref0, 0);
8064 tree op1 = TREE_OPERAND (cref1, 0);
8065 return fold_build2 (code, type,
8066 build_fold_addr_expr (op0),
8067 build_fold_addr_expr (op1));
8071 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8072 into a single range test. */
8073 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8074 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8075 && TREE_CODE (arg1) == INTEGER_CST
8076 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8077 && !integer_zerop (TREE_OPERAND (arg0, 1))
8078 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8079 && !TREE_OVERFLOW (arg1))
8081 tem = fold_div_compare (code, type, arg0, arg1);
8082 if (tem != NULL_TREE)
8083 return tem;
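          /* Editorial illustration (not part of the original source): for
             unsigned x, x / 4 == 2 holds exactly when x is in the range
             [8, 11], so the division can fold to a range test on x;
             fold_div_compare picks the exact form.  */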
8086 return NULL_TREE;
8089 /* Fold a binary expression of code CODE and type TYPE with operands
8090 OP0 and OP1. Return the folded expression if folding is
8091 successful. Otherwise, return NULL_TREE. */
8093 tree
8094 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8096 enum tree_code_class kind = TREE_CODE_CLASS (code);
8097 tree arg0, arg1, tem;
8098 tree t1 = NULL_TREE;
8100 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8101 && TREE_CODE_LENGTH (code) == 2
8102 && op0 != NULL_TREE
8103 && op1 != NULL_TREE);
8105 arg0 = op0;
8106 arg1 = op1;
8108 /* Strip any conversions that don't change the mode. This is
8109 safe for every expression, except for a comparison expression
8110 because its signedness is derived from its operands. So, in
8111 the latter case, only strip conversions that don't change the
8112 signedness.
8114 Note that this is done as an internal manipulation within the
8115 constant folder, in order to find the simplest representation
8116 of the arguments so that their form can be studied. In any
8117 case, the appropriate type conversions should be put back in
8118 the tree that will get out of the constant folder. */
8120 if (kind == tcc_comparison)
8122 STRIP_SIGN_NOPS (arg0);
8123 STRIP_SIGN_NOPS (arg1);
8125 else
8127 STRIP_NOPS (arg0);
8128 STRIP_NOPS (arg1);
8131 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8132 constant but we can't do arithmetic on them. */
8133 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8134 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8135 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8136 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8138 if (kind == tcc_binary)
8139 tem = const_binop (code, arg0, arg1, 0);
8140 else if (kind == tcc_comparison)
8141 tem = fold_relational_const (code, type, arg0, arg1);
8142 else
8143 tem = NULL_TREE;
8145 if (tem != NULL_TREE)
8147 if (TREE_TYPE (tem) != type)
8148 tem = fold_convert (type, tem);
8149 return tem;
8153 /* If this is a commutative operation, and ARG0 is a constant, move it
8154 to ARG1 to reduce the number of tests below. */
8155 if (commutative_tree_code (code)
8156 && tree_swap_operands_p (arg0, arg1, true))
8157 return fold_build2 (code, type, op1, op0);
8159 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8161 First check for cases where an arithmetic operation is applied to a
8162 compound, conditional, or comparison operation. Push the arithmetic
8163 operation inside the compound or conditional to see if any folding
8164 can then be done. Convert comparison to conditional for this purpose.
8165 This also optimizes non-constant cases that used to be done in
8166 expand_expr.
8168 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8169 one of the operands is a truth value and the other is either a truth
8170 value or a BIT_AND_EXPR with the constant 1. In that case, the
8171 code below would make the expression more complex. Change it to a
8172 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8173 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8175 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8176 || code == EQ_EXPR || code == NE_EXPR)
8177 && ((truth_value_p (TREE_CODE (arg0))
8178 && (truth_value_p (TREE_CODE (arg1))
8179 || (TREE_CODE (arg1) == BIT_AND_EXPR
8180 && integer_onep (TREE_OPERAND (arg1, 1)))))
8181 || (truth_value_p (TREE_CODE (arg1))
8182 && (truth_value_p (TREE_CODE (arg0))
8183 || (TREE_CODE (arg0) == BIT_AND_EXPR
8184 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8186 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8187 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8188 : TRUTH_XOR_EXPR,
8189 boolean_type_node,
8190 fold_convert (boolean_type_node, arg0),
8191 fold_convert (boolean_type_node, arg1));
8193 if (code == EQ_EXPR)
8194 tem = invert_truthvalue (tem);
8196 return fold_convert (type, tem);
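      /* Editorial illustration (not part of the original source): the
         bitwise (a < b) & (c < d) on two comparison results is turned into
         the logical TRUTH_AND_EXPR form, and (a < b) == (c < d) becomes
         the inverted TRUTH_XOR_EXPR.  */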
8199 if (TREE_CODE_CLASS (code) == tcc_binary
8200 || TREE_CODE_CLASS (code) == tcc_comparison)
8202 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8203 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8204 fold_build2 (code, type,
8205 TREE_OPERAND (arg0, 1), op1));
8206 if (TREE_CODE (arg1) == COMPOUND_EXPR
8207 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8208 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8209 fold_build2 (code, type,
8210 op0, TREE_OPERAND (arg1, 1)));
8212 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8214 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8215 arg0, arg1,
8216 /*cond_first_p=*/1);
8217 if (tem != NULL_TREE)
8218 return tem;
8221 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8223 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8224 arg1, arg0,
8225 /*cond_first_p=*/0);
8226 if (tem != NULL_TREE)
8227 return tem;
8231 switch (code)
8233 case PLUS_EXPR:
8234 /* A + (-B) -> A - B */
8235 if (TREE_CODE (arg1) == NEGATE_EXPR)
8236 return fold_build2 (MINUS_EXPR, type,
8237 fold_convert (type, arg0),
8238 fold_convert (type, TREE_OPERAND (arg1, 0)));
8239 /* (-A) + B -> B - A */
8240 if (TREE_CODE (arg0) == NEGATE_EXPR
8241 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8242 return fold_build2 (MINUS_EXPR, type,
8243 fold_convert (type, arg1),
8244 fold_convert (type, TREE_OPERAND (arg0, 0)));
8245 /* Convert ~A + 1 to -A. */
8246 if (INTEGRAL_TYPE_P (type)
8247 && TREE_CODE (arg0) == BIT_NOT_EXPR
8248 && integer_onep (arg1))
8249 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8251 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8252 same or one. */
8253 if ((TREE_CODE (arg0) == MULT_EXPR
8254 || TREE_CODE (arg1) == MULT_EXPR)
8255 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8257 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8258 if (tem)
8259 return tem;
8262 if (! FLOAT_TYPE_P (type))
8264 if (integer_zerop (arg1))
8265 return non_lvalue (fold_convert (type, arg0));
8267 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8268 with a constant, and the two constants have no bits in common,
8269 we should treat this as a BIT_IOR_EXPR since this may produce more
8270 simplifications. */
8271 if (TREE_CODE (arg0) == BIT_AND_EXPR
8272 && TREE_CODE (arg1) == BIT_AND_EXPR
8273 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8274 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8275 && integer_zerop (const_binop (BIT_AND_EXPR,
8276 TREE_OPERAND (arg0, 1),
8277 TREE_OPERAND (arg1, 1), 0)))
8279 code = BIT_IOR_EXPR;
8280 goto bit_ior;
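              /* Editorial illustration (not part of the original source):
                 (x & 0xf0) + (y & 0x0f) can never carry between the two
                 disjoint masks, so it is rewritten as the equivalent
                 (x & 0xf0) | (y & 0x0f).  */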
8283 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8284 (plus (plus (mult) (mult)) (foo)) so that we can
8285 take advantage of the factoring cases below. */
8286 if (((TREE_CODE (arg0) == PLUS_EXPR
8287 || TREE_CODE (arg0) == MINUS_EXPR)
8288 && TREE_CODE (arg1) == MULT_EXPR)
8289 || ((TREE_CODE (arg1) == PLUS_EXPR
8290 || TREE_CODE (arg1) == MINUS_EXPR)
8291 && TREE_CODE (arg0) == MULT_EXPR))
8293 tree parg0, parg1, parg, marg;
8294 enum tree_code pcode;
8296 if (TREE_CODE (arg1) == MULT_EXPR)
8297 parg = arg0, marg = arg1;
8298 else
8299 parg = arg1, marg = arg0;
8300 pcode = TREE_CODE (parg);
8301 parg0 = TREE_OPERAND (parg, 0);
8302 parg1 = TREE_OPERAND (parg, 1);
8303 STRIP_NOPS (parg0);
8304 STRIP_NOPS (parg1);
8306 if (TREE_CODE (parg0) == MULT_EXPR
8307 && TREE_CODE (parg1) != MULT_EXPR)
8308 return fold_build2 (pcode, type,
8309 fold_build2 (PLUS_EXPR, type,
8310 fold_convert (type, parg0),
8311 fold_convert (type, marg)),
8312 fold_convert (type, parg1));
8313 if (TREE_CODE (parg0) != MULT_EXPR
8314 && TREE_CODE (parg1) == MULT_EXPR)
8315 return fold_build2 (PLUS_EXPR, type,
8316 fold_convert (type, parg0),
8317 fold_build2 (pcode, type,
8318 fold_convert (type, marg),
8319 fold_convert (type,
8320 parg1)));
8323 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8324 of the array. The loop optimizer sometimes produces this type of
8325 expression. */
8326 if (TREE_CODE (arg0) == ADDR_EXPR)
8328 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8329 if (tem)
8330 return fold_convert (type, tem);
8332 else if (TREE_CODE (arg1) == ADDR_EXPR)
8334 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8335 if (tem)
8336 return fold_convert (type, tem);
8339 else
8341 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8342 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8343 return non_lvalue (fold_convert (type, arg0));
8345 /* Likewise if the operands are reversed. */
8346 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8347 return non_lvalue (fold_convert (type, arg1));
8349 /* Convert X + -C into X - C. */
8350 if (TREE_CODE (arg1) == REAL_CST
8351 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8353 tem = fold_negate_const (arg1, type);
8354 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8355 return fold_build2 (MINUS_EXPR, type,
8356 fold_convert (type, arg0),
8357 fold_convert (type, tem));
8360 if (flag_unsafe_math_optimizations
8361 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8362 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8363 && (tem = distribute_real_division (code, type, arg0, arg1)))
8364 return tem;
8366 /* Convert x+x into x*2.0. */
8367 if (operand_equal_p (arg0, arg1, 0)
8368 && SCALAR_FLOAT_TYPE_P (type))
8369 return fold_build2 (MULT_EXPR, type, arg0,
8370 build_real (type, dconst2));
8372 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8373 if (flag_unsafe_math_optimizations
8374 && TREE_CODE (arg1) == PLUS_EXPR
8375 && TREE_CODE (arg0) != MULT_EXPR)
8377 tree tree10 = TREE_OPERAND (arg1, 0);
8378 tree tree11 = TREE_OPERAND (arg1, 1);
8379 if (TREE_CODE (tree11) == MULT_EXPR
8380 && TREE_CODE (tree10) == MULT_EXPR)
8382 tree tree0;
8383 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8384 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8387 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
8388 if (flag_unsafe_math_optimizations
8389 && TREE_CODE (arg0) == PLUS_EXPR
8390 && TREE_CODE (arg1) != MULT_EXPR)
8392 tree tree00 = TREE_OPERAND (arg0, 0);
8393 tree tree01 = TREE_OPERAND (arg0, 1);
8394 if (TREE_CODE (tree01) == MULT_EXPR
8395 && TREE_CODE (tree00) == MULT_EXPR)
8397 tree tree0;
8398 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8399 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8404 bit_rotate:
8405 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8406 is a rotate of A by C1 bits. */
8407 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8408 is a rotate of A by B bits. */
8410 enum tree_code code0, code1;
8411 code0 = TREE_CODE (arg0);
8412 code1 = TREE_CODE (arg1);
8413 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8414 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8415 && operand_equal_p (TREE_OPERAND (arg0, 0),
8416 TREE_OPERAND (arg1, 0), 0)
8417 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8419 tree tree01, tree11;
8420 enum tree_code code01, code11;
8422 tree01 = TREE_OPERAND (arg0, 1);
8423 tree11 = TREE_OPERAND (arg1, 1);
8424 STRIP_NOPS (tree01);
8425 STRIP_NOPS (tree11);
8426 code01 = TREE_CODE (tree01);
8427 code11 = TREE_CODE (tree11);
8428 if (code01 == INTEGER_CST
8429 && code11 == INTEGER_CST
8430 && TREE_INT_CST_HIGH (tree01) == 0
8431 && TREE_INT_CST_HIGH (tree11) == 0
8432 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8433 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8434 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8435 code0 == LSHIFT_EXPR ? tree01 : tree11);
8436 else if (code11 == MINUS_EXPR)
8438 tree tree110, tree111;
8439 tree110 = TREE_OPERAND (tree11, 0);
8440 tree111 = TREE_OPERAND (tree11, 1);
8441 STRIP_NOPS (tree110);
8442 STRIP_NOPS (tree111);
8443 if (TREE_CODE (tree110) == INTEGER_CST
8444 && 0 == compare_tree_int (tree110,
8445 TYPE_PRECISION
8446 (TREE_TYPE (TREE_OPERAND
8447 (arg0, 0))))
8448 && operand_equal_p (tree01, tree111, 0))
8449 return build2 ((code0 == LSHIFT_EXPR
8450 ? LROTATE_EXPR
8451 : RROTATE_EXPR),
8452 type, TREE_OPERAND (arg0, 0), tree01);
8454 else if (code01 == MINUS_EXPR)
8456 tree tree010, tree011;
8457 tree010 = TREE_OPERAND (tree01, 0);
8458 tree011 = TREE_OPERAND (tree01, 1);
8459 STRIP_NOPS (tree010);
8460 STRIP_NOPS (tree011);
8461 if (TREE_CODE (tree010) == INTEGER_CST
8462 && 0 == compare_tree_int (tree010,
8463 TYPE_PRECISION
8464 (TREE_TYPE (TREE_OPERAND
8465 (arg0, 0))))
8466 && operand_equal_p (tree11, tree011, 0))
8467 return build2 ((code0 != LSHIFT_EXPR
8468 ? LROTATE_EXPR
8469 : RROTATE_EXPR),
8470 type, TREE_OPERAND (arg0, 0), tree11);
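        /* Editorial illustration (not part of the original source): for a
           32-bit unsigned x, (x << 3) + (x >> 29) matches the first
           pattern (3 + 29 == 32) and becomes a left rotate of x by 3 bits;
           likewise (x << n) + (x >> (32 - n)) matches the second.  */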
8475 associate:
8476 /* In most languages, we can't associate operations on floats through
8477 parentheses. Rather than remember where the parentheses were, we
8478 don't associate floats at all, unless the user has specified
8479 -funsafe-math-optimizations. */
8481 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8483 tree var0, con0, lit0, minus_lit0;
8484 tree var1, con1, lit1, minus_lit1;
8486 /* Split both trees into variables, constants, and literals. Then
8487 associate each group together, the constants with literals,
8488 then the result with variables. This increases the chances of
8489 literals being recombined later and of generating relocatable
8490 expressions for the sum of a constant and literal. */
8491 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8492 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8493 code == MINUS_EXPR);
8495 /* Only do something if we found more than two objects. Otherwise,
8496 nothing has changed and we risk infinite recursion. */
8497 if (2 < ((var0 != 0) + (var1 != 0)
8498 + (con0 != 0) + (con1 != 0)
8499 + (lit0 != 0) + (lit1 != 0)
8500 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8502 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8503 if (code == MINUS_EXPR)
8504 code = PLUS_EXPR;
8506 var0 = associate_trees (var0, var1, code, type);
8507 con0 = associate_trees (con0, con1, code, type);
8508 lit0 = associate_trees (lit0, lit1, code, type);
8509 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8511 /* Preserve the MINUS_EXPR if the negative part of the literal is
8512 greater than the positive part. Otherwise, the multiplicative
8513 folding code (i.e. extract_muldiv) may be fooled when
8514 unsigned constants are subtracted, as in the following
8515 example: ((X*2 + 4) - 8U)/2. */
8516 if (minus_lit0 && lit0)
8518 if (TREE_CODE (lit0) == INTEGER_CST
8519 && TREE_CODE (minus_lit0) == INTEGER_CST
8520 && tree_int_cst_lt (lit0, minus_lit0))
8522 minus_lit0 = associate_trees (minus_lit0, lit0,
8523 MINUS_EXPR, type);
8524 lit0 = 0;
8526 else
8528 lit0 = associate_trees (lit0, minus_lit0,
8529 MINUS_EXPR, type);
8530 minus_lit0 = 0;
8533 if (minus_lit0)
8535 if (con0 == 0)
8536 return fold_convert (type,
8537 associate_trees (var0, minus_lit0,
8538 MINUS_EXPR, type));
8539 else
8541 con0 = associate_trees (con0, minus_lit0,
8542 MINUS_EXPR, type);
8543 return fold_convert (type,
8544 associate_trees (var0, con0,
8545 PLUS_EXPR, type));
8549 con0 = associate_trees (con0, lit0, code, type);
8550 return fold_convert (type, associate_trees (var0, con0,
8551 code, type));
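          /* Editorial illustration (not part of the original source): for
             integer x, (x + 1) + 2 splits into the variable x and the
             literals 1 and 2, which reassociate to x + 3.  */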
8555 return NULL_TREE;
8557 case MINUS_EXPR:
8558 /* A - (-B) -> A + B */
8559 if (TREE_CODE (arg1) == NEGATE_EXPR)
8560 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8561 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8562 if (TREE_CODE (arg0) == NEGATE_EXPR
8563 && (FLOAT_TYPE_P (type)
8564 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8565 && negate_expr_p (arg1)
8566 && reorder_operands_p (arg0, arg1))
8567 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8568 TREE_OPERAND (arg0, 0));
8569 /* Convert -A - 1 to ~A. */
8570 if (INTEGRAL_TYPE_P (type)
8571 && TREE_CODE (arg0) == NEGATE_EXPR
8572 && integer_onep (arg1))
8573 return fold_build1 (BIT_NOT_EXPR, type,
8574 fold_convert (type, TREE_OPERAND (arg0, 0)));
8576 /* Convert -1 - A to ~A. */
8577 if (INTEGRAL_TYPE_P (type)
8578 && integer_all_onesp (arg0))
8579 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8581 if (! FLOAT_TYPE_P (type))
8583 if (integer_zerop (arg0))
8584 return negate_expr (fold_convert (type, arg1));
8585 if (integer_zerop (arg1))
8586 return non_lvalue (fold_convert (type, arg0));
8588 /* Fold A - (A & B) into ~B & A. */
8589 if (!TREE_SIDE_EFFECTS (arg0)
8590 && TREE_CODE (arg1) == BIT_AND_EXPR)
8592 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8593 return fold_build2 (BIT_AND_EXPR, type,
8594 fold_build1 (BIT_NOT_EXPR, type,
8595 TREE_OPERAND (arg1, 0)),
8596 arg0);
8597 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8598 return fold_build2 (BIT_AND_EXPR, type,
8599 fold_build1 (BIT_NOT_EXPR, type,
8600 TREE_OPERAND (arg1, 1)),
8601 arg0);
8604 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8605 any power of 2 minus 1. */
8606 if (TREE_CODE (arg0) == BIT_AND_EXPR
8607 && TREE_CODE (arg1) == BIT_AND_EXPR
8608 && operand_equal_p (TREE_OPERAND (arg0, 0),
8609 TREE_OPERAND (arg1, 0), 0))
8611 tree mask0 = TREE_OPERAND (arg0, 1);
8612 tree mask1 = TREE_OPERAND (arg1, 1);
8613 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8615 if (operand_equal_p (tem, mask1, 0))
8617 tem = fold_build2 (BIT_XOR_EXPR, type,
8618 TREE_OPERAND (arg0, 0), mask1);
8619 return fold_build2 (MINUS_EXPR, type, tem, mask1);
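              /* Editorial illustration (not part of the original source):
                 with mask 7 (a power of 2 minus 1) and a = 13,
                 (a & ~7) - (a & 7) is 8 - 5 = 3, and (a ^ 7) - 7 is
                 10 - 7 = 3; the two forms agree for any such mask.  */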
8624 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8625 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8626 return non_lvalue (fold_convert (type, arg0));
8628 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8629 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8630 (-ARG1 + ARG0) reduces to -ARG1. */
8631 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8632 return negate_expr (fold_convert (type, arg1));
8634 /* Fold &x - &x. This can happen from &x.foo - &x.
8635 This is unsafe for certain floats even in non-IEEE formats.
8636 In IEEE, it is unsafe because it gives the wrong result for NaNs.
8637 Also note that operand_equal_p is always false if an operand
8638 is volatile. */
8640 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8641 && operand_equal_p (arg0, arg1, 0))
8642 return fold_convert (type, integer_zero_node);
8644 /* A - B -> A + (-B) if B is easily negatable. */
8645 if (negate_expr_p (arg1)
8646 && ((FLOAT_TYPE_P (type)
8647 /* Avoid this transformation if B is a positive REAL_CST. */
8648 && (TREE_CODE (arg1) != REAL_CST
8649 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8650 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8651 return fold_build2 (PLUS_EXPR, type,
8652 fold_convert (type, arg0),
8653 fold_convert (type, negate_expr (arg1)));
8655 /* Try folding difference of addresses. */
8657 HOST_WIDE_INT diff;
8659 if ((TREE_CODE (arg0) == ADDR_EXPR
8660 || TREE_CODE (arg1) == ADDR_EXPR)
8661 && ptr_difference_const (arg0, arg1, &diff))
8662 return build_int_cst_type (type, diff);
8665 /* Fold &a[i] - &a[j] to (i - j) scaled by the element size. */
8666 if (TREE_CODE (arg0) == ADDR_EXPR
8667 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8668 && TREE_CODE (arg1) == ADDR_EXPR
8669 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8671 tree aref0 = TREE_OPERAND (arg0, 0);
8672 tree aref1 = TREE_OPERAND (arg1, 0);
8673 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8674 TREE_OPERAND (aref1, 0), 0))
8676 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8677 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8678 tree esz = array_ref_element_size (aref0);
8679 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8680 return fold_build2 (MULT_EXPR, type, diff,
8681 fold_convert (type, esz));
8686 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8687 of the array. The loop optimizer sometimes produces this type of
8688 expression. */
8689 if (TREE_CODE (arg0) == ADDR_EXPR)
8691 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8692 if (tem)
8693 return fold_convert (type, tem);
8696 if (flag_unsafe_math_optimizations
8697 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8698 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8699 && (tem = distribute_real_division (code, type, arg0, arg1)))
8700 return tem;
8702 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8703 same or one. */
8704 if ((TREE_CODE (arg0) == MULT_EXPR
8705 || TREE_CODE (arg1) == MULT_EXPR)
8706 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8708 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8709 if (tem)
8710 return tem;
8713 goto associate;
8715 case MULT_EXPR:
8716 /* (-A) * (-B) -> A * B */
8717 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8718 return fold_build2 (MULT_EXPR, type,
8719 TREE_OPERAND (arg0, 0),
8720 negate_expr (arg1));
8721 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8722 return fold_build2 (MULT_EXPR, type,
8723 negate_expr (arg0),
8724 TREE_OPERAND (arg1, 0));
8726 if (! FLOAT_TYPE_P (type))
8728 if (integer_zerop (arg1))
8729 return omit_one_operand (type, arg1, arg0);
8730 if (integer_onep (arg1))
8731 return non_lvalue (fold_convert (type, arg0));
8732 /* Transform x * -1 into -x. */
8733 if (integer_all_onesp (arg1))
8734 return fold_convert (type, negate_expr (arg0));
8736 /* (a * (1 << b)) is (a << b) */
8737 if (TREE_CODE (arg1) == LSHIFT_EXPR
8738 && integer_onep (TREE_OPERAND (arg1, 0)))
8739 return fold_build2 (LSHIFT_EXPR, type, arg0,
8740 TREE_OPERAND (arg1, 1));
8741 if (TREE_CODE (arg0) == LSHIFT_EXPR
8742 && integer_onep (TREE_OPERAND (arg0, 0)))
8743 return fold_build2 (LSHIFT_EXPR, type, arg1,
8744 TREE_OPERAND (arg0, 1));
8746 if (TREE_CODE (arg1) == INTEGER_CST
8747 && 0 != (tem = extract_muldiv (op0,
8748 fold_convert (type, arg1),
8749 code, NULL_TREE)))
8750 return fold_convert (type, tem);
8753 else
8755 /* Maybe fold x * 0 to 0. The expressions aren't the same
8756 when x is NaN, since x * 0 is also NaN. Nor are they the
8757 same in modes with signed zeros, since multiplying a
8758 negative value by 0 gives -0, not +0. */
8759 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8760 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8761 && real_zerop (arg1))
8762 return omit_one_operand (type, arg1, arg0);
8763 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8764 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8765 && real_onep (arg1))
8766 return non_lvalue (fold_convert (type, arg0));
8768 /* Transform x * -1.0 into -x. */
8769 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8770 && real_minus_onep (arg1))
8771 return fold_convert (type, negate_expr (arg0));
8773 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8774 if (flag_unsafe_math_optimizations
8775 && TREE_CODE (arg0) == RDIV_EXPR
8776 && TREE_CODE (arg1) == REAL_CST
8777 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8779 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8780 arg1, 0);
8781 if (tem)
8782 return fold_build2 (RDIV_EXPR, type, tem,
8783 TREE_OPERAND (arg0, 1));
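              /* Editorial illustration (not part of the original source):
                 under -funsafe-math-optimizations, (2.0/x) * 3.0 becomes
                 6.0/x -- one division instead of a division and a
                 multiplication.  */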
8786 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8787 if (operand_equal_p (arg0, arg1, 0))
8789 tree tem = fold_strip_sign_ops (arg0);
8790 if (tem != NULL_TREE)
8792 tem = fold_convert (type, tem);
8793 return fold_build2 (MULT_EXPR, type, tem, tem);
8797 if (flag_unsafe_math_optimizations)
8799 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8800 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8802 /* Optimizations of root(...)*root(...). */
8803 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8805 tree rootfn, arg, arglist;
8806 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8807 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8809 /* Optimize sqrt(x)*sqrt(x) as x. */
8810 if (BUILTIN_SQRT_P (fcode0)
8811 && operand_equal_p (arg00, arg10, 0)
8812 && ! HONOR_SNANS (TYPE_MODE (type)))
8813 return arg00;
8815 /* Optimize root(x)*root(y) as root(x*y). */
8816 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8817 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8818 arglist = build_tree_list (NULL_TREE, arg);
8819 return build_function_call_expr (rootfn, arglist);
8822 /* Optimize expN(x)*expN(y) as expN(x+y). */
8823 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8825 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8826 tree arg = fold_build2 (PLUS_EXPR, type,
8827 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8828 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8829 tree arglist = build_tree_list (NULL_TREE, arg);
8830 return build_function_call_expr (expfn, arglist);
8833 /* Optimizations of pow(...)*pow(...). */
8834 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8835 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8836 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8838 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8839 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8840 1)));
8841 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8842 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8843 1)));
8845 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8846 if (operand_equal_p (arg01, arg11, 0))
8848 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8849 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8850 tree arglist = tree_cons (NULL_TREE, arg,
8851 build_tree_list (NULL_TREE,
8852 arg01));
8853 return build_function_call_expr (powfn, arglist);
8856 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8857 if (operand_equal_p (arg00, arg10, 0))
8859 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8860 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8861 tree arglist = tree_cons (NULL_TREE, arg00,
8862 build_tree_list (NULL_TREE,
8863 arg));
8864 return build_function_call_expr (powfn, arglist);
8868 /* Optimize tan(x)*cos(x) as sin(x). */
8869 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8870 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8871 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8872 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8873 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8874 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8875 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8876 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8878 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8880 if (sinfn != NULL_TREE)
8881 return build_function_call_expr (sinfn,
8882 TREE_OPERAND (arg0, 1));
8885 /* Optimize x*pow(x,c) as pow(x,c+1). */
8886 if (fcode1 == BUILT_IN_POW
8887 || fcode1 == BUILT_IN_POWF
8888 || fcode1 == BUILT_IN_POWL)
8890 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8891 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8892 1)));
8893 if (TREE_CODE (arg11) == REAL_CST
8894 && ! TREE_CONSTANT_OVERFLOW (arg11)
8895 && operand_equal_p (arg0, arg10, 0))
8897 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8898 REAL_VALUE_TYPE c;
8899 tree arg, arglist;
8901 c = TREE_REAL_CST (arg11);
8902 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8903 arg = build_real (type, c);
8904 arglist = build_tree_list (NULL_TREE, arg);
8905 arglist = tree_cons (NULL_TREE, arg0, arglist);
8906 return build_function_call_expr (powfn, arglist);
8910 /* Optimize pow(x,c)*x as pow(x,c+1). */
8911 if (fcode0 == BUILT_IN_POW
8912 || fcode0 == BUILT_IN_POWF
8913 || fcode0 == BUILT_IN_POWL)
8915 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8916 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8917 1)));
8918 if (TREE_CODE (arg01) == REAL_CST
8919 && ! TREE_CONSTANT_OVERFLOW (arg01)
8920 && operand_equal_p (arg1, arg00, 0))
8922 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8923 REAL_VALUE_TYPE c;
8924 tree arg, arglist;
8926 c = TREE_REAL_CST (arg01);
8927 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8928 arg = build_real (type, c);
8929 arglist = build_tree_list (NULL_TREE, arg);
8930 arglist = tree_cons (NULL_TREE, arg1, arglist);
8931 return build_function_call_expr (powfn, arglist);
8935 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8936 if (! optimize_size
8937 && operand_equal_p (arg0, arg1, 0))
8939 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8941 if (powfn)
8943 tree arg = build_real (type, dconst2);
8944 tree arglist = build_tree_list (NULL_TREE, arg);
8945 arglist = tree_cons (NULL_TREE, arg0, arglist);
8946 return build_function_call_expr (powfn, arglist);
8951 goto associate;
8953 case BIT_IOR_EXPR:
8954 bit_ior:
8955 if (integer_all_onesp (arg1))
8956 return omit_one_operand (type, arg1, arg0);
8957 if (integer_zerop (arg1))
8958 return non_lvalue (fold_convert (type, arg0));
8959 if (operand_equal_p (arg0, arg1, 0))
8960 return non_lvalue (fold_convert (type, arg0));
8962 /* ~X | X is -1. */
8963 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8964 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8966 t1 = build_int_cst (type, -1);
8967 t1 = force_fit_type (t1, 0, false, false);
8968 return omit_one_operand (type, t1, arg1);
8971 /* X | ~X is -1. */
8972 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8973 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8975 t1 = build_int_cst (type, -1);
8976 t1 = force_fit_type (t1, 0, false, false);
8977 return omit_one_operand (type, t1, arg0);
8980 /* Canonicalize (X & C1) | C2. */
8981 if (TREE_CODE (arg0) == BIT_AND_EXPR
8982 && TREE_CODE (arg1) == INTEGER_CST
8983 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8985 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
8986 int width = TYPE_PRECISION (type);
8987 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
8988 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8989 hi2 = TREE_INT_CST_HIGH (arg1);
8990 lo2 = TREE_INT_CST_LOW (arg1);
8992 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
8993 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
8994 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8996 if (width > HOST_BITS_PER_WIDE_INT)
8998 mhi = (unsigned HOST_WIDE_INT) -1
8999 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9000 mlo = -1;
9002 else
9004 mhi = 0;
9005 mlo = (unsigned HOST_WIDE_INT) -1
9006 >> (HOST_BITS_PER_WIDE_INT - width);
9009 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9010 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9011 return fold_build2 (BIT_IOR_EXPR, type,
9012 TREE_OPERAND (arg0, 0), arg1);
9014 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9015 hi1 &= mhi;
9016 lo1 &= mlo;
9017 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9018 return fold_build2 (BIT_IOR_EXPR, type,
9019 fold_build2 (BIT_AND_EXPR, type,
9020 TREE_OPERAND (arg0, 0),
9021 build_int_cst_wide (type,
9022 lo1 & ~lo2,
9023 hi1 & ~hi2)),
9024 arg1);
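          /* Editorial illustration (not part of the original source):
             (x & 0xff) | 0x0f keeps only the mask bits not already set by
             the constant, becoming (x & 0xf0) | 0x0f.  */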
9027 /* (X & Y) | Y is (X, Y). */
9028 if (TREE_CODE (arg0) == BIT_AND_EXPR
9029 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9030 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9031 /* (X & Y) | X is (Y, X). */
9032 if (TREE_CODE (arg0) == BIT_AND_EXPR
9033 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9034 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9035 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9036 /* X | (X & Y) is (Y, X). */
9037 if (TREE_CODE (arg1) == BIT_AND_EXPR
9038 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9039 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9040 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9041 /* X | (Y & X) is (Y, X). */
9042 if (TREE_CODE (arg1) == BIT_AND_EXPR
9043 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9044 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9045 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9047 t1 = distribute_bit_expr (code, type, arg0, arg1);
9048 if (t1 != NULL_TREE)
9049 return t1;
9051 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9053 This results in more efficient code for machines without a NAND
9054 instruction. Combine will canonicalize to the first form
9055 which will allow use of NAND instructions provided by the
9056 backend if they exist. */
9057 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9058 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9060 return fold_build1 (BIT_NOT_EXPR, type,
9061 build2 (BIT_AND_EXPR, type,
9062 TREE_OPERAND (arg0, 0),
9063 TREE_OPERAND (arg1, 0)));
9066 /* See if this can be simplified into a rotate first. If that
9067 is unsuccessful continue in the association code. */
9068 goto bit_rotate;
9070 case BIT_XOR_EXPR:
9071 if (integer_zerop (arg1))
9072 return non_lvalue (fold_convert (type, arg0));
9073 if (integer_all_onesp (arg1))
9074 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9075 if (operand_equal_p (arg0, arg1, 0))
9076 return omit_one_operand (type, integer_zero_node, arg0);
9078 /* ~X ^ X is -1. */
9079 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9080 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9082 t1 = build_int_cst (type, -1);
9083 t1 = force_fit_type (t1, 0, false, false);
9084 return omit_one_operand (type, t1, arg1);
9087 /* X ^ ~X is -1. */
9088 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9089 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9091 t1 = build_int_cst (type, -1);
9092 t1 = force_fit_type (t1, 0, false, false);
9093 return omit_one_operand (type, t1, arg0);
9096 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9097 with a constant, and the two constants have no bits in common,
9098 we should treat this as a BIT_IOR_EXPR since this may produce more
9099 simplifications. */
9100 if (TREE_CODE (arg0) == BIT_AND_EXPR
9101 && TREE_CODE (arg1) == BIT_AND_EXPR
9102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9103 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9104 && integer_zerop (const_binop (BIT_AND_EXPR,
9105 TREE_OPERAND (arg0, 1),
9106 TREE_OPERAND (arg1, 1), 0)))
9108 code = BIT_IOR_EXPR;
9109 goto bit_ior;
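/* E.g. (X & 0xf0) ^ (Y & 0x0f): the two masks share no bits, so the
   expression is equivalent to (X & 0xf0) | (Y & 0x0f) and can use
   the IOR folds above.  */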
9112 /* (X | Y) ^ X -> Y & ~X. */
9113 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9114 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9116 tree t2 = TREE_OPERAND (arg0, 1);
9117 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9118 arg1);
9119 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9120 fold_convert (type, t1));
9121 return t1;
9124 /* (Y | X) ^ X -> Y & ~X. */
9125 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9126 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9128 tree t2 = TREE_OPERAND (arg0, 0);
9129 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9130 arg1);
9131 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9132 fold_convert (type, t1));
9133 return t1;
9136 /* X ^ (X | Y) -> Y & ~X. */
9137 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9138 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9140 tree t2 = TREE_OPERAND (arg1, 1);
9141 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9142 arg0);
9143 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9144 fold_convert (type, t1));
9145 return t1;
9148 /* X ^ (Y | X) -> Y & ~X. */
9149 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9150 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9152 tree t2 = TREE_OPERAND (arg1, 0);
9153 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9154 arg0);
9155 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9156 fold_convert (type, t1));
9157 return t1;
9160 /* Convert ~X ^ ~Y to X ^ Y. */
9161 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9162 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9163 return fold_build2 (code, type,
9164 fold_convert (type, TREE_OPERAND (arg0, 0)),
9165 fold_convert (type, TREE_OPERAND (arg1, 0)));
9167 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9168 if (TREE_CODE (arg0) == BIT_AND_EXPR
9169 && integer_onep (TREE_OPERAND (arg0, 1))
9170 && integer_onep (arg1))
9171 return fold_build2 (EQ_EXPR, type, arg0,
9172 build_int_cst (TREE_TYPE (arg0), 0));
9174 /* Fold (X & Y) ^ Y as ~X & Y. */
9175 if (TREE_CODE (arg0) == BIT_AND_EXPR
9176 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9178 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9179 return fold_build2 (BIT_AND_EXPR, type,
9180 fold_build1 (BIT_NOT_EXPR, type, tem),
9181 fold_convert (type, arg1));
9183 /* Fold (X & Y) ^ X as ~Y & X. */
9184 if (TREE_CODE (arg0) == BIT_AND_EXPR
9185 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9186 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9188 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9189 return fold_build2 (BIT_AND_EXPR, type,
9190 fold_build1 (BIT_NOT_EXPR, type, tem),
9191 fold_convert (type, arg1));
9193 /* Fold X ^ (X & Y) as X & ~Y. */
9194 if (TREE_CODE (arg1) == BIT_AND_EXPR
9195 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9197 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9198 return fold_build2 (BIT_AND_EXPR, type,
9199 fold_convert (type, arg0),
9200 fold_build1 (BIT_NOT_EXPR, type, tem));
9202 /* Fold X ^ (Y & X) as ~Y & X. */
9203 if (TREE_CODE (arg1) == BIT_AND_EXPR
9204 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9205 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9207 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9208 return fold_build2 (BIT_AND_EXPR, type,
9209 fold_build1 (BIT_NOT_EXPR, type, tem),
9210 fold_convert (type, arg0));
9213 /* See if this can be simplified into a rotate first. If that
9214 is unsuccessful continue in the association code. */
9215 goto bit_rotate;
9217 case BIT_AND_EXPR:
9218 if (integer_all_onesp (arg1))
9219 return non_lvalue (fold_convert (type, arg0));
9220 if (integer_zerop (arg1))
9221 return omit_one_operand (type, arg1, arg0);
9222 if (operand_equal_p (arg0, arg1, 0))
9223 return non_lvalue (fold_convert (type, arg0));
9225 /* ~X & X is always zero. */
9226 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9227 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9228 return omit_one_operand (type, integer_zero_node, arg1);
9230 /* X & ~X is always zero. */
9231 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9232 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9233 return omit_one_operand (type, integer_zero_node, arg0);
9235 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9236 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9237 && TREE_CODE (arg1) == INTEGER_CST
9238 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9239 return fold_build2 (BIT_IOR_EXPR, type,
9240 fold_build2 (BIT_AND_EXPR, type,
9241 TREE_OPERAND (arg0, 0), arg1),
9242 fold_build2 (BIT_AND_EXPR, type,
9243 TREE_OPERAND (arg0, 1), arg1));
9245 /* (X | Y) & Y is (X, Y). */
9246 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9247 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9248 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9249 /* (X | Y) & X is (Y, X). */
9250 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9251 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9252 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9253 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9254 /* X & (X | Y) is (Y, X). */
9255 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9256 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9257 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9258 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9259 /* X & (Y | X) is (Y, X). */
9260 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9261 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9262 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9263 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9265 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9266 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9267 && integer_onep (TREE_OPERAND (arg0, 1))
9268 && integer_onep (arg1))
9270 tem = TREE_OPERAND (arg0, 0);
9271 return fold_build2 (EQ_EXPR, type,
9272 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9273 build_int_cst (TREE_TYPE (tem), 1)),
9274 build_int_cst (TREE_TYPE (tem), 0));
9276 /* Fold ~X & 1 as (X & 1) == 0. */
9277 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9278 && integer_onep (arg1))
9280 tem = TREE_OPERAND (arg0, 0);
9281 return fold_build2 (EQ_EXPR, type,
9282 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9283 build_int_cst (TREE_TYPE (tem), 1)),
9284 build_int_cst (TREE_TYPE (tem), 0));
9287 /* Fold (X ^ Y) & Y as ~X & Y. */
9288 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9289 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9291 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9292 return fold_build2 (BIT_AND_EXPR, type,
9293 fold_build1 (BIT_NOT_EXPR, type, tem),
9294 fold_convert (type, arg1));
9296 /* Fold (X ^ Y) & X as ~Y & X. */
9297 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9298 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9299 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9301 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9302 return fold_build2 (BIT_AND_EXPR, type,
9303 fold_build1 (BIT_NOT_EXPR, type, tem),
9304 fold_convert (type, arg1));
9306 /* Fold X & (X ^ Y) as X & ~Y. */
9307 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9308 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9310 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9311 return fold_build2 (BIT_AND_EXPR, type,
9312 fold_convert (type, arg0),
9313 fold_build1 (BIT_NOT_EXPR, type, tem));
9315 /* Fold X & (Y ^ X) as ~Y & X. */
9316 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9317 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9318 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9320 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9321 return fold_build2 (BIT_AND_EXPR, type,
9322 fold_build1 (BIT_NOT_EXPR, type, tem),
9323 fold_convert (type, arg0));
9326 t1 = distribute_bit_expr (code, type, arg0, arg1);
9327 if (t1 != NULL_TREE)
9328 return t1;
9329 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9330 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9331 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9333 unsigned int prec
9334 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9336 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9337 && (~TREE_INT_CST_LOW (arg1)
9338 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9339 return fold_convert (type, TREE_OPERAND (arg0, 0));
9342 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9344 This results in more efficient code for machines without a NOR
9345 instruction. Combine will canonicalize to the first form
9346 which will allow use of NOR instructions provided by the
9347 backend if they exist. */
9348 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9349 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9351 return fold_build1 (BIT_NOT_EXPR, type,
9352 build2 (BIT_IOR_EXPR, type,
9353 TREE_OPERAND (arg0, 0),
9354 TREE_OPERAND (arg1, 0)));
9357 goto associate;
9359 case RDIV_EXPR:
9360 /* Don't touch a floating-point divide by zero unless the mode
9361 of the constant can represent infinity. */
9362 if (TREE_CODE (arg1) == REAL_CST
9363 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9364 && real_zerop (arg1))
9365 return NULL_TREE;
9367 /* Optimize A / A to 1.0 if we don't care about
9368 NaNs or Infinities. Skip the transformation
9369 for non-real operands. */
9370 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9371 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9372 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9373 && operand_equal_p (arg0, arg1, 0))
9375 tree r = build_real (TREE_TYPE (arg0), dconst1);
9377 return omit_two_operands (type, r, arg0, arg1);
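/* The guards above matter because the identity fails for exceptional
   values: NaN/NaN, Inf/Inf and 0.0/0.0 all yield NaN, not 1.0.  */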
9380 /* The complex version of the above A / A optimization. */
9381 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9382 && operand_equal_p (arg0, arg1, 0))
9384 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9385 if (! HONOR_NANS (TYPE_MODE (elem_type))
9386 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9388 tree r = build_real (elem_type, dconst1);
9389 /* omit_two_operands will call fold_convert for us. */
9390 return omit_two_operands (type, r, arg0, arg1);
9394 /* (-A) / (-B) -> A / B */
9395 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9396 return fold_build2 (RDIV_EXPR, type,
9397 TREE_OPERAND (arg0, 0),
9398 negate_expr (arg1));
9399 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9400 return fold_build2 (RDIV_EXPR, type,
9401 negate_expr (arg0),
9402 TREE_OPERAND (arg1, 0));
9404 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9405 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9406 && real_onep (arg1))
9407 return non_lvalue (fold_convert (type, arg0));
9409 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9410 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9411 && real_minus_onep (arg1))
9412 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9414 /* If ARG1 is a constant, we can convert this to a multiply by the
9415 reciprocal. This does not have the same rounding properties,
9416 so only do this if -funsafe-math-optimizations. We can actually
9417 always safely do it if ARG1 is a power of two, but it's hard to
9418 tell if it is or not in a portable manner. */
9419 if (TREE_CODE (arg1) == REAL_CST)
9421 if (flag_unsafe_math_optimizations
9422 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9423 arg1, 0)))
9424 return fold_build2 (MULT_EXPR, type, arg0, tem);
9425 /* Find the reciprocal if optimizing and the result is exact. */
9426 if (optimize)
9428 REAL_VALUE_TYPE r;
9429 r = TREE_REAL_CST (arg1);
9430 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9432 tem = build_real (type, r);
9433 return fold_build2 (MULT_EXPR, type,
9434 fold_convert (type, arg0), tem);
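/* E.g. x / 2.0 becomes x * 0.5 whenever we are optimizing, since 0.5
   is an exact binary reciprocal, while x / 10.0 becomes x * 0.1 only
   under -funsafe-math-optimizations, 0.1 being inexact in binary
   floating point.  */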
9438 /* Convert A/B/C to A/(B*C). */
9439 if (flag_unsafe_math_optimizations
9440 && TREE_CODE (arg0) == RDIV_EXPR)
9441 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9442 fold_build2 (MULT_EXPR, type,
9443 TREE_OPERAND (arg0, 1), arg1));
9445 /* Convert A/(B/C) to (A/B)*C. */
9446 if (flag_unsafe_math_optimizations
9447 && TREE_CODE (arg1) == RDIV_EXPR)
9448 return fold_build2 (MULT_EXPR, type,
9449 fold_build2 (RDIV_EXPR, type, arg0,
9450 TREE_OPERAND (arg1, 0)),
9451 TREE_OPERAND (arg1, 1));
9453 /* Convert C1/(X*C2) into (C1/C2)/X. */
9454 if (flag_unsafe_math_optimizations
9455 && TREE_CODE (arg1) == MULT_EXPR
9456 && TREE_CODE (arg0) == REAL_CST
9457 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9459 tree tem = const_binop (RDIV_EXPR, arg0,
9460 TREE_OPERAND (arg1, 1), 0);
9461 if (tem)
9462 return fold_build2 (RDIV_EXPR, type, tem,
9463 TREE_OPERAND (arg1, 0));
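/* E.g. under -funsafe-math-optimizations, 6.0 / (x * 2.0) becomes
   3.0 / x.  */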
9466 if (flag_unsafe_math_optimizations)
9468 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9469 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9471 /* Optimize sin(x)/cos(x) as tan(x). */
9472 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9473 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9474 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9475 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9476 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9478 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9480 if (tanfn != NULL_TREE)
9481 return build_function_call_expr (tanfn,
9482 TREE_OPERAND (arg0, 1));
9485 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9486 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9487 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9488 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9489 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9490 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9492 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9494 if (tanfn != NULL_TREE)
9496 tree tmp = TREE_OPERAND (arg0, 1);
9497 tmp = build_function_call_expr (tanfn, tmp);
9498 return fold_build2 (RDIV_EXPR, type,
9499 build_real (type, dconst1), tmp);
9503 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9504 NaNs or Infinities. */
9505 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9506 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9507 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9509 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9510 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9512 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9513 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9514 && operand_equal_p (arg00, arg01, 0))
9516 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9518 if (cosfn != NULL_TREE)
9519 return build_function_call_expr (cosfn,
9520 TREE_OPERAND (arg0, 1));
9524 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9525 NaNs or Infinities. */
9526 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9527 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9528 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9530 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9531 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9533 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9534 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9535 && operand_equal_p (arg00, arg01, 0))
9537 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9539 if (cosfn != NULL_TREE)
9541 tree tmp = TREE_OPERAND (arg0, 1);
9542 tmp = build_function_call_expr (cosfn, tmp);
9543 return fold_build2 (RDIV_EXPR, type,
9544 build_real (type, dconst1),
9545 tmp);
9550 /* Optimize pow(x,c)/x as pow(x,c-1). */
9551 if (fcode0 == BUILT_IN_POW
9552 || fcode0 == BUILT_IN_POWF
9553 || fcode0 == BUILT_IN_POWL)
9555 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9556 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9557 if (TREE_CODE (arg01) == REAL_CST
9558 && ! TREE_CONSTANT_OVERFLOW (arg01)
9559 && operand_equal_p (arg1, arg00, 0))
9561 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9562 REAL_VALUE_TYPE c;
9563 tree arg, arglist;
9565 c = TREE_REAL_CST (arg01);
9566 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9567 arg = build_real (type, c);
9568 arglist = build_tree_list (NULL_TREE, arg);
9569 arglist = tree_cons (NULL_TREE, arg1, arglist);
9570 return build_function_call_expr (powfn, arglist);
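/* E.g. pow (x, 3.0) / x becomes pow (x, 2.0), subtracting one from
   the constant exponent.  */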
9574 /* Optimize x/expN(y) into x*expN(-y). */
9575 if (BUILTIN_EXPONENT_P (fcode1))
9577 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9578 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9579 tree arglist = build_tree_list (NULL_TREE,
9580 fold_convert (type, arg));
9581 arg1 = build_function_call_expr (expfn, arglist);
9582 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9585 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9586 if (fcode1 == BUILT_IN_POW
9587 || fcode1 == BUILT_IN_POWF
9588 || fcode1 == BUILT_IN_POWL)
9590 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9591 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9592 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9593 tree neg11 = fold_convert (type, negate_expr (arg11));
9594 tree arglist = tree_cons (NULL_TREE, arg10,
9595 build_tree_list (NULL_TREE, neg11));
9596 arg1 = build_function_call_expr (powfn, arglist);
9597 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9600 return NULL_TREE;
9602 case TRUNC_DIV_EXPR:
9603 case ROUND_DIV_EXPR:
9604 case FLOOR_DIV_EXPR:
9605 case CEIL_DIV_EXPR:
9606 case EXACT_DIV_EXPR:
9607 if (integer_onep (arg1))
9608 return non_lvalue (fold_convert (type, arg0));
9609 if (integer_zerop (arg1))
9610 return NULL_TREE;
9611 /* X / -1 is -X. */
9612 if (!TYPE_UNSIGNED (type)
9613 && TREE_CODE (arg1) == INTEGER_CST
9614 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9615 && TREE_INT_CST_HIGH (arg1) == -1)
9616 return fold_convert (type, negate_expr (arg0));
9618 /* Convert -A / -B to A / B when the type is signed and overflow is
9619 undefined. */
9620 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9621 && TREE_CODE (arg0) == NEGATE_EXPR
9622 && negate_expr_p (arg1))
9623 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9624 negate_expr (arg1));
9625 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9626 && TREE_CODE (arg1) == NEGATE_EXPR
9627 && negate_expr_p (arg0))
9628 return fold_build2 (code, type, negate_expr (arg0),
9629 TREE_OPERAND (arg1, 0));
9631 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9632 operation, EXACT_DIV_EXPR.
9634 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9635 At one time others generated faster code, but it's not clear if they do
9636 after the last round of changes to the DIV code in expmed.c. */
9637 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9638 && multiple_of_p (type, arg0, arg1))
9639 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9641 if (TREE_CODE (arg1) == INTEGER_CST
9642 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9643 return fold_convert (type, tem);
9645 return NULL_TREE;
9647 case CEIL_MOD_EXPR:
9648 case FLOOR_MOD_EXPR:
9649 case ROUND_MOD_EXPR:
9650 case TRUNC_MOD_EXPR:
9651 /* X % 1 is always zero, but be sure to preserve any side
9652 effects in X. */
9653 if (integer_onep (arg1))
9654 return omit_one_operand (type, integer_zero_node, arg0);
9656 /* For X % 0, return X % 0 unchanged so that we get the
9657 proper warnings and errors. */
9658 if (integer_zerop (arg1))
9659 return NULL_TREE;
9661 /* 0 % X is always zero, but be sure to preserve any side
9662 effects in X. Place this after checking for X == 0. */
9663 if (integer_zerop (arg0))
9664 return omit_one_operand (type, integer_zero_node, arg1);
9666 /* X % -1 is zero. */
9667 if (!TYPE_UNSIGNED (type)
9668 && TREE_CODE (arg1) == INTEGER_CST
9669 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9670 && TREE_INT_CST_HIGH (arg1) == -1)
9671 return omit_one_operand (type, integer_zero_node, arg0);
9673 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9674 i.e. "X % C" into "X & C2", if X and C are positive. */
9675 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9676 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
9677 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
9679 unsigned HOST_WIDE_INT high, low;
9680 tree mask;
9681 int l;
9683 l = tree_log2 (arg1);
9684 if (l >= HOST_BITS_PER_WIDE_INT)
9686 high = ((unsigned HOST_WIDE_INT) 1
9687 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
9688 low = -1;
9690 else
9692 high = 0;
9693 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
9696 mask = build_int_cst_wide (type, low, high);
9697 return fold_build2 (BIT_AND_EXPR, type,
9698 fold_convert (type, arg0), mask);
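/* E.g. with X known to be non-negative, X % 8 becomes X & 7: for a
   power-of-two divisor C the mask is C - 1.  */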
9701 /* X % -C is the same as X % C. */
9702 if (code == TRUNC_MOD_EXPR
9703 && !TYPE_UNSIGNED (type)
9704 && TREE_CODE (arg1) == INTEGER_CST
9705 && !TREE_CONSTANT_OVERFLOW (arg1)
9706 && TREE_INT_CST_HIGH (arg1) < 0
9707 && !flag_trapv
9708 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9709 && !sign_bit_p (arg1, arg1))
9710 return fold_build2 (code, type, fold_convert (type, arg0),
9711 fold_convert (type, negate_expr (arg1)));
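/* This is safe because truncating division gives X % -C and X % C
   the same result, e.g. 7 % -4 == 7 % 4 == 3.  */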
9713 /* X % -Y is the same as X % Y. */
9714 if (code == TRUNC_MOD_EXPR
9715 && !TYPE_UNSIGNED (type)
9716 && TREE_CODE (arg1) == NEGATE_EXPR
9717 && !flag_trapv)
9718 return fold_build2 (code, type, fold_convert (type, arg0),
9719 fold_convert (type, TREE_OPERAND (arg1, 0)));
9721 if (TREE_CODE (arg1) == INTEGER_CST
9722 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9723 return fold_convert (type, tem);
9725 return NULL_TREE;
9727 case LROTATE_EXPR:
9728 case RROTATE_EXPR:
9729 if (integer_all_onesp (arg0))
9730 return omit_one_operand (type, arg0, arg1);
9731 goto shift;
9733 case RSHIFT_EXPR:
9734 /* Optimize -1 >> x for arithmetic right shifts. */
9735 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9736 return omit_one_operand (type, arg0, arg1);
9737 /* ... fall through ... */
9739 case LSHIFT_EXPR:
9740 shift:
9741 if (integer_zerop (arg1))
9742 return non_lvalue (fold_convert (type, arg0));
9743 if (integer_zerop (arg0))
9744 return omit_one_operand (type, arg0, arg1);
9746 /* Since a negative shift count is not well-defined,
9747 don't try to compute it in the compiler. */
9748 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9749 return NULL_TREE;
9751 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9752 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
9753 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9754 && host_integerp (TREE_OPERAND (arg0, 1), false)
9755 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9757 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9758 + TREE_INT_CST_LOW (arg1));
9760 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9761 being well defined. */
9762 if (low >= TYPE_PRECISION (type))
9764 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9765 low = low % TYPE_PRECISION (type);
9766 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9767 return build_int_cst (type, 0);
9768 else
9769 low = TYPE_PRECISION (type) - 1;
9772 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9773 build_int_cst (type, low));
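/* E.g. (x >> 3) >> 5 becomes x >> 8.  If the combined count reaches
   the precision, say (x >> 20) >> 20 for a 32-bit type, the result
   folds to 0 for unsigned or left shifts and clamps to a shift by
   precision - 1 for signed right shifts, preserving sign-fill;
   rotate counts simply reduce modulo the precision.  */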
9776 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9777 into x & ((unsigned)-1 >> c) for unsigned types. */
9778 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9779 || (TYPE_UNSIGNED (type)
9780 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9781 && host_integerp (arg1, false)
9782 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9783 && host_integerp (TREE_OPERAND (arg0, 1), false)
9784 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9786 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9787 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9788 tree lshift;
9789 tree arg00;
9791 if (low0 == low1)
9793 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9795 lshift = build_int_cst (type, -1);
9796 lshift = int_const_binop (code, lshift, arg1, 0);
9798 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
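/* E.g. for a 32-bit type, (x >> 4) << 4 becomes x & 0xfffffff0,
   i.e. x & (-1 << 4); for unsigned x, (x << 4) >> 4 becomes
   x & 0x0fffffff.  */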
9802 /* Rewrite an LROTATE_EXPR by a constant into an
9803 RROTATE_EXPR by a new constant. */
9804 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9806 tree tem = build_int_cst (NULL_TREE,
9807 GET_MODE_BITSIZE (TYPE_MODE (type)));
9808 tem = fold_convert (TREE_TYPE (arg1), tem);
9809 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9810 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
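/* E.g. a 32-bit rotate left by 3 becomes a rotate right by 29.  */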
9813 /* If we have a rotate of a bit operation with the rotate count and
9814 the second operand of the bit operation both constant,
9815 permute the two operations. */
9816 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9817 && (TREE_CODE (arg0) == BIT_AND_EXPR
9818 || TREE_CODE (arg0) == BIT_IOR_EXPR
9819 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9820 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9821 return fold_build2 (TREE_CODE (arg0), type,
9822 fold_build2 (code, type,
9823 TREE_OPERAND (arg0, 0), arg1),
9824 fold_build2 (code, type,
9825 TREE_OPERAND (arg0, 1), arg1));
9827 /* Two consecutive rotates adding up to the width of the mode can
9828 be ignored. */
9829 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9830 && TREE_CODE (arg0) == RROTATE_EXPR
9831 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9832 && TREE_INT_CST_HIGH (arg1) == 0
9833 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
9834 && ((TREE_INT_CST_LOW (arg1)
9835 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
9836 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
9837 return TREE_OPERAND (arg0, 0);
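/* E.g. for a 32-bit type, rotating x right by 10 and then by 22
   leaves x unchanged.  */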
9839 return NULL_TREE;
9841 case MIN_EXPR:
9842 if (operand_equal_p (arg0, arg1, 0))
9843 return omit_one_operand (type, arg0, arg1);
9844 if (INTEGRAL_TYPE_P (type)
9845 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9846 return omit_one_operand (type, arg1, arg0);
9847 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
9848 if (tem)
9849 return tem;
9850 goto associate;
9852 case MAX_EXPR:
9853 if (operand_equal_p (arg0, arg1, 0))
9854 return omit_one_operand (type, arg0, arg1);
9855 if (INTEGRAL_TYPE_P (type)
9856 && TYPE_MAX_VALUE (type)
9857 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9858 return omit_one_operand (type, arg1, arg0);
9859 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
9860 if (tem)
9861 return tem;
9862 goto associate;
9864 case TRUTH_ANDIF_EXPR:
9865 /* Note that the operands of this must be ints
9866 and their values must be 0 or 1.
9867 ("true" is a fixed value perhaps depending on the language.) */
9868 /* If first arg is constant zero, return it. */
9869 if (integer_zerop (arg0))
9870 return fold_convert (type, arg0);
9871 case TRUTH_AND_EXPR:
9872 /* If either arg is constant true, drop it. */
9873 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9874 return non_lvalue (fold_convert (type, arg1));
9875 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
9876 /* Preserve sequence points. */
9877 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9878 return non_lvalue (fold_convert (type, arg0));
9879 /* If second arg is constant zero, result is zero, but first arg
9880 must be evaluated. */
9881 if (integer_zerop (arg1))
9882 return omit_one_operand (type, arg1, arg0);
9883 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9884 case will be handled here. */
9885 if (integer_zerop (arg0))
9886 return omit_one_operand (type, arg0, arg1);
9888 /* !X && X is always false. */
9889 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9890 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9891 return omit_one_operand (type, integer_zero_node, arg1);
9892 /* X && !X is always false. */
9893 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9894 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9895 return omit_one_operand (type, integer_zero_node, arg0);
9897 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
9898 means A >= Y && A != MAX, but in this case we know that
9899 A < X <= MAX. */
9901 if (!TREE_SIDE_EFFECTS (arg0)
9902 && !TREE_SIDE_EFFECTS (arg1))
9904 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
9905 if (tem && !operand_equal_p (tem, arg0, 0))
9906 return fold_build2 (code, type, tem, arg1);
9908 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
9909 if (tem && !operand_equal_p (tem, arg1, 0))
9910 return fold_build2 (code, type, arg0, tem);
9913 truth_andor:
9914 /* We only do these simplifications if we are optimizing. */
9915 if (!optimize)
9916 return NULL_TREE;
9918 /* Check for things like (A || B) && (A || C). We can convert this
9919 to A || (B && C). Note that either operator can be any of the four
9920 truth and/or operations and the transformation will still be
9921 valid. Also note that we only care about order for the
9922 ANDIF and ORIF operators. If B contains side effects, this
9923 might change the truth-value of A. */
9924 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9925 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9926 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9927 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9928 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9929 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9931 tree a00 = TREE_OPERAND (arg0, 0);
9932 tree a01 = TREE_OPERAND (arg0, 1);
9933 tree a10 = TREE_OPERAND (arg1, 0);
9934 tree a11 = TREE_OPERAND (arg1, 1);
9935 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9936 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9937 && (code == TRUTH_AND_EXPR
9938 || code == TRUTH_OR_EXPR));
9940 if (operand_equal_p (a00, a10, 0))
9941 return fold_build2 (TREE_CODE (arg0), type, a00,
9942 fold_build2 (code, type, a01, a11));
9943 else if (commutative && operand_equal_p (a00, a11, 0))
9944 return fold_build2 (TREE_CODE (arg0), type, a00,
9945 fold_build2 (code, type, a01, a10));
9946 else if (commutative && operand_equal_p (a01, a10, 0))
9947 return fold_build2 (TREE_CODE (arg0), type, a01,
9948 fold_build2 (code, type, a00, a11));
9950 /* This case is tricky because we must either have commutative
9951 operators or else A10 must not have side-effects. */
9953 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9954 && operand_equal_p (a01, a11, 0))
9955 return fold_build2 (TREE_CODE (arg0), type,
9956 fold_build2 (code, type, a00, a10),
9957 a01);
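/* E.g. (A || B) && (A || C) becomes A || (B && C), and with the
   commutative operators (B || A) && (A || C) does too, subject to
   the side-effect checks above.  */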
9960 /* See if we can build a range comparison. */
9961 if (0 != (tem = fold_range_test (code, type, op0, op1)))
9962 return tem;
9964 /* Check for the possibility of merging component references. If our
9965 lhs is another similar operation, try to merge its rhs with our
9966 rhs. Then try to merge our lhs and rhs. */
9967 if (TREE_CODE (arg0) == code
9968 && 0 != (tem = fold_truthop (code, type,
9969 TREE_OPERAND (arg0, 1), arg1)))
9970 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9972 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
9973 return tem;
9975 return NULL_TREE;
9977 case TRUTH_ORIF_EXPR:
9978 /* Note that the operands of this must be ints
9979 and their values must be 0 or true.
9980 ("true" is a fixed value perhaps depending on the language.) */
9981 /* If first arg is constant true, return it. */
9982 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9983 return fold_convert (type, arg0);
9984 case TRUTH_OR_EXPR:
9985 /* If either arg is constant zero, drop it. */
9986 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
9987 return non_lvalue (fold_convert (type, arg1));
9988 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
9989 /* Preserve sequence points. */
9990 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9991 return non_lvalue (fold_convert (type, arg0));
9992 /* If second arg is constant true, result is true, but we must
9993 evaluate first arg. */
9994 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
9995 return omit_one_operand (type, arg1, arg0);
9996 /* Likewise for first arg, but note this only occurs here for
9997 TRUTH_OR_EXPR. */
9998 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9999 return omit_one_operand (type, arg0, arg1);
10001 /* !X || X is always true. */
10002 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10003 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10004 return omit_one_operand (type, integer_one_node, arg1);
10005 /* X || !X is always true. */
10006 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10007 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10008 return omit_one_operand (type, integer_one_node, arg0);
10010 goto truth_andor;
10012 case TRUTH_XOR_EXPR:
10013 /* If the second arg is constant zero, drop it. */
10014 if (integer_zerop (arg1))
10015 return non_lvalue (fold_convert (type, arg0));
10016 /* If the second arg is constant true, this is a logical inversion. */
10017 if (integer_onep (arg1))
10019 /* Only call invert_truthvalue if operand is a truth value. */
10020 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10021 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10022 else
10023 tem = invert_truthvalue (arg0);
10024 return non_lvalue (fold_convert (type, tem));
10026 /* Identical arguments cancel to zero. */
10027 if (operand_equal_p (arg0, arg1, 0))
10028 return omit_one_operand (type, integer_zero_node, arg0);
10030 /* !X ^ X is always true. */
10031 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10032 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10033 return omit_one_operand (type, integer_one_node, arg1);
10035 /* X ^ !X is always true. */
10036 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10037 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10038 return omit_one_operand (type, integer_one_node, arg0);
10040 return NULL_TREE;
10042 case EQ_EXPR:
10043 case NE_EXPR:
10044 tem = fold_comparison (code, type, op0, op1);
10045 if (tem != NULL_TREE)
10046 return tem;
10048 /* bool_var != 0 becomes bool_var. */
10049 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10050 && code == NE_EXPR)
10051 return non_lvalue (fold_convert (type, arg0));
10053 /* bool_var == 1 becomes bool_var. */
10054 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10055 && code == EQ_EXPR)
10056 return non_lvalue (fold_convert (type, arg0));
10058 /* bool_var != 1 becomes !bool_var. */
10059 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10060 && code == NE_EXPR)
10061 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10063 /* bool_var == 0 becomes !bool_var. */
10064 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10065 && code == EQ_EXPR)
10066 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10068 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10069 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10070 && TREE_CODE (arg1) == INTEGER_CST)
10071 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10072 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10073 arg1));
10075 /* If this is an equality comparison of the address of a non-weak
10076 object against zero, then we know the result. */
10077 if (TREE_CODE (arg0) == ADDR_EXPR
10078 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10079 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10080 && integer_zerop (arg1))
10081 return constant_boolean_node (code != EQ_EXPR, type);
10083 /* If this is an equality comparison of the address of two non-weak,
10084 unaliased symbols neither of which are extern (since we do not
10085 have access to attributes for externs), then we know the result. */
10086 if (TREE_CODE (arg0) == ADDR_EXPR
10087 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10088 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10089 && ! lookup_attribute ("alias",
10090 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10091 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10092 && TREE_CODE (arg1) == ADDR_EXPR
10093 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10094 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10095 && ! lookup_attribute ("alias",
10096 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10097 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10099 /* We know that we're looking at the address of two
10100 non-weak, unaliased, static _DECL nodes.
10102 It is both wasteful and incorrect to call operand_equal_p
10103 to compare the two ADDR_EXPR nodes. It is wasteful in that
10104 all we need to do is test pointer equality for the arguments
10105 to the two ADDR_EXPR nodes. It is incorrect to use
10106 operand_equal_p as that function is NOT equivalent to a
10107 C equality test. It can in fact return false for two
10108 objects which would test as equal using the C equality
10109 operator. */
10110 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10111 return constant_boolean_node (equal
10112 ? code == EQ_EXPR : code != EQ_EXPR,
10113 type);
10116 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10117 a MINUS_EXPR of a constant, we can convert it into a comparison with
10118 a revised constant as long as no overflow occurs. */
10119 if (TREE_CODE (arg1) == INTEGER_CST
10120 && (TREE_CODE (arg0) == PLUS_EXPR
10121 || TREE_CODE (arg0) == MINUS_EXPR)
10122 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10123 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10124 ? MINUS_EXPR : PLUS_EXPR,
10125 arg1, TREE_OPERAND (arg0, 1), 0))
10126 && ! TREE_CONSTANT_OVERFLOW (tem))
10127 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
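/* E.g. X + 3 == 7 becomes X == 4, and X - 5 != 2 becomes X != 7,
   provided computing the revised constant does not overflow.  */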
10129 /* Similarly for a NEGATE_EXPR. */
10130 if (TREE_CODE (arg0) == NEGATE_EXPR
10131 && TREE_CODE (arg1) == INTEGER_CST
10132 && 0 != (tem = negate_expr (arg1))
10133 && TREE_CODE (tem) == INTEGER_CST
10134 && ! TREE_CONSTANT_OVERFLOW (tem))
10135 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10137 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10138 for !=. Don't do this for ordered comparisons due to overflow. */
10139 if (TREE_CODE (arg0) == MINUS_EXPR
10140 && integer_zerop (arg1))
10141 return fold_build2 (code, type,
10142 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10144 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10145 if (TREE_CODE (arg0) == ABS_EXPR
10146 && (integer_zerop (arg1) || real_zerop (arg1)))
10147 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10149 /* If this is an EQ or NE comparison with zero and ARG0 is
10150 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10151 two operations, but the latter can be done in one less insn
10152 on machines that have only two-operand insns or on which a
10153 constant cannot be the first operand. */
10154 if (TREE_CODE (arg0) == BIT_AND_EXPR
10155 && integer_zerop (arg1))
10157 tree arg00 = TREE_OPERAND (arg0, 0);
10158 tree arg01 = TREE_OPERAND (arg0, 1);
10159 if (TREE_CODE (arg00) == LSHIFT_EXPR
10160 && integer_onep (TREE_OPERAND (arg00, 0)))
10161 return
10162 fold_build2 (code, type,
10163 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10164 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10165 arg01, TREE_OPERAND (arg00, 1)),
10166 fold_convert (TREE_TYPE (arg0),
10167 integer_one_node)),
10168 arg1);
10169 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10170 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10171 return
10172 fold_build2 (code, type,
10173 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10174 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10175 arg00, TREE_OPERAND (arg01, 1)),
10176 fold_convert (TREE_TYPE (arg0),
10177 integer_one_node)),
10178 arg1);
10181 /* If this is an NE or EQ comparison of zero against the result of a
10182 signed MOD operation whose second operand is a power of 2, make
10183 the MOD operation unsigned since it is simpler and equivalent. */
10184 if (integer_zerop (arg1)
10185 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10186 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10187 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10188 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10189 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10190 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10192 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10193 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10194 fold_convert (newtype,
10195 TREE_OPERAND (arg0, 0)),
10196 fold_convert (newtype,
10197 TREE_OPERAND (arg0, 1)));
10199 return fold_build2 (code, type, newmod,
10200 fold_convert (newtype, arg1));
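/* E.g. with signed X, X % 4 == 0 becomes (unsigned) X % 4 == 0: the
   two forms differ only in the sign of a nonzero remainder, which
   cannot affect a comparison against zero.  */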
10203 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10204 C1 is a valid shift constant, and C2 is a power of two, i.e.
10205 a single bit. */
10206 if (TREE_CODE (arg0) == BIT_AND_EXPR
10207 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10208 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10209 == INTEGER_CST
10210 && integer_pow2p (TREE_OPERAND (arg0, 1))
10211 && integer_zerop (arg1))
10213 tree itype = TREE_TYPE (arg0);
10214 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10215 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10217 /* Check for a valid shift count. */
10218 if (TREE_INT_CST_HIGH (arg001) == 0
10219 && TREE_INT_CST_LOW (arg001) < prec)
10221 tree arg01 = TREE_OPERAND (arg0, 1);
10222 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10223 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10224 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10225 can be rewritten as (X & (C2 << C1)) != 0. */
10226 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10228 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10229 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10230 return fold_build2 (code, type, tem, arg1);
10232 /* Otherwise, for signed (arithmetic) shifts,
10233 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10234 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10235 else if (!TYPE_UNSIGNED (itype))
10236 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10237 arg000, build_int_cst (itype, 0));
10238 /* Otherwise, for unsigned (logical) shifts,
10239 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10240 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10241 else
10242 return omit_one_operand (type,
10243 code == EQ_EXPR ? integer_one_node
10244 : integer_zero_node,
10245 arg000);
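/* E.g. ((x >> 2) & 4) != 0 becomes (x & 16) != 0 since 4 << 2 still
   fits; for signed 32-bit x, ((x >> 31) & 2) != 0, where 2 << 31
   would overflow, becomes x < 0 instead.  */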
10249 /* If this is an NE comparison of zero with an AND of one, remove the
10250 comparison since the AND will give the correct value. */
10251 if (code == NE_EXPR
10252 && integer_zerop (arg1)
10253 && TREE_CODE (arg0) == BIT_AND_EXPR
10254 && integer_onep (TREE_OPERAND (arg0, 1)))
10255 return fold_convert (type, arg0);
10257 /* If we have (A & C) == C where C is a power of 2, convert this into
10258 (A & C) != 0. Similarly for NE_EXPR. */
10259 if (TREE_CODE (arg0) == BIT_AND_EXPR
10260 && integer_pow2p (TREE_OPERAND (arg0, 1))
10261 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10262 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10263 arg0, fold_convert (TREE_TYPE (arg0),
10264 integer_zero_node));
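/* E.g. (A & 8) == 8 becomes (A & 8) != 0, testing the single bit
   directly.  */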
10266 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10267 bit, then fold the expression into A < 0 or A >= 0. */
10268 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10269 if (tem)
10270 return tem;
10272 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10273 Similarly for NE_EXPR. */
10274 if (TREE_CODE (arg0) == BIT_AND_EXPR
10275 && TREE_CODE (arg1) == INTEGER_CST
10276 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10278 tree notc = fold_build1 (BIT_NOT_EXPR,
10279 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10280 TREE_OPERAND (arg0, 1));
10281 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10282 arg1, notc);
10283 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10284 if (integer_nonzerop (dandnotc))
10285 return omit_one_operand (type, rslt, arg0);
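/* E.g. (A & 12) == 1 folds to 0: the AND can never produce a value
   with bit 0 set, so the equality cannot hold.  */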
10288 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10289 Similarly for NE_EXPR. */
10290 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10291 && TREE_CODE (arg1) == INTEGER_CST
10292 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10294 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10295 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10296 TREE_OPERAND (arg0, 1), notd);
10297 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10298 if (integer_nonzerop (candnotd))
10299 return omit_one_operand (type, rslt, arg0);
10302 /* If this is a comparison of a field, we may be able to simplify it. */
10303 if (((TREE_CODE (arg0) == COMPONENT_REF
10304 && lang_hooks.can_use_bit_fields_p ())
10305 || TREE_CODE (arg0) == BIT_FIELD_REF)
10306 /* Handle the constant case even without -O
10307 to make sure the warnings are given. */
10308 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10310 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10311 if (t1)
10312 return t1;
10315 /* Optimize comparisons of strlen vs zero to a compare of the
10316 first character of the string vs zero. To wit,
10317 strlen(ptr) == 0 => *ptr == 0
10318 strlen(ptr) != 0 => *ptr != 0
10319 Other cases should reduce to one of these two (or a constant)
10320 due to the return value of strlen being unsigned. */
10321 if (TREE_CODE (arg0) == CALL_EXPR
10322 && integer_zerop (arg1))
10324 tree fndecl = get_callee_fndecl (arg0);
10325 tree arglist;
10327 if (fndecl
10328 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10329 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10330 && (arglist = TREE_OPERAND (arg0, 1))
10331 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10332 && ! TREE_CHAIN (arglist))
10334 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10335 return fold_build2 (code, type, iref,
10336 build_int_cst (TREE_TYPE (iref), 0));
10340 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10341 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10342 if (TREE_CODE (arg0) == RSHIFT_EXPR
10343 && integer_zerop (arg1)
10344 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10346 tree arg00 = TREE_OPERAND (arg0, 0);
10347 tree arg01 = TREE_OPERAND (arg0, 1);
10348 tree itype = TREE_TYPE (arg00);
10349 if (TREE_INT_CST_HIGH (arg01) == 0
10350 && TREE_INT_CST_LOW (arg01)
10351 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10353 if (TYPE_UNSIGNED (itype))
10355 itype = lang_hooks.types.signed_type (itype);
10356 arg00 = fold_convert (itype, arg00);
10358 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10359 type, arg00, build_int_cst (itype, 0));
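/* E.g. for 32-bit int x, (x >> 31) != 0 becomes x < 0 and
   (x >> 31) == 0 becomes x >= 0; an unsigned operand is first
   converted to the corresponding signed type.  */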
10363 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10364 if (integer_zerop (arg1)
10365 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10366 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10367 TREE_OPERAND (arg0, 1));
10369 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10370 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10371 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10372 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10373 build_int_cst (TREE_TYPE (arg1), 0));
10374 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10375 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10376 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10377 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10378 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10379 build_int_cst (TREE_TYPE (arg1), 0));
10381 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10382 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10383 && TREE_CODE (arg1) == INTEGER_CST
10384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10385 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10386 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10387 TREE_OPERAND (arg0, 1), arg1));
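/* E.g. (X ^ 5) == 12 becomes X == 9, since 5 ^ 12 == 9.  */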
10389 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10390 (X & C) == 0 when C is a single bit. */
10391 if (TREE_CODE (arg0) == BIT_AND_EXPR
10392 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10393 && integer_zerop (arg1)
10394 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10396 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10397 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10398 TREE_OPERAND (arg0, 1));
10399 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10400 type, tem, arg1);
10403 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10404 constant C is a power of two, i.e. a single bit. */
10405 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10406 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10407 && integer_zerop (arg1)
10408 && integer_pow2p (TREE_OPERAND (arg0, 1))
10409 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10410 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10412 tree arg00 = TREE_OPERAND (arg0, 0);
10413 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10414 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10417 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10418 when C is a power of two, i.e. a single bit. */
10419 if (TREE_CODE (arg0) == BIT_AND_EXPR
10420 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10421 && integer_zerop (arg1)
10422 && integer_pow2p (TREE_OPERAND (arg0, 1))
10423 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10424 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10426 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10427 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10428 arg000, TREE_OPERAND (arg0, 1));
10429 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10430 tem, build_int_cst (TREE_TYPE (tem), 0));
10433 /* If this is a comparison of two exprs that look like an
10434 ARRAY_REF of the same object, then we can fold this to a
10435 comparison of the two offsets. This is only safe for
10436 EQ_EXPR and NE_EXPR because of overflow issues. */
10438 tree base0, offset0, base1, offset1;
10440 if (extract_array_ref (arg0, &base0, &offset0)
10441 && extract_array_ref (arg1, &base1, &offset1)
10442 && operand_equal_p (base0, base1, 0))
10444 /* Handle no offsets on both sides specially. */
10445 if (offset0 == NULL_TREE && offset1 == NULL_TREE)
10446 return fold_build2 (code, type, integer_zero_node,
10447 integer_zero_node);
10449 if (!offset0 || !offset1
10450 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
10452 if (offset0 == NULL_TREE)
10453 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
10454 if (offset1 == NULL_TREE)
10455 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
10456 return fold_build2 (code, type, offset0, offset1);
10461 if (integer_zerop (arg1)
10462 && tree_expr_nonzero_p (arg0))
10464 tree res = constant_boolean_node (code == NE_EXPR, type);
10465 return omit_one_operand (type, res, arg0);
10467 return NULL_TREE;
10469 case LT_EXPR:
10470 case GT_EXPR:
10471 case LE_EXPR:
10472 case GE_EXPR:
10473 tem = fold_comparison (code, type, op0, op1);
10474 if (tem != NULL_TREE)
10475 return tem;
10477 /* Transform comparisons of the form X +- C CMP X. */
10478 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10479 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10480 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10481 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10482 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10483 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10484 && !(flag_wrapv || flag_trapv))))
10486 tree arg01 = TREE_OPERAND (arg0, 1);
10487 enum tree_code code0 = TREE_CODE (arg0);
10488 int is_positive;
10490 if (TREE_CODE (arg01) == REAL_CST)
10491 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10492 else
10493 is_positive = tree_int_cst_sgn (arg01);
10495 /* (X - c) > X becomes false. */
10496 if (code == GT_EXPR
10497 && ((code0 == MINUS_EXPR && is_positive >= 0)
10498 || (code0 == PLUS_EXPR && is_positive <= 0)))
10499 return constant_boolean_node (0, type);
10501 /* Likewise (X + c) < X becomes false. */
10502 if (code == LT_EXPR
10503 && ((code0 == PLUS_EXPR && is_positive >= 0)
10504 || (code0 == MINUS_EXPR && is_positive <= 0)))
10505 return constant_boolean_node (0, type);
10507 /* Convert (X - c) <= X to true. */
10508 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10509 && code == LE_EXPR
10510 && ((code0 == MINUS_EXPR && is_positive >= 0)
10511 || (code0 == PLUS_EXPR && is_positive <= 0)))
10512 return constant_boolean_node (1, type);
10514 /* Convert (X + c) >= X to true. */
10515 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10516 && code == GE_EXPR
10517 && ((code0 == PLUS_EXPR && is_positive >= 0)
10518 || (code0 == MINUS_EXPR && is_positive <= 0)))
10519 return constant_boolean_node (1, type);
10521 if (TREE_CODE (arg01) == INTEGER_CST)
10523 /* Convert X + c > X and X - c < X to true for integers. */
10524 if (code == GT_EXPR
10525 && ((code0 == PLUS_EXPR && is_positive > 0)
10526 || (code0 == MINUS_EXPR && is_positive < 0)))
10527 return constant_boolean_node (1, type);
10529 if (code == LT_EXPR
10530 && ((code0 == MINUS_EXPR && is_positive > 0)
10531 || (code0 == PLUS_EXPR && is_positive < 0)))
10532 return constant_boolean_node (1, type);
10534 /* Convert X + c <= X and X - c >= X to false for integers. */
10535 if (code == LE_EXPR
10536 && ((code0 == PLUS_EXPR && is_positive > 0)
10537 || (code0 == MINUS_EXPR && is_positive < 0)))
10538 return constant_boolean_node (0, type);
10540 if (code == GE_EXPR
10541 && ((code0 == MINUS_EXPR && is_positive > 0)
10542 || (code0 == PLUS_EXPR && is_positive < 0)))
10543 return constant_boolean_node (0, type);
10547 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10548 This transformation affects the cases which are handled in later
10549 optimizations involving comparisons with non-negative constants. */
10550 if (TREE_CODE (arg1) == INTEGER_CST
10551 && TREE_CODE (arg0) != INTEGER_CST
10552 && tree_int_cst_sgn (arg1) > 0)
10554 if (code == GE_EXPR)
10556 arg1 = const_binop (MINUS_EXPR, arg1,
10557 build_int_cst (TREE_TYPE (arg1), 1), 0);
10558 return fold_build2 (GT_EXPR, type, arg0,
10559 fold_convert (TREE_TYPE (arg0), arg1));
10561 if (code == LT_EXPR)
10563 arg1 = const_binop (MINUS_EXPR, arg1,
10564 build_int_cst (TREE_TYPE (arg1), 1), 0);
10565 return fold_build2 (LE_EXPR, type, arg0,
10566 fold_convert (TREE_TYPE (arg0), arg1));
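/* E.g. X >= 5 becomes X > 4 and X < 5 becomes X <= 4, narrowing the
   forms that the following optimizations must handle.  */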
10570 /* Comparisons with the highest or lowest possible integer of
10571 the specified size will have known values. */
10573 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10575 if (TREE_CODE (arg1) == INTEGER_CST
10576 && ! TREE_CONSTANT_OVERFLOW (arg1)
10577 && width <= 2 * HOST_BITS_PER_WIDE_INT
10578 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10579 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10581 HOST_WIDE_INT signed_max_hi;
10582 unsigned HOST_WIDE_INT signed_max_lo;
10583 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10585 if (width <= HOST_BITS_PER_WIDE_INT)
10587 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10588 - 1;
10589 signed_max_hi = 0;
10590 max_hi = 0;
10592 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10594 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10595 min_lo = 0;
10596 min_hi = 0;
10598 else
10600 max_lo = signed_max_lo;
10601 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10602 min_hi = -1;
10605 else
10607 width -= HOST_BITS_PER_WIDE_INT;
10608 signed_max_lo = -1;
10609 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10610 - 1;
10611 max_lo = -1;
10612 min_lo = 0;
10614 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10616 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10617 min_hi = 0;
10619 else
10621 max_hi = signed_max_hi;
10622 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10626 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10627 && TREE_INT_CST_LOW (arg1) == max_lo)
10628 switch (code)
10630 case GT_EXPR:
10631 return omit_one_operand (type, integer_zero_node, arg0);
10633 case GE_EXPR:
10634 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10636 case LE_EXPR:
10637 return omit_one_operand (type, integer_one_node, arg0);
10639 case LT_EXPR:
10640 return fold_build2 (NE_EXPR, type, arg0, arg1);
10642 /* The GE_EXPR and LT_EXPR cases above are not normally
10643 reached because of previous transformations. */
10645 default:
10646 break;
10648 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10649 == max_hi
10650 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10651 switch (code)
10653 case GT_EXPR:
10654 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10655 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10656 case LE_EXPR:
10657 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10658 return fold_build2 (NE_EXPR, type, arg0, arg1);
10659 default:
10660 break;
10662 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10663 == min_hi
10664 && TREE_INT_CST_LOW (arg1) == min_lo)
10665 switch (code)
10667 case LT_EXPR:
10668 return omit_one_operand (type, integer_zero_node, arg0);
10670 case LE_EXPR:
10671 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10673 case GE_EXPR:
10674 return omit_one_operand (type, integer_one_node, arg0);
10676 case GT_EXPR:
10677 return fold_build2 (NE_EXPR, type, op0, op1);
10679 default:
10680 break;
10682 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10683 == min_hi
10684 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10685 switch (code)
10687 case GE_EXPR:
10688 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10689 return fold_build2 (NE_EXPR, type, arg0, arg1);
10690 case LT_EXPR:
10691 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10692 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10693 default:
10694 break;
10697 else if (!in_gimple_form
10698 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10699 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10700 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10701 /* signed_type does not work on pointer types. */
10702 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10704 /* The following case also applies to X < signed_max+1
10705 and X >= signed_max+1 because of previous transformations. */
10706 if (code == LE_EXPR || code == GT_EXPR)
10708 tree st0, st1;
10709 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10710 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10711 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10712 type, fold_convert (st0, arg0),
10713 build_int_cst (st1, 0));
10719 /* If we are comparing an ABS_EXPR with a constant, we can
10720 convert all the cases into explicit comparisons, but they may
10721 well not be faster than doing the ABS and one comparison.
10722 But ABS (X) <= C is a range comparison, which becomes a subtraction
10723 and a comparison, and is probably faster. */
10724 if (code == LE_EXPR
10725 && TREE_CODE (arg1) == INTEGER_CST
10726 && TREE_CODE (arg0) == ABS_EXPR
10727 && ! TREE_SIDE_EFFECTS (arg0)
10728 && (0 != (tem = negate_expr (arg1)))
10729 && TREE_CODE (tem) == INTEGER_CST
10730 && ! TREE_CONSTANT_OVERFLOW (tem))
10731 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10732 build2 (GE_EXPR, type,
10733 TREE_OPERAND (arg0, 0), tem),
10734 build2 (LE_EXPR, type,
10735 TREE_OPERAND (arg0, 0), arg1));
10737 /* Convert ABS_EXPR<x> >= 0 to true. */
10738 if (code == GE_EXPR
10739 && tree_expr_nonnegative_p (arg0)
10740 && (integer_zerop (arg1)
10741 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10742 && real_zerop (arg1))))
10743 return omit_one_operand (type, integer_one_node, arg0);
10745 /* Convert ABS_EXPR<x> < 0 to false. */
10746 if (code == LT_EXPR
10747 && tree_expr_nonnegative_p (arg0)
10748 && (integer_zerop (arg1) || real_zerop (arg1)))
10749 return omit_one_operand (type, integer_zero_node, arg0);
10751 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10752 and similarly for >= into !=. */
10753 if ((code == LT_EXPR || code == GE_EXPR)
10754 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10755 && TREE_CODE (arg1) == LSHIFT_EXPR
10756 && integer_onep (TREE_OPERAND (arg1, 0)))
10757 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10758 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10759 TREE_OPERAND (arg1, 1)),
10760 build_int_cst (TREE_TYPE (arg0), 0));
10762 if ((code == LT_EXPR || code == GE_EXPR)
10763 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10764 && (TREE_CODE (arg1) == NOP_EXPR
10765 || TREE_CODE (arg1) == CONVERT_EXPR)
10766 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10767 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10768 return
10769 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10770 fold_convert (TREE_TYPE (arg0),
10771 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10772 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10773 1))),
10774 build_int_cst (TREE_TYPE (arg0), 0));
10776 return NULL_TREE;
10778 case UNORDERED_EXPR:
10779 case ORDERED_EXPR:
10780 case UNLT_EXPR:
10781 case UNLE_EXPR:
10782 case UNGT_EXPR:
10783 case UNGE_EXPR:
10784 case UNEQ_EXPR:
10785 case LTGT_EXPR:
10786 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10788 t1 = fold_relational_const (code, type, arg0, arg1);
10789 if (t1 != NULL_TREE)
10790 return t1;
10793 /* If the first operand is NaN, the result is constant. */
10794 if (TREE_CODE (arg0) == REAL_CST
10795 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10796 && (code != LTGT_EXPR || ! flag_trapping_math))
10798 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10799 ? integer_zero_node
10800 : integer_one_node;
10801 return omit_one_operand (type, t1, arg1);
10804 /* If the second operand is NaN, the result is constant. */
10805 if (TREE_CODE (arg1) == REAL_CST
10806 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10807 && (code != LTGT_EXPR || ! flag_trapping_math))
10809 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10810 ? integer_zero_node
10811 : integer_one_node;
10812 return omit_one_operand (type, t1, arg0);
10815 /* Simplify unordered comparison of something with itself. */
10816 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10817 && operand_equal_p (arg0, arg1, 0))
10818 return constant_boolean_node (1, type);
10820 if (code == LTGT_EXPR
10821 && !flag_trapping_math
10822 && operand_equal_p (arg0, arg1, 0))
10823 return constant_boolean_node (0, type);
10825 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10827 tree targ0 = strip_float_extensions (arg0);
10828 tree targ1 = strip_float_extensions (arg1);
10829 tree newtype = TREE_TYPE (targ0);
10831 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10832 newtype = TREE_TYPE (targ1);
10834 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10835 return fold_build2 (code, type, fold_convert (newtype, targ0),
10836 fold_convert (newtype, targ1));
10839 return NULL_TREE;
10841 case COMPOUND_EXPR:
10842 /* When pedantic, a compound expression can be neither an lvalue
10843 nor an integer constant expression. */
10844 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10845 return NULL_TREE;
10846 /* Don't let (0, 0) be a null pointer constant. */
10847 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10848 : fold_convert (type, arg1);
10849 return pedantic_non_lvalue (tem);
10851 case COMPLEX_EXPR:
10852 if ((TREE_CODE (arg0) == REAL_CST
10853 && TREE_CODE (arg1) == REAL_CST)
10854 || (TREE_CODE (arg0) == INTEGER_CST
10855 && TREE_CODE (arg1) == INTEGER_CST))
10856 return build_complex (type, arg0, arg1);
10857 return NULL_TREE;
10859 case ASSERT_EXPR:
10860 /* An ASSERT_EXPR should never be passed to fold_binary. */
10861 gcc_unreachable ();
10863 default:
10864 return NULL_TREE;
10865 } /* switch (code) */
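/* Illustrative sketch, not part of GCC: the comparison folds above rest
   on ordinary arithmetic identities.  Two of them, spot-checked in plain
   C (this helper is hypothetical and exists only for exposition):

     X >= C        is equivalent to  X > C-1        for constant C > 0;
     X < (1 << Y)  is equivalent to  (X >> Y) == 0  for unsigned X.  */

static int
check_comparison_identities_example (void)
{
  unsigned int x, y;
  const int c = 5;		/* any positive constant */

  for (x = 0; x < 64; x++)
    {
      if (((int) x >= c) != ((int) x > c - 1))
	return 0;
      for (y = 0; y < 8; y++)
	if ((x < (1u << y)) != ((x >> y) == 0))
	  return 0;
    }
  return 1;
}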
10868 /* Callback for walk_tree, looking for a LABEL_EXPR.
10869 Return *TP if it is a LABEL_EXPR; otherwise return NULL_TREE.
10870 Do not walk into the sub-tree of a GOTO_EXPR. */
10872 static tree
10873 contains_label_1 (tree *tp,
10874 int *walk_subtrees,
10875 void *data ATTRIBUTE_UNUSED)
10877 switch (TREE_CODE (*tp))
10879 case LABEL_EXPR:
10880 return *tp;
10881 case GOTO_EXPR:
10882 *walk_subtrees = 0;
10883 /* no break */
10884 default:
10885 return NULL_TREE;
10889 /* Return true if the sub-tree ST contains a label LABEL_EXPR which
10890 is accessible from outside the sub-tree; return false if no such
10891 label is found. */
10893 static bool
10894 contains_label_p (tree st)
10896 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
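/* For illustration only: a second walk_tree callback in the same style
   as contains_label_1 above, this one counting every node visited.
   DATA points to an int counter.  This helper is hypothetical and is
   not used anywhere in GCC.  */

static tree
count_nodes_1_example (tree *tp ATTRIBUTE_UNUSED,
		       int *walk_subtrees ATTRIBUTE_UNUSED,
		       void *data)
{
  ++*(int *) data;
  return NULL_TREE;		/* never stop the walk early */
}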
10899 /* Fold a ternary expression of code CODE and type TYPE with operands
10900 OP0, OP1, and OP2. Return the folded expression if folding is
10901 successful. Otherwise, return NULL_TREE. */
10903 tree
10904 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10906 tree tem;
10907 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10908 enum tree_code_class kind = TREE_CODE_CLASS (code);
10910 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10911 && TREE_CODE_LENGTH (code) == 3);
10913 /* Strip any conversions that don't change the mode. This is safe
10914 for every expression, except for a comparison expression because
10915 its signedness is derived from its operands. So, in the latter
10916 case, only strip conversions that don't change the signedness.
10918 Note that this is done as an internal manipulation within the
10919 constant folder, in order to find the simplest representation of
10920 the arguments so that their form can be studied. In any case,
10921 the appropriate type conversions should be put back in the tree
10922 that comes out of the constant folder. */
10923 if (op0)
10925 arg0 = op0;
10926 STRIP_NOPS (arg0);
10929 if (op1)
10931 arg1 = op1;
10932 STRIP_NOPS (arg1);
10935 switch (code)
10937 case COMPONENT_REF:
10938 if (TREE_CODE (arg0) == CONSTRUCTOR
10939 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10941 unsigned HOST_WIDE_INT idx;
10942 tree field, value;
10943 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10944 if (field == arg1)
10945 return value;
10947 return NULL_TREE;
10949 case COND_EXPR:
10950 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10951 so all simple results must be passed through pedantic_non_lvalue. */
10952 if (TREE_CODE (arg0) == INTEGER_CST)
10954 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10955 tem = integer_zerop (arg0) ? op2 : op1;
10956 /* Only optimize constant conditions when the selected branch
10957 has the same type as the COND_EXPR. This avoids optimizing
10958 away "c ? x : throw", where the throw has a void type.
10959 Also avoid throwing away an operand that contains a label. */
10960 if ((!TREE_SIDE_EFFECTS (unused_op)
10961 || !contains_label_p (unused_op))
10962 && (! VOID_TYPE_P (TREE_TYPE (tem))
10963 || VOID_TYPE_P (type)))
10964 return pedantic_non_lvalue (tem);
10965 return NULL_TREE;
10967 if (operand_equal_p (arg1, op2, 0))
10968 return pedantic_omit_one_operand (type, arg1, arg0);
10970 /* If we have A op B ? A : C, we may be able to convert this to a
10971 simpler expression, depending on the operation and the values
10972 of B and C. Signed zeros prevent all of these transformations,
10973 for reasons given above each one.
10975 Also try swapping the arguments and inverting the conditional. */
10976 if (COMPARISON_CLASS_P (arg0)
10977 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10978 arg1, TREE_OPERAND (arg0, 1))
10979 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10981 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10982 if (tem)
10983 return tem;
10986 if (COMPARISON_CLASS_P (arg0)
10987 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10988 op2,
10989 TREE_OPERAND (arg0, 1))
10990 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10992 tem = invert_truthvalue (arg0);
10993 if (COMPARISON_CLASS_P (tem))
10995 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10996 if (tem)
10997 return tem;
11001 /* If the second operand is simpler than the third, swap them
11002 since that produces better jump optimization results. */
11003 if (truth_value_p (TREE_CODE (arg0))
11004 && tree_swap_operands_p (op1, op2, false))
11006 /* See if this can be inverted. If it can't, possibly because
11007 it was a floating-point inequality comparison, don't do
11008 anything. */
11009 tem = invert_truthvalue (arg0);
11011 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11012 return fold_build3 (code, type, tem, op2, op1);
11015 /* Convert A ? 1 : 0 to simply A. */
11016 if (integer_onep (op1)
11017 && integer_zerop (op2)
11018 /* If we try to convert OP0 to our type, the
11019 call to fold will try to move the conversion inside
11020 a COND, which will recurse. In that case, the COND_EXPR
11021 is probably the best choice, so leave it alone. */
11022 && type == TREE_TYPE (arg0))
11023 return pedantic_non_lvalue (arg0);
11025 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11026 over COND_EXPR in cases such as floating point comparisons. */
11027 if (integer_zerop (op1)
11028 && integer_onep (op2)
11029 && truth_value_p (TREE_CODE (arg0)))
11030 return pedantic_non_lvalue (fold_convert (type,
11031 invert_truthvalue (arg0)));
11033 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11034 if (TREE_CODE (arg0) == LT_EXPR
11035 && integer_zerop (TREE_OPERAND (arg0, 1))
11036 && integer_zerop (op2)
11037 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11038 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
11039 TREE_TYPE (tem), tem, arg1));
11041 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11042 already handled above. */
11043 if (TREE_CODE (arg0) == BIT_AND_EXPR
11044 && integer_onep (TREE_OPERAND (arg0, 1))
11045 && integer_zerop (op2)
11046 && integer_pow2p (arg1))
11048 tree tem = TREE_OPERAND (arg0, 0);
11049 STRIP_NOPS (tem);
11050 if (TREE_CODE (tem) == RSHIFT_EXPR
11051 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11052 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11053 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11054 return fold_build2 (BIT_AND_EXPR, type,
11055 TREE_OPERAND (tem, 0), arg1);
11058 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11059 is probably obsolete because the first operand should be a
11060 truth value (that's why we have the two cases above), but let's
11061 leave it in until we can confirm this for all front-ends. */
11062 if (integer_zerop (op2)
11063 && TREE_CODE (arg0) == NE_EXPR
11064 && integer_zerop (TREE_OPERAND (arg0, 1))
11065 && integer_pow2p (arg1)
11066 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11067 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11068 arg1, OEP_ONLY_CONST))
11069 return pedantic_non_lvalue (fold_convert (type,
11070 TREE_OPERAND (arg0, 0)));
11072 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11073 if (integer_zerop (op2)
11074 && truth_value_p (TREE_CODE (arg0))
11075 && truth_value_p (TREE_CODE (arg1)))
11076 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
11078 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11079 if (integer_onep (op2)
11080 && truth_value_p (TREE_CODE (arg0))
11081 && truth_value_p (TREE_CODE (arg1)))
11083 /* Only perform transformation if ARG0 is easily inverted. */
11084 tem = invert_truthvalue (arg0);
11085 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11086 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
11089 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11090 if (integer_zerop (arg1)
11091 && truth_value_p (TREE_CODE (arg0))
11092 && truth_value_p (TREE_CODE (op2)))
11094 /* Only perform transformation if ARG0 is easily inverted. */
11095 tem = invert_truthvalue (arg0);
11096 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11097 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
11100 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11101 if (integer_onep (arg1)
11102 && truth_value_p (TREE_CODE (arg0))
11103 && truth_value_p (TREE_CODE (op2)))
11104 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
11106 return NULL_TREE;
11108 case CALL_EXPR:
11109 /* Check for a built-in function. */
11110 if (TREE_CODE (op0) == ADDR_EXPR
11111 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11112 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11113 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11114 return NULL_TREE;
11116 case BIT_FIELD_REF:
11117 if (TREE_CODE (arg0) == VECTOR_CST
11118 && type == TREE_TYPE (TREE_TYPE (arg0))
11119 && host_integerp (arg1, 1)
11120 && host_integerp (op2, 1))
11122 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11123 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11125 if (width != 0
11126 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11127 && (idx % width) == 0
11128 && (idx = idx / width)
11129 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11131 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11132 while (idx-- > 0 && elements)
11133 elements = TREE_CHAIN (elements);
11134 if (elements)
11135 return TREE_VALUE (elements);
11136 else
11137 return fold_convert (type, integer_zero_node);
11140 return NULL_TREE;
11142 default:
11143 return NULL_TREE;
11144 } /* switch (code) */
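/* Sketch (plain C, not GCC trees): the COND_EXPR folds above rely on
   boolean identities that are easy to sanity-check directly.  The
   checker below is hypothetical, for exposition only:

     a ? 1 : 0  ==  a		(A ? 1 : 0 -> A)
     a ? b : 0  ==  a && b	(A ? B : 0 -> A && B)
     a ? 1 : b  ==  a || b	(A ? 1 : B -> A || B)  */

static int
check_cond_identities_example (void)
{
  int a, b;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      if ((a ? 1 : 0) != a
	  || (a ? b : 0) != (a && b)
	  || (a ? 1 : b) != (a || b))
	return 0;
  return 1;
}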
11147 /* Perform constant folding and related simplification of EXPR.
11148 The related simplifications include x*1 => x, x*0 => 0, etc.,
11149 and application of the associative law.
11150 NOP_EXPR conversions may be removed freely (as long as we
11151 are careful not to change the type of the overall expression).
11152 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11153 but we can constant-fold them if they have constant operands. */
11155 #ifdef ENABLE_FOLD_CHECKING
11156 # define fold(x) fold_1 (x)
11157 static tree fold_1 (tree);
11158 static
11159 #endif
11160 tree
11161 fold (tree expr)
11163 const tree t = expr;
11164 enum tree_code code = TREE_CODE (t);
11165 enum tree_code_class kind = TREE_CODE_CLASS (code);
11166 tree tem;
11168 /* Return right away if a constant. */
11169 if (kind == tcc_constant)
11170 return t;
11172 if (IS_EXPR_CODE_CLASS (kind))
11174 tree type = TREE_TYPE (t);
11175 tree op0, op1, op2;
11177 switch (TREE_CODE_LENGTH (code))
11179 case 1:
11180 op0 = TREE_OPERAND (t, 0);
11181 tem = fold_unary (code, type, op0);
11182 return tem ? tem : expr;
11183 case 2:
11184 op0 = TREE_OPERAND (t, 0);
11185 op1 = TREE_OPERAND (t, 1);
11186 tem = fold_binary (code, type, op0, op1);
11187 return tem ? tem : expr;
11188 case 3:
11189 op0 = TREE_OPERAND (t, 0);
11190 op1 = TREE_OPERAND (t, 1);
11191 op2 = TREE_OPERAND (t, 2);
11192 tem = fold_ternary (code, type, op0, op1, op2);
11193 return tem ? tem : expr;
11194 default:
11195 break;
11199 switch (code)
11201 case CONST_DECL:
11202 return fold (DECL_INITIAL (t));
11204 default:
11205 return t;
11206 } /* switch (code) */
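/* Usage sketch (hypothetical caller, not part of GCC): most clients
   reach the dispatch above through the fold_buildN wrappers rather
   than through fold itself.  Folding 2 + 3 in integer_type_node, for
   instance, should hand back an INTEGER_CST of value 5 instead of a
   PLUS_EXPR node.  */

static tree
fold_dispatch_example (void)
{
  return fold_build2 (PLUS_EXPR, integer_type_node,
		      build_int_cst (integer_type_node, 2),
		      build_int_cst (integer_type_node, 3));
}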
11209 #ifdef ENABLE_FOLD_CHECKING
11210 #undef fold
11212 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11213 static void fold_check_failed (tree, tree);
11214 void print_fold_checksum (tree);
11216 /* When --enable-checking=fold is used, compute a digest of EXPR
11217 before and after the actual fold call to verify that fold did not
11218 accidentally change the original EXPR. */
11220 tree
11221 fold (tree expr)
11223 tree ret;
11224 struct md5_ctx ctx;
11225 unsigned char checksum_before[16], checksum_after[16];
11226 htab_t ht;
11228 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11229 md5_init_ctx (&ctx);
11230 fold_checksum_tree (expr, &ctx, ht);
11231 md5_finish_ctx (&ctx, checksum_before);
11232 htab_empty (ht);
11234 ret = fold_1 (expr);
11236 md5_init_ctx (&ctx);
11237 fold_checksum_tree (expr, &ctx, ht);
11238 md5_finish_ctx (&ctx, checksum_after);
11239 htab_delete (ht);
11241 if (memcmp (checksum_before, checksum_after, 16))
11242 fold_check_failed (expr, ret);
11244 return ret;
11247 void
11248 print_fold_checksum (tree expr)
11250 struct md5_ctx ctx;
11251 unsigned char checksum[16], cnt;
11252 htab_t ht;
11254 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11255 md5_init_ctx (&ctx);
11256 fold_checksum_tree (expr, &ctx, ht);
11257 md5_finish_ctx (&ctx, checksum);
11258 htab_delete (ht);
11259 for (cnt = 0; cnt < 16; ++cnt)
11260 fprintf (stderr, "%02x", checksum[cnt]);
11261 putc ('\n', stderr);
11264 static void
11265 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11267 internal_error ("fold check: original tree changed by fold");
11270 static void
11271 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11273 void **slot;
11274 enum tree_code code;
11275 struct tree_function_decl buf;
11276 int i, len;
11278 recursive_label:
11280 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11281 <= sizeof (struct tree_function_decl))
11282 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11283 if (expr == NULL)
11284 return;
11285 slot = htab_find_slot (ht, expr, INSERT);
11286 if (*slot != NULL)
11287 return;
11288 *slot = expr;
11289 code = TREE_CODE (expr);
11290 if (TREE_CODE_CLASS (code) == tcc_declaration
11291 && DECL_ASSEMBLER_NAME_SET_P (expr))
11293 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11294 memcpy ((char *) &buf, expr, tree_size (expr));
11295 expr = (tree) &buf;
11296 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11298 else if (TREE_CODE_CLASS (code) == tcc_type
11299 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11300 || TYPE_CACHED_VALUES_P (expr)
11301 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11303 /* Allow these fields to be modified. */
11304 memcpy ((char *) &buf, expr, tree_size (expr));
11305 expr = (tree) &buf;
11306 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11307 TYPE_POINTER_TO (expr) = NULL;
11308 TYPE_REFERENCE_TO (expr) = NULL;
11309 if (TYPE_CACHED_VALUES_P (expr))
11311 TYPE_CACHED_VALUES_P (expr) = 0;
11312 TYPE_CACHED_VALUES (expr) = NULL;
11315 md5_process_bytes (expr, tree_size (expr), ctx);
11316 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11317 if (TREE_CODE_CLASS (code) != tcc_type
11318 && TREE_CODE_CLASS (code) != tcc_declaration
11319 && code != TREE_LIST)
11320 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11321 switch (TREE_CODE_CLASS (code))
11323 case tcc_constant:
11324 switch (code)
11326 case STRING_CST:
11327 md5_process_bytes (TREE_STRING_POINTER (expr),
11328 TREE_STRING_LENGTH (expr), ctx);
11329 break;
11330 case COMPLEX_CST:
11331 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11332 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11333 break;
11334 case VECTOR_CST:
11335 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11336 break;
11337 default:
11338 break;
11340 break;
11341 case tcc_exceptional:
11342 switch (code)
11344 case TREE_LIST:
11345 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11346 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11347 expr = TREE_CHAIN (expr);
11348 goto recursive_label;
11349 break;
11350 case TREE_VEC:
11351 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11352 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11353 break;
11354 default:
11355 break;
11357 break;
11358 case tcc_expression:
11359 case tcc_reference:
11360 case tcc_comparison:
11361 case tcc_unary:
11362 case tcc_binary:
11363 case tcc_statement:
11364 len = TREE_CODE_LENGTH (code);
11365 for (i = 0; i < len; ++i)
11366 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11367 break;
11368 case tcc_declaration:
11369 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11370 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11371 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11373 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11374 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11375 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11376 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11377 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11379 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11380 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11382 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11384 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11385 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11386 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11388 break;
11389 case tcc_type:
11390 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11391 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11392 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11393 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11394 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11395 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11396 if (INTEGRAL_TYPE_P (expr)
11397 || SCALAR_FLOAT_TYPE_P (expr))
11399 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11400 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11402 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11403 if (TREE_CODE (expr) == RECORD_TYPE
11404 || TREE_CODE (expr) == UNION_TYPE
11405 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11406 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11407 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11408 break;
11409 default:
11410 break;
11414 #endif
11416 /* Fold a unary tree expression with code CODE of type TYPE with an
11417 operand OP0. Return a folded expression if successful. Otherwise,
11418 return a tree expression with code CODE of type TYPE with an
11419 operand OP0. */
11421 tree
11422 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11424 tree tem;
11425 #ifdef ENABLE_FOLD_CHECKING
11426 unsigned char checksum_before[16], checksum_after[16];
11427 struct md5_ctx ctx;
11428 htab_t ht;
11430 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11431 md5_init_ctx (&ctx);
11432 fold_checksum_tree (op0, &ctx, ht);
11433 md5_finish_ctx (&ctx, checksum_before);
11434 htab_empty (ht);
11435 #endif
11437 tem = fold_unary (code, type, op0);
11438 if (!tem)
11439 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11441 #ifdef ENABLE_FOLD_CHECKING
11442 md5_init_ctx (&ctx);
11443 fold_checksum_tree (op0, &ctx, ht);
11444 md5_finish_ctx (&ctx, checksum_after);
11445 htab_delete (ht);
11447 if (memcmp (checksum_before, checksum_after, 16))
11448 fold_check_failed (op0, tem);
11449 #endif
11450 return tem;
11453 /* Fold a binary tree expression with code CODE of type TYPE with
11454 operands OP0 and OP1. Return a folded expression if successful.
11455 Otherwise, return a tree expression with code CODE of type TYPE
11456 with operands OP0 and OP1. */
11458 tree
11459 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11460 MEM_STAT_DECL)
11462 tree tem;
11463 #ifdef ENABLE_FOLD_CHECKING
11464 unsigned char checksum_before_op0[16],
11465 checksum_before_op1[16],
11466 checksum_after_op0[16],
11467 checksum_after_op1[16];
11468 struct md5_ctx ctx;
11469 htab_t ht;
11471 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11472 md5_init_ctx (&ctx);
11473 fold_checksum_tree (op0, &ctx, ht);
11474 md5_finish_ctx (&ctx, checksum_before_op0);
11475 htab_empty (ht);
11477 md5_init_ctx (&ctx);
11478 fold_checksum_tree (op1, &ctx, ht);
11479 md5_finish_ctx (&ctx, checksum_before_op1);
11480 htab_empty (ht);
11481 #endif
11483 tem = fold_binary (code, type, op0, op1);
11484 if (!tem)
11485 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11487 #ifdef ENABLE_FOLD_CHECKING
11488 md5_init_ctx (&ctx);
11489 fold_checksum_tree (op0, &ctx, ht);
11490 md5_finish_ctx (&ctx, checksum_after_op0);
11491 htab_empty (ht);
11493 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11494 fold_check_failed (op0, tem);
11496 md5_init_ctx (&ctx);
11497 fold_checksum_tree (op1, &ctx, ht);
11498 md5_finish_ctx (&ctx, checksum_after_op1);
11499 htab_delete (ht);
11501 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11502 fold_check_failed (op1, tem);
11503 #endif
11504 return tem;
11507 /* Fold a ternary tree expression with code CODE of type TYPE with
11508 operands OP0, OP1, and OP2. Return a folded expression if
11509 successful. Otherwise, return a tree expression with code CODE of
11510 type TYPE with operands OP0, OP1, and OP2. */
11512 tree
11513 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11514 MEM_STAT_DECL)
11516 tree tem;
11517 #ifdef ENABLE_FOLD_CHECKING
11518 unsigned char checksum_before_op0[16],
11519 checksum_before_op1[16],
11520 checksum_before_op2[16],
11521 checksum_after_op0[16],
11522 checksum_after_op1[16],
11523 checksum_after_op2[16];
11524 struct md5_ctx ctx;
11525 htab_t ht;
11527 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11528 md5_init_ctx (&ctx);
11529 fold_checksum_tree (op0, &ctx, ht);
11530 md5_finish_ctx (&ctx, checksum_before_op0);
11531 htab_empty (ht);
11533 md5_init_ctx (&ctx);
11534 fold_checksum_tree (op1, &ctx, ht);
11535 md5_finish_ctx (&ctx, checksum_before_op1);
11536 htab_empty (ht);
11538 md5_init_ctx (&ctx);
11539 fold_checksum_tree (op2, &ctx, ht);
11540 md5_finish_ctx (&ctx, checksum_before_op2);
11541 htab_empty (ht);
11542 #endif
11544 tem = fold_ternary (code, type, op0, op1, op2);
11545 if (!tem)
11546 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11548 #ifdef ENABLE_FOLD_CHECKING
11549 md5_init_ctx (&ctx);
11550 fold_checksum_tree (op0, &ctx, ht);
11551 md5_finish_ctx (&ctx, checksum_after_op0);
11552 htab_empty (ht);
11554 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11555 fold_check_failed (op0, tem);
11557 md5_init_ctx (&ctx);
11558 fold_checksum_tree (op1, &ctx, ht);
11559 md5_finish_ctx (&ctx, checksum_after_op1);
11560 htab_empty (ht);
11562 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11563 fold_check_failed (op1, tem);
11565 md5_init_ctx (&ctx);
11566 fold_checksum_tree (op2, &ctx, ht);
11567 md5_finish_ctx (&ctx, checksum_after_op2);
11568 htab_delete (ht);
11570 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11571 fold_check_failed (op2, tem);
11572 #endif
11573 return tem;
11576 /* Perform constant folding and related simplification of initializer
11577 expression EXPR. These behave identically to "fold_buildN" but ignore
11578 potential run-time traps and exceptions that fold must preserve. */
11580 #define START_FOLD_INIT \
11581 int saved_signaling_nans = flag_signaling_nans;\
11582 int saved_trapping_math = flag_trapping_math;\
11583 int saved_rounding_math = flag_rounding_math;\
11584 int saved_trapv = flag_trapv;\
11585 flag_signaling_nans = 0;\
11586 flag_trapping_math = 0;\
11587 flag_rounding_math = 0;\
11588 flag_trapv = 0
11590 #define END_FOLD_INIT \
11591 flag_signaling_nans = saved_signaling_nans;\
11592 flag_trapping_math = saved_trapping_math;\
11593 flag_rounding_math = saved_rounding_math;\
11594 flag_trapv = saved_trapv
11596 tree
11597 fold_build1_initializer (enum tree_code code, tree type, tree op)
11599 tree result;
11600 START_FOLD_INIT;
11602 result = fold_build1 (code, type, op);
11604 END_FOLD_INIT;
11605 return result;
11608 tree
11609 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11611 tree result;
11612 START_FOLD_INIT;
11614 result = fold_build2 (code, type, op0, op1);
11616 END_FOLD_INIT;
11617 return result;
11620 tree
11621 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11622 tree op2)
11624 tree result;
11625 START_FOLD_INIT;
11627 result = fold_build3 (code, type, op0, op1, op2);
11629 END_FOLD_INIT;
11630 return result;
11633 #undef START_FOLD_INIT
11634 #undef END_FOLD_INIT
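/* Usage sketch (hypothetical caller, not part of GCC): a static
   initializer such as "1.0 / 2.0" must fold even when -ftrapping-math
   or -frounding-math is in effect, which is exactly what the
   _initializer variants above arrange via START_FOLD_INIT and
   END_FOLD_INIT.  */

static tree
fold_initializer_example (void)
{
  return fold_build2_initializer (RDIV_EXPR, double_type_node,
				  build_real (double_type_node, dconst1),
				  build_real (double_type_node, dconst2));
}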
11636 /* Determine if the first argument is a multiple of the second argument.
11637 Return 0 if it is not, or if we cannot easily determine that it is.
11639 An example of the sort of thing we care about (at this point; this routine
11640 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11641 fold cases do now) is discovering that
11643 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11645 is a multiple of
11647 SAVE_EXPR (J * 8)
11649 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11651 This code also handles discovering that
11653 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11655 is a multiple of 8 so we don't have to worry about dealing with a
11656 possible remainder.
11658 Note that we *look* inside a SAVE_EXPR only to determine how it was
11659 calculated; it is not safe for fold to do much of anything else with the
11660 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11661 at run time. For example, the latter example above *cannot* be implemented
11662 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11663 evaluation time of the original SAVE_EXPR is not necessarily the same at
11664 the time the new expression is evaluated. The only optimization of this
11665 sort that would be valid is changing
11667 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11669 divided by 8 to
11671 SAVE_EXPR (I) * SAVE_EXPR (J)
11673 (where the same SAVE_EXPR (J) is used in the original and the
11674 transformed version). */
11676 static int
11677 multiple_of_p (tree type, tree top, tree bottom)
11679 if (operand_equal_p (top, bottom, 0))
11680 return 1;
11682 if (TREE_CODE (type) != INTEGER_TYPE)
11683 return 0;
11685 switch (TREE_CODE (top))
11687 case BIT_AND_EXPR:
11688 /* Bitwise and provides a power of two multiple. If the mask is
11689 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
11690 if (!integer_pow2p (bottom))
11691 return 0;
11692 /* FALLTHRU */
11694 case MULT_EXPR:
11695 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11696 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11698 case PLUS_EXPR:
11699 case MINUS_EXPR:
11700 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11701 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11703 case LSHIFT_EXPR:
11704 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11706 tree op1, t1;
11708 op1 = TREE_OPERAND (top, 1);
11709 /* const_binop may not detect overflow correctly,
11710 so check for it explicitly here. */
11711 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11712 > TREE_INT_CST_LOW (op1)
11713 && TREE_INT_CST_HIGH (op1) == 0
11714 && 0 != (t1 = fold_convert (type,
11715 const_binop (LSHIFT_EXPR,
11716 size_one_node,
11717 op1, 0)))
11718 && ! TREE_OVERFLOW (t1))
11719 return multiple_of_p (type, t1, bottom);
11721 return 0;
11723 case NOP_EXPR:
11724 /* Can't handle conversions from non-integral or wider integral type. */
11725 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11726 || (TYPE_PRECISION (type)
11727 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11728 return 0;
11730 /* ... fall through ... */
11732 case SAVE_EXPR:
11733 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
11735 case INTEGER_CST:
11736 if (TREE_CODE (bottom) != INTEGER_CST
11737 || (TYPE_UNSIGNED (type)
11738 && (tree_int_cst_sgn (top) < 0
11739 || tree_int_cst_sgn (bottom) < 0)))
11740 return 0;
11741 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
11742 top, bottom, 0));
11744 default:
11745 return 0;
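/* Illustration (plain C, not GCC trees): the motivating example from
   the comment above -- I * (J * 8) is a multiple of 8 whatever I and J
   are.  A hypothetical spot check over small unsigned values:  */

static int
check_multiple_of_example (void)
{
  unsigned long i, j;

  for (i = 0; i < 16; i++)
    for (j = 0; j < 16; j++)
      if ((i * (j * 8)) % 8 != 0)
	return 0;
  return 1;
}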
11749 /* Return true if `t' is known to be non-negative. */
11751 int
11752 tree_expr_nonnegative_p (tree t)
11754 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11755 return 1;
11757 switch (TREE_CODE (t))
11759 case SSA_NAME:
11760 /* Query VRP to see if it has recorded any information about
11761 the range of this object. */
11762 return ssa_name_nonnegative_p (t);
11764 case ABS_EXPR:
11765 /* We can't return 1 if flag_wrapv is set because
11766 ABS_EXPR<INT_MIN> = INT_MIN. */
11767 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11768 return 1;
11769 break;
11771 case INTEGER_CST:
11772 return tree_int_cst_sgn (t) >= 0;
11774 case REAL_CST:
11775 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11777 case PLUS_EXPR:
11778 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11779 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11780 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11782 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11783 both unsigned and at least 2 bits shorter than the result. */
11784 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11785 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11786 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11788 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11789 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11790 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11791 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11793 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11794 TYPE_PRECISION (inner2)) + 1;
11795 return prec < TYPE_PRECISION (TREE_TYPE (t));
11798 break;
11800 case MULT_EXPR:
11801 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11803 /* x * x for floating point x is always non-negative. */
11804 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11805 return 1;
11806 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11807 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11810 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11811 both unsigned and their combined width is less than that of the result. */
11812 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11813 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11814 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11816 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11817 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11818 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11819 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11820 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11821 < TYPE_PRECISION (TREE_TYPE (t));
11823 return 0;
11825 case BIT_AND_EXPR:
11826 case MAX_EXPR:
11827 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11828 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11830 case BIT_IOR_EXPR:
11831 case BIT_XOR_EXPR:
11832 case MIN_EXPR:
11833 case RDIV_EXPR:
11834 case TRUNC_DIV_EXPR:
11835 case CEIL_DIV_EXPR:
11836 case FLOOR_DIV_EXPR:
11837 case ROUND_DIV_EXPR:
11838 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11839 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11841 case TRUNC_MOD_EXPR:
11842 case CEIL_MOD_EXPR:
11843 case FLOOR_MOD_EXPR:
11844 case ROUND_MOD_EXPR:
11845 case SAVE_EXPR:
11846 case NON_LVALUE_EXPR:
11847 case FLOAT_EXPR:
11848 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11850 case COMPOUND_EXPR:
11851 case MODIFY_EXPR:
11852 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11854 case BIND_EXPR:
11855 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
11857 case COND_EXPR:
11858 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
11859 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
11861 case NOP_EXPR:
11863 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11864 tree outer_type = TREE_TYPE (t);
11866 if (TREE_CODE (outer_type) == REAL_TYPE)
11868 if (TREE_CODE (inner_type) == REAL_TYPE)
11869 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11870 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11872 if (TYPE_UNSIGNED (inner_type))
11873 return 1;
11874 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11877 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11879 if (TREE_CODE (inner_type) == REAL_TYPE)
11880 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11881 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11882 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11883 && TYPE_UNSIGNED (inner_type);
11886 break;
11888 case TARGET_EXPR:
11890 tree temp = TARGET_EXPR_SLOT (t);
11891 t = TARGET_EXPR_INITIAL (t);
11893 /* If the initializer is non-void, then it's a normal expression
11894 that will be assigned to the slot. */
11895 if (!VOID_TYPE_P (t))
11896 return tree_expr_nonnegative_p (t);
11898 /* Otherwise, the initializer sets the slot in some way. One common
11899 way is an assignment statement at the end of the initializer. */
11900 while (1)
11902 if (TREE_CODE (t) == BIND_EXPR)
11903 t = expr_last (BIND_EXPR_BODY (t));
11904 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11905 || TREE_CODE (t) == TRY_CATCH_EXPR)
11906 t = expr_last (TREE_OPERAND (t, 0));
11907 else if (TREE_CODE (t) == STATEMENT_LIST)
11908 t = expr_last (t);
11909 else
11910 break;
11912 if (TREE_CODE (t) == MODIFY_EXPR
11913 && TREE_OPERAND (t, 0) == temp)
11914 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11916 return 0;
11919 case CALL_EXPR:
11921 tree fndecl = get_callee_fndecl (t);
11922 tree arglist = TREE_OPERAND (t, 1);
11923 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11924 switch (DECL_FUNCTION_CODE (fndecl))
11926 CASE_FLT_FN (BUILT_IN_ACOS):
11927 CASE_FLT_FN (BUILT_IN_ACOSH):
11928 CASE_FLT_FN (BUILT_IN_CABS):
11929 CASE_FLT_FN (BUILT_IN_COSH):
11930 CASE_FLT_FN (BUILT_IN_ERFC):
11931 CASE_FLT_FN (BUILT_IN_EXP):
11932 CASE_FLT_FN (BUILT_IN_EXP10):
11933 CASE_FLT_FN (BUILT_IN_EXP2):
11934 CASE_FLT_FN (BUILT_IN_FABS):
11935 CASE_FLT_FN (BUILT_IN_FDIM):
11936 CASE_FLT_FN (BUILT_IN_HYPOT):
11937 CASE_FLT_FN (BUILT_IN_POW10):
11938 CASE_INT_FN (BUILT_IN_FFS):
11939 CASE_INT_FN (BUILT_IN_PARITY):
11940 CASE_INT_FN (BUILT_IN_POPCOUNT):
11941 /* Always true. */
11942 return 1;
11944 CASE_FLT_FN (BUILT_IN_SQRT):
11945 /* sqrt(-0.0) is -0.0. */
11946 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11947 return 1;
11948 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11950 CASE_FLT_FN (BUILT_IN_ASINH):
11951 CASE_FLT_FN (BUILT_IN_ATAN):
11952 CASE_FLT_FN (BUILT_IN_ATANH):
11953 CASE_FLT_FN (BUILT_IN_CBRT):
11954 CASE_FLT_FN (BUILT_IN_CEIL):
11955 CASE_FLT_FN (BUILT_IN_ERF):
11956 CASE_FLT_FN (BUILT_IN_EXPM1):
11957 CASE_FLT_FN (BUILT_IN_FLOOR):
11958 CASE_FLT_FN (BUILT_IN_FMOD):
11959 CASE_FLT_FN (BUILT_IN_FREXP):
11960 CASE_FLT_FN (BUILT_IN_LCEIL):
11961 CASE_FLT_FN (BUILT_IN_LDEXP):
11962 CASE_FLT_FN (BUILT_IN_LFLOOR):
11963 CASE_FLT_FN (BUILT_IN_LLCEIL):
11964 CASE_FLT_FN (BUILT_IN_LLFLOOR):
11965 CASE_FLT_FN (BUILT_IN_LLRINT):
11966 CASE_FLT_FN (BUILT_IN_LLROUND):
11967 CASE_FLT_FN (BUILT_IN_LRINT):
11968 CASE_FLT_FN (BUILT_IN_LROUND):
11969 CASE_FLT_FN (BUILT_IN_MODF):
11970 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11971 CASE_FLT_FN (BUILT_IN_POW):
11972 CASE_FLT_FN (BUILT_IN_RINT):
11973 CASE_FLT_FN (BUILT_IN_ROUND):
11974 CASE_FLT_FN (BUILT_IN_SIGNBIT):
11975 CASE_FLT_FN (BUILT_IN_SINH):
11976 CASE_FLT_FN (BUILT_IN_TANH):
11977 CASE_FLT_FN (BUILT_IN_TRUNC):
11978 /* True if the 1st argument is nonnegative. */
11979 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11981 CASE_FLT_FN (BUILT_IN_FMAX):
11982 /* True if the 1st OR 2nd arguments are nonnegative. */
11983 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11984 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11986 CASE_FLT_FN (BUILT_IN_FMIN):
11987 /* True if the 1st AND 2nd arguments are nonnegative. */
11988 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11989 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11991 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11992 /* True if the 2nd argument is nonnegative. */
11993 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11995 default:
11996 break;
12000 /* ... fall through ... */
12002 default:
12003 if (truth_value_p (TREE_CODE (t)))
12004 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12005 return 1;
12008 /* We don't know the sign of `t', so be conservative and return false. */
12009 return 0;
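/* Sketch (plain C): the PLUS_EXPR rule above -- zero_extend(x) +
   zero_extend(y) cannot go negative when x and y are unsigned and at
   least two bits narrower than the result.  With 8-bit operands
   widened to a 32-bit int the sum is at most 255 + 255 = 510.  This
   checker is hypothetical, for exposition only.  */

static int
check_zext_plus_nonnegative_example (void)
{
  unsigned char x = 255, y = 255;	/* worst case */
  int sum = (int) x + (int) y;		/* 510, comfortably nonnegative */

  return sum >= 0;
}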
12012 /* Return true when T is an address and is known to be nonzero.
12013 For floating point we further ensure that T is not denormal.
12014 Similar logic is present in nonzero_address in rtlanal.h. */
12016 bool
12017 tree_expr_nonzero_p (tree t)
12019 tree type = TREE_TYPE (t);
12021 /* Doing something useful for floating point would need more work. */
12022 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12023 return false;
12025 switch (TREE_CODE (t))
12027 case SSA_NAME:
12028 /* Query VRP to see if it has recorded any information about
12029 the range of this object. */
12030 return ssa_name_nonzero_p (t);
12032 case ABS_EXPR:
12033 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12035 case INTEGER_CST:
12036 /* We used to test for !integer_zerop here. This does not work correctly
12037 if TREE_CONSTANT_OVERFLOW (t). */
12038 return (TREE_INT_CST_LOW (t) != 0
12039 || TREE_INT_CST_HIGH (t) != 0);
12041 case PLUS_EXPR:
12042 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12044 /* In the presence of negative values it is hard
12045 to say anything definite. */
12046 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12047 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12048 return false;
12049 /* One of the operands must be positive and the other non-negative. */
12050 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12051 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12053 break;
12055 case MULT_EXPR:
12056 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12058 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12059 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12061 break;
12063 case NOP_EXPR:
12065 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12066 tree outer_type = TREE_TYPE (t);
12068 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12069 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12071 break;
12073 case ADDR_EXPR:
12075 tree base = get_base_address (TREE_OPERAND (t, 0));
12077 if (!base)
12078 return false;
12080 /* Weak declarations may link to NULL. */
12081 if (VAR_OR_FUNCTION_DECL_P (base))
12082 return !DECL_WEAK (base);
12084 /* Constants are never weak. */
12085 if (CONSTANT_CLASS_P (base))
12086 return true;
12088 return false;
12091 case COND_EXPR:
12092 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12093 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12095 case MIN_EXPR:
12096 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12097 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12099 case MAX_EXPR:
12100 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12102 /* When both operands are nonzero, then MAX must be too. */
12103 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12104 return true;
12106 /* MAX where operand 0 is positive is positive. */
12107 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12109 /* MAX where operand 1 is positive is positive. */
12110 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12111 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12112 return true;
12113 break;
12115 case COMPOUND_EXPR:
12116 case MODIFY_EXPR:
12117 case BIND_EXPR:
12118 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12120 case SAVE_EXPR:
12121 case NON_LVALUE_EXPR:
12122 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12124 case BIT_IOR_EXPR:
12125 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12126 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12128 case CALL_EXPR:
12129 return alloca_call_p (t);
12131 default:
12132 break;
12134 return false;
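/* Illustration (GNU C, hypothetical declarations, not part of GCC):
   the ADDR_EXPR case above must refuse weak declarations because the
   address of a weak symbol may legitimately be null at run time,
   unlike the address of an ordinary object.  */

extern int example_weak_var __attribute__ ((weak));

static int
weak_address_may_be_null_example (void)
{
  return &example_weak_var == 0;	/* may be true for a weak symbol */
}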
12137 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12138 attempt to fold the expression to a constant without modifying TYPE,
12139 OP0 or OP1.
12141 If the expression could be simplified to a constant, then return
12142 the constant. If the expression would not be simplified to a
12143 constant, then return NULL_TREE. */
12145 tree
12146 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12148 tree tem = fold_binary (code, type, op0, op1);
12149 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12152 /* Given the components of a unary expression CODE, TYPE and OP0,
12153 attempt to fold the expression to a constant without modifying
12154 TYPE or OP0.
12156 If the expression could be simplified to a constant, then return
12157 the constant. If the expression would not be simplified to a
12158 constant, then return NULL_TREE. */
12160 tree
12161 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12163 tree tem = fold_unary (code, type, op0);
12164 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12167 /* If EXP represents referencing an element in a constant string
12168 (either via pointer arithmetic or array indexing), return the
12169 tree representing the value accessed, otherwise return NULL. */
12171 tree
12172 fold_read_from_constant_string (tree exp)
12174 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12176 tree exp1 = TREE_OPERAND (exp, 0);
12177 tree index;
12178 tree string;
12180 if (TREE_CODE (exp) == INDIRECT_REF)
12181 string = string_constant (exp1, &index);
12182 else
12184 tree low_bound = array_ref_low_bound (exp);
12185 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12187 /* Optimize the special case of a zero lower bound.
12189 We convert the low_bound to sizetype to avoid some problems
12190 with constant folding. (E.g. suppose the lower bound is 1,
12191 and its mode is QI. Without the conversion, (ARRAY
12192 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12193 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12194 if (! integer_zerop (low_bound))
12195 index = size_diffop (index, fold_convert (sizetype, low_bound));
12197 string = exp1;
12200 if (string
12201 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
12202 && TREE_CODE (string) == STRING_CST
12203 && TREE_CODE (index) == INTEGER_CST
12204 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12205 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12206 == MODE_INT)
12207 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12208 return fold_convert (TREE_TYPE (exp),
12209 build_int_cst (NULL_TREE,
12210 (TREE_STRING_POINTER (string)
12211 [TREE_INT_CST_LOW (index)])));
12213 return NULL;
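/* Usage sketch: given the tree for "abc"[1], the routine above returns
   an INTEGER_CST holding 'b'.  The plain-C analogue of the
   transformation (hypothetical helper, exposition only):  */

static int
read_from_constant_string_example (void)
{
  return "abc"[1];	/* folds to the constant 'b' */
}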
12216 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12217 an integer constant or real constant.
12219 TYPE is the type of the result. */
12221 static tree
12222 fold_negate_const (tree arg0, tree type)
12224 tree t = NULL_TREE;
12226 switch (TREE_CODE (arg0))
12228 case INTEGER_CST:
12230 unsigned HOST_WIDE_INT low;
12231 HOST_WIDE_INT high;
12232 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12233 TREE_INT_CST_HIGH (arg0),
12234 &low, &high);
12235 t = build_int_cst_wide (type, low, high);
12236 t = force_fit_type (t, 1,
12237 (overflow | TREE_OVERFLOW (arg0))
12238 && !TYPE_UNSIGNED (type),
12239 TREE_CONSTANT_OVERFLOW (arg0));
12240 break;
12243 case REAL_CST:
12244 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12245 break;
12247 default:
12248 gcc_unreachable ();
12251 return t;
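/* Sketch (plain C, with two 32-bit words standing in for the low/high
   HOST_WIDE_INT pair): neg_double, used above, computes two's-complement
   negation across the word boundary, i.e. ~x + 1 with the carry out of
   the low word propagated into the high word.  Hypothetical helper:  */

static void
negate_double_example (unsigned int lo, unsigned int hi,
		       unsigned int *neg_lo, unsigned int *neg_hi)
{
  *neg_lo = ~lo + 1;
  *neg_hi = ~hi + (lo == 0);	/* carry occurs only when LO was zero */
}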
12254 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12255 an integer constant or real constant.
12257 TYPE is the type of the result. */
12259 tree
12260 fold_abs_const (tree arg0, tree type)
12262 tree t = NULL_TREE;
12264 switch (TREE_CODE (arg0))
12266 case INTEGER_CST:
12267 /* If the value is unsigned, then the absolute value is
12268 the same as the ordinary value. */
12269 if (TYPE_UNSIGNED (type))
12270 t = arg0;
12271 /* Similarly, if the value is non-negative. */
12272 else if (INT_CST_LT (integer_minus_one_node, arg0))
12273 t = arg0;
12274 /* If the value is negative, then the absolute value is
12275 its negation. */
12276 else
12278 unsigned HOST_WIDE_INT low;
12279 HOST_WIDE_INT high;
12280 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12281 TREE_INT_CST_HIGH (arg0),
12282 &low, &high);
12283 t = build_int_cst_wide (type, low, high);
12284 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12285 TREE_CONSTANT_OVERFLOW (arg0));
12287 break;
12289 case REAL_CST:
12290 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12291 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12292 else
12293 t = arg0;
12294 break;
12296 default:
12297 gcc_unreachable ();
12300 return t;
12303 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12304 constant. TYPE is the type of the result. */
12306 static tree
12307 fold_not_const (tree arg0, tree type)
12309 tree t = NULL_TREE;
12311 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12313 t = build_int_cst_wide (type,
12314 ~ TREE_INT_CST_LOW (arg0),
12315 ~ TREE_INT_CST_HIGH (arg0));
12316 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12317 TREE_CONSTANT_OVERFLOW (arg0));
12319 return t;
12322 /* Given CODE, a relational operator, the target type, TYPE and two
12323 constant operands OP0 and OP1, return the result of the
12324 relational operation. If the result is not a compile time
12325 constant, then return NULL_TREE. */
12327 static tree
12328 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12330 int result, invert;
12332 /* From here on, the only cases we handle are when the result is
12333 known to be a constant. */
12335 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12337 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12338 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12340 /* Handle the cases where either operand is a NaN. */
12341 if (real_isnan (c0) || real_isnan (c1))
12343 switch (code)
12345 case EQ_EXPR:
12346 case ORDERED_EXPR:
12347 result = 0;
12348 break;
12350 case NE_EXPR:
12351 case UNORDERED_EXPR:
12352 case UNLT_EXPR:
12353 case UNLE_EXPR:
12354 case UNGT_EXPR:
12355 case UNGE_EXPR:
12356 case UNEQ_EXPR:
12357 result = 1;
12358 break;
12360 case LT_EXPR:
12361 case LE_EXPR:
12362 case GT_EXPR:
12363 case GE_EXPR:
12364 case LTGT_EXPR:
12365 if (flag_trapping_math)
12366 return NULL_TREE;
12367 result = 0;
12368 break;
12370 default:
12371 gcc_unreachable ();
12374 return constant_boolean_node (result, type);
12377 return constant_boolean_node (real_compare (code, c0, c1), type);
12380 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12382 To compute GT, swap the arguments and do LT.
12383 To compute GE, do LT and invert the result.
12384 To compute LE, swap the arguments, do LT and invert the result.
12385 To compute NE, do EQ and invert the result.
12387 Therefore, the code below must handle only EQ and LT. */
12389 if (code == LE_EXPR || code == GT_EXPR)
12391 tree tem = op0;
12392 op0 = op1;
12393 op1 = tem;
12394 code = swap_tree_comparison (code);
12397 /* Note that it is safe to invert for real values here because we
12398 have already handled the one case where it matters. */
12400 invert = 0;
12401 if (code == NE_EXPR || code == GE_EXPR)
12403 invert = 1;
12404 code = invert_tree_comparison (code, false);
12407 /* Compute a result for LT or EQ if the arguments permit;
12408 otherwise return NULL_TREE. */
12409 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12411 if (code == EQ_EXPR)
12412 result = tree_int_cst_equal (op0, op1);
12413 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12414 result = INT_CST_LT_UNSIGNED (op0, op1);
12415 else
12416 result = INT_CST_LT (op0, op1);
12418 else
12419 return NULL_TREE;
12421 if (invert)
12422 result ^= 1;
12423 return constant_boolean_node (result, type);
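/* Sketch (plain C): the NaN handling above mirrors IEEE semantics --
   ordered comparisons involving a NaN are false, unordered ones true.
   A hypothetical spot check, assuming NAN_VALUE really is a NaN:  */

static int
check_nan_comparisons_example (double nan_value)
{
  return !(nan_value == nan_value)	/* EQ_EXPR folds to false */
	 && nan_value != nan_value	/* NE_EXPR folds to true */
	 && !(nan_value < nan_value);	/* LT_EXPR folds to false */
}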
12426 /* Build an expression for a cleanup point containing EXPR with type TYPE.
12427 Don't build a CLEANUP_POINT_EXPR for an EXPR which doesn't have side
12428 effects. */
12430 tree
12431 fold_build_cleanup_point_expr (tree type, tree expr)
12433 /* If the expression does not have side effects then we don't have to wrap
12434 it with a cleanup point expression. */
12435 if (!TREE_SIDE_EFFECTS (expr))
12436 return expr;
12438 /* If the expression is a RETURN_EXPR, check whether the expression inside
12439 the return, or the right-hand side of the MODIFY_EXPR inside the return,
12440 is free of side effects. If either has none, we don't need to wrap the
12441 expression in a cleanup point expression. Note we don't check the
12442 left-hand side of the MODIFY_EXPR because it should always be a RESULT_DECL. */
12443 if (TREE_CODE (expr) == RETURN_EXPR)
12445 tree op = TREE_OPERAND (expr, 0);
12446 if (!op || !TREE_SIDE_EFFECTS (op))
12447 return expr;
12448 op = TREE_OPERAND (op, 1);
12449 if (!TREE_SIDE_EFFECTS (op))
12450 return expr;
12453 return build1 (CLEANUP_POINT_EXPR, type, expr);

/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
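
/* A minimal sketch (for exposition only, hypothetical function): at the
   source level the INDIRECT_REF case above means that taking the address
   of a dereference simply returns the pointer, so "&*p" folds to "p".  */
#if 0
static int *
addr_of_deref_example (int *p)
{
  return &*p;  /* Built as ADDR_EXPR of INDIRECT_REF, folded back to P.  */
}
#endif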

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
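
/* A source-level sketch of the folds above (for exposition only; the
   variables and function are hypothetical):

     double d[4];        *(double *) &d     => d[0]
     _Complex float c;   *(float *) &c      => __real__ c
                         ((float *) &c)[1]  => __imag__ c  */
#if 0
static float
imagpart_example (_Complex float c)
{
  /* The constant offset (1 * sizeof (float)) equals TYPE_SIZE_UNIT of
     the element type, so the PLUS_EXPR case above folds this access
     to __imag__ c.  */
  return ((float *) &c)[1];
}
#endif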

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
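
/* A minimal usage sketch (hypothetical caller): before discarding a
   statement's value, strip the parts that cannot matter.  E.g. if f has
   side effects but A and B do not, "(a < b) + f ()" reduces to just
   "f ()", since the comparison neither traps nor has side effects.  */
#if 0
static tree
discard_value_example (tree expr)
{
  return fold_ignored_result (expr);
}
#endif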

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because for a constant this check is more expensive
     than just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because for a constant this check is more expensive
     than just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
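
/* A minimal standalone sketch of the power-of-two paths above, on plain
   integers (for exposition only): rounding up biases the value by
   DIVISOR - 1 first, then both directions clear the low bits with the
   mask -DIVISOR.  */
#if 0
static long
round_up_example (long value, long divisor)
{
  return (value + divisor - 1) & -divisor;  /* e.g. (37 + 7) & -8 == 40 */
}

static long
round_down_example (long value, long divisor)
{
  return value & -divisor;                  /* e.g. 37 & -8 == 32 */
}
#endif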

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
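
/* A minimal usage sketch (hypothetical caller): for addresses within the
   same object, e.g. E1 = &a[i + 1] and E2 = &a[i], the cores compare
   equal and *DIFF receives the byte distance, here the element size.  */
#if 0
static bool
adjacent_addrs_p (tree e1, tree e2, HOST_WIDE_INT step)
{
  HOST_WIDE_INT diff;

  /* True when E1 and E2 are provably STEP bytes apart.  */
  return ptr_difference_const (e1, e2, &diff) && diff == step;
}
#endif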

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    default:
      break;
    }
  return NULL_TREE;
}
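
/* A minimal usage sketch (hypothetical caller, e.g. a fabs folder):
   inside fabs the sign of the argument is irrelevant, so "x * -y" can
   be replaced by "x * y".  The HONOR_SIGN_DEPENDENT_ROUNDING check
   above suppresses this when the rounding mode makes signs matter.  */
#if 0
static tree
fold_fabs_arg_example (tree arg)
{
  /* NULL_TREE means nothing could be stripped; keep ARG unchanged.  */
  tree stripped = fold_strip_sign_ops (arg);
  return stripped ? stripped : arg;
}
#endif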