gcc/fold-const.c

/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as combining two
   comparisons with AND or OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
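
/* A worked reading of the encoding (illustrative, not from the original
   comments): bit 0 means "less", bit 1 "equal", bit 2 "greater" and
   bit 3 "unordered", so the composite codes fall out of bitwise
   arithmetic, e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ),
   COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT) and
   COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT).
   Combining two comparisons with AND or OR is then just & or | on
   these four-bit masks.  */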

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
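
/* A worked example, assuming a 32-bit HOST_WIDE_INT: 0x7fffffff +
   0x7fffffff wraps to 0xfffffffe.  A and B agree in sign, so
   ~((a) ^ (b)) has the sign bit set; A and SUM disagree, so
   ((a) ^ (sum)) has it set too; the AND is negative and the macro
   reports overflow.  For 1 + 1 == 2 the second factor has a clear
   sign bit and no overflow is reported.  */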

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of each original word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
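
/* For instance, with HOST_BITS_PER_WIDE_INT == 32 each word holds a
   16-bit "digit" and BASE == 0x10000, so LOWPART (0x12345678) == 0x5678,
   HIGHPART (0x12345678) == 0x1234, and indeed
   0x5678 + 0x1234 * BASE == 0x12345678.  (Illustrative numbers; the
   actual width depends on the host.)  */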

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
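
/* encode and decode are exact inverses; continuing the 32-bit example
   above, encode (words, 0x12345678, 0x9abcdef0) yields the digit array
   {0x5678, 0x1234, 0xdef0, 0x9abc}, and decode on that array restores
   low == 0x12345678 and hi == 0x9abcdef0.  */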

/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
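
/* As a sketch of the effect (hypothetical values): forcing the constant
   0x1ff into an 8-bit unsigned type masks it down to 0xff, while
   forcing 0xff into an 8-bit signed type sign-extends the value to -1;
   in both cases a changed value produces a fresh node via
   build_int_cst_wide rather than mutating T in place.  */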

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
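
/* The carry out of the low word is recovered from the wrap-around test
   (l < l1): e.g. with 32-bit words, 0xffffffff + 2 gives l == 1, which
   is less than l1, so 1 is carried into the high word.  The signed
   overflow indication comes from OVERFLOW_SUM_SIGN on the high halves
   alone, since only they carry the sign.  */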

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);  /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
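
/* This is classic schoolbook multiplication on quarter-precision
   digits: digit i of arg1 times digit j of arg2 contributes to column
   i + j of the 8-digit product, and each column's excess above one
   digit is carried into the next.  Because a digit is at most BASE - 1,
   the running term digit*digit + carry + prod[k] still fits in one
   HOST_WIDE_INT, which is what the two in-loop comments quantify for
   16-bit digits.  */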

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
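
/* Note the double shift in the mixed case above:
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 computes
   l1 >> (HOST_BITS_PER_WIDE_INT - count) without ever shifting by the
   full word width, which C leaves undefined when count == 0.  The same
   idiom appears in rshift_double below.  */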

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {               /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;                /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
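
/* A quick worked example of the rounding modes, for 7 / 2 (trial
   quotient 3, remainder 1) and -7 / 2 (trial quotient -3, remainder
   -1): TRUNC keeps 3 and -3; FLOOR keeps 3 but adjusts -3 to -4; CEIL
   adjusts 3 to 4 but keeps -3; ROUND compares 2 * |rem| against |den|
   and moves both away from zero, giving 4 and -4.  */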

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
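
/* For example (illustrative values), with EXACT_DIV_EXPR this returns
   the constant 3 for 12 / 4 but NULL_TREE for 13 / 4, letting callers
   fold a division only when it is exact.  */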

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
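
/* sin, tan, cbrt and the listed inverse and hyperbolic functions are
   all odd, so f(-x) == -f(x) and a negation can be pushed into the
   argument; an even function such as cos would not qualify, which is
   why it is absent from the list.  */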

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
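
/* In two's complement only the most negative value lacks a negation:
   for a 32-bit signed type every constant except 0x80000000 (INT_MIN)
   can be negated safely, and that single bit pattern is exactly what
   the final comparison rejects.  */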

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
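
/* A sketch with hypothetical operands: splitting the tree X + 4 with
   code PLUS_EXPR stores the literal 4 in *LITP, leaves *CONP null and
   returns X; splitting X - 4 instead routes the 4 to *MINUS_LITP.  A
   TREE_CONSTANT but non-literal operand, such as the address of a
   static object, would go to *CONP.  */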

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* Fall through.  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* Fall through.  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math is set.  */

      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          return NULL_TREE;
        }
      return t;
    }
  return NULL_TREE;
}
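
/* The complex division above is the textbook identity
   (r1 + i1*i) / (r2 + i2*i)
     == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),
   with T1 and T2 the two numerators and MAGSQUARED the shared real
   denominator.  */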

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in a signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
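
/* For instance (hypothetical constants): with unsigned sizetype
   operands 4 and 7, the result is computed as 0 - (7 - 4) and returned
   as the ssizetype constant -3, which a plain unsigned subtraction
   could not represent.  */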

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1979 /* Convert expression ARG to type TYPE. Used by the middle-end for
1980 simple conversions in preference to calling the front-end's convert. */
1982 tree
1983 fold_convert (tree type, tree arg)
1985 tree orig = TREE_TYPE (arg);
1986 tree tem;
1988 if (type == orig)
1989 return arg;
1991 if (TREE_CODE (arg) == ERROR_MARK
1992 || TREE_CODE (type) == ERROR_MARK
1993 || TREE_CODE (orig) == ERROR_MARK)
1994 return error_mark_node;
1996 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1997 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1998 TYPE_MAIN_VARIANT (orig)))
1999 return fold_build1 (NOP_EXPR, type, arg);
2001 switch (TREE_CODE (type))
2003 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2004 case POINTER_TYPE: case REFERENCE_TYPE:
2005 case OFFSET_TYPE:
2006 if (TREE_CODE (arg) == INTEGER_CST)
2008 tem = fold_convert_const (NOP_EXPR, type, arg);
2009 if (tem != NULL_TREE)
2010 return tem;
2012 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2013 || TREE_CODE (orig) == OFFSET_TYPE)
2014 return fold_build1 (NOP_EXPR, type, arg);
2015 if (TREE_CODE (orig) == COMPLEX_TYPE)
2017 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2018 return fold_convert (type, tem);
2020 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2021 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2022 return fold_build1 (NOP_EXPR, type, arg);
2024 case REAL_TYPE:
2025 if (TREE_CODE (arg) == INTEGER_CST)
2027 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2028 if (tem != NULL_TREE)
2029 return tem;
2031 else if (TREE_CODE (arg) == REAL_CST)
2033 tem = fold_convert_const (NOP_EXPR, type, arg);
2034 if (tem != NULL_TREE)
2035 return tem;
2038 switch (TREE_CODE (orig))
2040 case INTEGER_TYPE:
2041 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2042 case POINTER_TYPE: case REFERENCE_TYPE:
2043 return fold_build1 (FLOAT_EXPR, type, arg);
2045 case REAL_TYPE:
2046 return fold_build1 (NOP_EXPR, type, arg);
2048 case COMPLEX_TYPE:
2049 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2050 return fold_convert (type, tem);
2052 default:
2053 gcc_unreachable ();
2056 case COMPLEX_TYPE:
2057 switch (TREE_CODE (orig))
2059 case INTEGER_TYPE:
2060 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2061 case POINTER_TYPE: case REFERENCE_TYPE:
2062 case REAL_TYPE:
2063 return build2 (COMPLEX_EXPR, type,
2064 fold_convert (TREE_TYPE (type), arg),
2065 fold_convert (TREE_TYPE (type), integer_zero_node));
2066 case COMPLEX_TYPE:
2068 tree rpart, ipart;
2070 if (TREE_CODE (arg) == COMPLEX_EXPR)
2072 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2073 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2074 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2077 arg = save_expr (arg);
2078 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2079 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2080 rpart = fold_convert (TREE_TYPE (type), rpart);
2081 ipart = fold_convert (TREE_TYPE (type), ipart);
2082 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2085 default:
2086 gcc_unreachable ();
2089 case VECTOR_TYPE:
2090 if (integer_zerop (arg))
2091 return build_zero_vector (type);
2092 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2093 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2094 || TREE_CODE (orig) == VECTOR_TYPE);
2095 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2097 case VOID_TYPE:
2098 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2100 default:
2101 gcc_unreachable ();
2105 /* Return false if expr can be assumed not to be an lvalue, true
2106 otherwise. */
2108 static bool
2109 maybe_lvalue_p (tree x)
2111 /* We only need to wrap lvalue tree codes. */
2112 switch (TREE_CODE (x))
2114 case VAR_DECL:
2115 case PARM_DECL:
2116 case RESULT_DECL:
2117 case LABEL_DECL:
2118 case FUNCTION_DECL:
2119 case SSA_NAME:
2121 case COMPONENT_REF:
2122 case INDIRECT_REF:
2123 case ALIGN_INDIRECT_REF:
2124 case MISALIGNED_INDIRECT_REF:
2125 case ARRAY_REF:
2126 case ARRAY_RANGE_REF:
2127 case BIT_FIELD_REF:
2128 case OBJ_TYPE_REF:
2130 case REALPART_EXPR:
2131 case IMAGPART_EXPR:
2132 case PREINCREMENT_EXPR:
2133 case PREDECREMENT_EXPR:
2134 case SAVE_EXPR:
2135 case TRY_CATCH_EXPR:
2136 case WITH_CLEANUP_EXPR:
2137 case COMPOUND_EXPR:
2138 case MODIFY_EXPR:
2139 case TARGET_EXPR:
2140 case COND_EXPR:
2141 case BIND_EXPR:
2142 case MIN_EXPR:
2143 case MAX_EXPR:
2144 break;
2146 default:
2147 /* Assume the worst for front-end tree codes. */
2148 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2149 break;
2150 return false;
2153 return true;
2156 /* Return an expr equal to X but certainly not valid as an lvalue. */
2158 tree
2159 non_lvalue (tree x)
2161 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2162 us. */
2163 if (in_gimple_form)
2164 return x;
2166 if (! maybe_lvalue_p (x))
2167 return x;
2168 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2171 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2172 Zero means allow extended lvalues. */
2174 int pedantic_lvalues;
2176 /* When pedantic, return an expr equal to X but certainly not valid as a
2177 pedantic lvalue. Otherwise, return X. */
2179 static tree
2180 pedantic_non_lvalue (tree x)
2182 if (pedantic_lvalues)
2183 return non_lvalue (x);
2184 else
2185 return x;
2188 /* Given a tree comparison code, return the code that is the logical inverse
2189 of the given code. It is not safe to do this for floating-point
2190 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a flag saying
2191 whether NaNs must be honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2193 enum tree_code
2194 invert_tree_comparison (enum tree_code code, bool honor_nans)
2196 if (honor_nans && flag_trapping_math)
2197 return ERROR_MARK;
2199 switch (code)
2201 case EQ_EXPR:
2202 return NE_EXPR;
2203 case NE_EXPR:
2204 return EQ_EXPR;
2205 case GT_EXPR:
2206 return honor_nans ? UNLE_EXPR : LE_EXPR;
2207 case GE_EXPR:
2208 return honor_nans ? UNLT_EXPR : LT_EXPR;
2209 case LT_EXPR:
2210 return honor_nans ? UNGE_EXPR : GE_EXPR;
2211 case LE_EXPR:
2212 return honor_nans ? UNGT_EXPR : GT_EXPR;
2213 case LTGT_EXPR:
2214 return UNEQ_EXPR;
2215 case UNEQ_EXPR:
2216 return LTGT_EXPR;
2217 case UNGT_EXPR:
2218 return LE_EXPR;
2219 case UNGE_EXPR:
2220 return LT_EXPR;
2221 case UNLT_EXPR:
2222 return GE_EXPR;
2223 case UNLE_EXPR:
2224 return GT_EXPR;
2225 case ORDERED_EXPR:
2226 return UNORDERED_EXPR;
2227 case UNORDERED_EXPR:
2228 return ORDERED_EXPR;
2229 default:
2230 gcc_unreachable ();
2234 /* Similar, but return the comparison that results if the operands are
2235 swapped. This is safe for floating-point. */
2237 enum tree_code
2238 swap_tree_comparison (enum tree_code code)
2240 switch (code)
2242 case EQ_EXPR:
2243 case NE_EXPR:
2244 case ORDERED_EXPR:
2245 case UNORDERED_EXPR:
2246 case LTGT_EXPR:
2247 case UNEQ_EXPR:
2248 return code;
2249 case GT_EXPR:
2250 return LT_EXPR;
2251 case GE_EXPR:
2252 return LE_EXPR;
2253 case LT_EXPR:
2254 return GT_EXPR;
2255 case LE_EXPR:
2256 return GE_EXPR;
2257 case UNGT_EXPR:
2258 return UNLT_EXPR;
2259 case UNGE_EXPR:
2260 return UNLE_EXPR;
2261 case UNLT_EXPR:
2262 return UNGT_EXPR;
2263 case UNLE_EXPR:
2264 return UNGE_EXPR;
2265 default:
2266 gcc_unreachable ();
2271 /* Convert a comparison tree code from an enum tree_code representation
2272 into a compcode bit-based encoding. This function is the inverse of
2273 compcode_to_comparison. */
2275 static enum comparison_code
2276 comparison_to_compcode (enum tree_code code)
2278 switch (code)
2280 case LT_EXPR:
2281 return COMPCODE_LT;
2282 case EQ_EXPR:
2283 return COMPCODE_EQ;
2284 case LE_EXPR:
2285 return COMPCODE_LE;
2286 case GT_EXPR:
2287 return COMPCODE_GT;
2288 case NE_EXPR:
2289 return COMPCODE_NE;
2290 case GE_EXPR:
2291 return COMPCODE_GE;
2292 case ORDERED_EXPR:
2293 return COMPCODE_ORD;
2294 case UNORDERED_EXPR:
2295 return COMPCODE_UNORD;
2296 case UNLT_EXPR:
2297 return COMPCODE_UNLT;
2298 case UNEQ_EXPR:
2299 return COMPCODE_UNEQ;
2300 case UNLE_EXPR:
2301 return COMPCODE_UNLE;
2302 case UNGT_EXPR:
2303 return COMPCODE_UNGT;
2304 case LTGT_EXPR:
2305 return COMPCODE_LTGT;
2306 case UNGE_EXPR:
2307 return COMPCODE_UNGE;
2308 default:
2309 gcc_unreachable ();
2313 /* Convert a compcode bit-based encoding of a comparison operator back
2314 to GCC's enum tree_code representation. This function is the
2315 inverse of comparison_to_compcode. */
2317 static enum tree_code
2318 compcode_to_comparison (enum comparison_code code)
2320 switch (code)
2322 case COMPCODE_LT:
2323 return LT_EXPR;
2324 case COMPCODE_EQ:
2325 return EQ_EXPR;
2326 case COMPCODE_LE:
2327 return LE_EXPR;
2328 case COMPCODE_GT:
2329 return GT_EXPR;
2330 case COMPCODE_NE:
2331 return NE_EXPR;
2332 case COMPCODE_GE:
2333 return GE_EXPR;
2334 case COMPCODE_ORD:
2335 return ORDERED_EXPR;
2336 case COMPCODE_UNORD:
2337 return UNORDERED_EXPR;
2338 case COMPCODE_UNLT:
2339 return UNLT_EXPR;
2340 case COMPCODE_UNEQ:
2341 return UNEQ_EXPR;
2342 case COMPCODE_UNLE:
2343 return UNLE_EXPR;
2344 case COMPCODE_UNGT:
2345 return UNGT_EXPR;
2346 case COMPCODE_LTGT:
2347 return LTGT_EXPR;
2348 case COMPCODE_UNGE:
2349 return UNGE_EXPR;
2350 default:
2351 gcc_unreachable ();
2355 /* Return a tree for the comparison which is the combination of
2356 doing the AND or OR (depending on CODE) of the two operations LCODE
2357 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2358 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2359 if this makes the transformation invalid. */
2361 tree
2362 combine_comparisons (enum tree_code code, enum tree_code lcode,
2363 enum tree_code rcode, tree truth_type,
2364 tree ll_arg, tree lr_arg)
2366 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2367 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2368 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2369 enum comparison_code compcode;
2371 switch (code)
2373 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2374 compcode = lcompcode & rcompcode;
2375 break;
2377 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2378 compcode = lcompcode | rcompcode;
2379 break;
2381 default:
2382 return NULL_TREE;
2385 if (!honor_nans)
2387 /* Eliminate unordered comparisons, as well as LTGT and ORD
2388 which are not used unless the mode has NaNs. */
2389 compcode &= ~COMPCODE_UNORD;
2390 if (compcode == COMPCODE_LTGT)
2391 compcode = COMPCODE_NE;
2392 else if (compcode == COMPCODE_ORD)
2393 compcode = COMPCODE_TRUE;
2395 else if (flag_trapping_math)
2397 /* Check that the original operation and the optimized ones will trap
2398 under the same condition. */
2399 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2400 && (lcompcode != COMPCODE_EQ)
2401 && (lcompcode != COMPCODE_ORD);
2402 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2403 && (rcompcode != COMPCODE_EQ)
2404 && (rcompcode != COMPCODE_ORD);
2405 bool trap = (compcode & COMPCODE_UNORD) == 0
2406 && (compcode != COMPCODE_EQ)
2407 && (compcode != COMPCODE_ORD);
2409 /* In a short-circuited boolean expression the LHS might be
2410 such that the RHS, if evaluated, will never trap. For
2411 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2412 if neither x nor y is NaN. (This is a mixed blessing: for
2413 example, the expression above will never trap, hence
2414 optimizing it to x < y would be invalid). */
2415 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2416 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2417 rtrap = false;
2419 /* If the comparison was short-circuited, and only the RHS
2420 trapped, we may now generate a spurious trap. */
2421 if (rtrap && !ltrap
2422 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2423 return NULL_TREE;
2425 /* If we changed the conditions that cause a trap, we lose. */
2426 if ((ltrap || rtrap) != trap)
2427 return NULL_TREE;
2430 if (compcode == COMPCODE_TRUE)
2431 return constant_boolean_node (true, truth_type);
2432 else if (compcode == COMPCODE_FALSE)
2433 return constant_boolean_node (false, truth_type);
2434 else
2435 return fold_build2 (compcode_to_comparison (compcode),
2436 truth_type, ll_arg, lr_arg);
2439 /* Return nonzero if CODE is a tree code that represents a truth value. */
2441 static int
2442 truth_value_p (enum tree_code code)
2444 return (TREE_CODE_CLASS (code) == tcc_comparison
2445 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2446 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2447 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2450 /* Return nonzero if two operands (typically of the same tree node)
2451 are necessarily equal. If either argument has side-effects this
2452 function returns zero. FLAGS modifies behavior as follows:
2454 If OEP_ONLY_CONST is set, only return nonzero for constants.
2455 This function tests whether the operands are indistinguishable;
2456 it does not test whether they are equal using C's == operation.
2457 The distinction is important for IEEE floating point, because
2458 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2459 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2461 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2462 even though it may hold multiple values during a function.
2463 This is because a GCC tree node guarantees that nothing else is
2464 executed between the evaluation of its "operands" (which may often
2465 be evaluated in arbitrary order). Hence if the operands themselves
2466 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2467 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2468 unset means assuming isochronic (or instantaneous) tree equivalence.
2469 Unless comparing arbitrary expression trees, such as from different
2470 statements, this flag can usually be left unset.
2472 If OEP_PURE_SAME is set, then pure functions with identical arguments
2473 are considered the same. It is used when the caller has other ways
2474 to ensure that global memory is unchanged in between. */
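/* Example (illustrative): the REAL_CST case below uses
   REAL_VALUES_IDENTICAL rather than numerical equality, so REAL_CSTs
   for -0.0 and 0.0 are *not* operand_equal_p even though -0.0 == 0.0
   in C, while two identical NaN constants *are*, even though
   NaN != NaN.  */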
2476 int
2477 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2479 /* If either is ERROR_MARK, they aren't equal. */
2480 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2481 return 0;
2483 /* If both types don't have the same signedness, then we can't consider
2484 them equal. We must check this before the STRIP_NOPS calls
2485 because they may change the signedness of the arguments. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2487 return 0;
2489 STRIP_NOPS (arg0);
2490 STRIP_NOPS (arg1);
2492 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2493 /* This is needed for conversions and for COMPONENT_REF.
2494 Might as well play it safe and always test this. */
2495 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2496 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2497 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2498 return 0;
2500 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2501 We don't care about side effects in that case because the SAVE_EXPR
2502 takes care of that for us. In all other cases, two expressions are
2503 equal if they have no side effects. If we have two identical
2504 expressions with side effects that should be treated the same due
2505 to the only side effects being identical SAVE_EXPR's, that will
2506 be detected in the recursive calls below. */
2507 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2508 && (TREE_CODE (arg0) == SAVE_EXPR
2509 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2510 return 1;
2512 /* Next handle constant cases, those for which we can return 1 even
2513 if ONLY_CONST is set. */
2514 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2515 switch (TREE_CODE (arg0))
2517 case INTEGER_CST:
2518 return (! TREE_CONSTANT_OVERFLOW (arg0)
2519 && ! TREE_CONSTANT_OVERFLOW (arg1)
2520 && tree_int_cst_equal (arg0, arg1));
2522 case REAL_CST:
2523 return (! TREE_CONSTANT_OVERFLOW (arg0)
2524 && ! TREE_CONSTANT_OVERFLOW (arg1)
2525 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2526 TREE_REAL_CST (arg1)));
2528 case VECTOR_CST:
2530 tree v1, v2;
2532 if (TREE_CONSTANT_OVERFLOW (arg0)
2533 || TREE_CONSTANT_OVERFLOW (arg1))
2534 return 0;
2536 v1 = TREE_VECTOR_CST_ELTS (arg0);
2537 v2 = TREE_VECTOR_CST_ELTS (arg1);
2538 while (v1 && v2)
2540 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2541 flags))
2542 return 0;
2543 v1 = TREE_CHAIN (v1);
2544 v2 = TREE_CHAIN (v2);
2547 return v1 == v2;
2550 case COMPLEX_CST:
2551 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2552 flags)
2553 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2554 flags));
2556 case STRING_CST:
2557 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2558 && ! memcmp (TREE_STRING_POINTER (arg0),
2559 TREE_STRING_POINTER (arg1),
2560 TREE_STRING_LENGTH (arg0)));
2562 case ADDR_EXPR:
2563 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2564 0);
2565 default:
2566 break;
2569 if (flags & OEP_ONLY_CONST)
2570 return 0;
2572 /* Define macros to test an operand from arg0 and arg1 for equality and a
2573 variant that allows null and views null as being different from any
2574 non-null value. In the latter case, if either is null, then both
2575 must be; otherwise, do the normal comparison. */
2576 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2577 TREE_OPERAND (arg1, N), flags)
2579 #define OP_SAME_WITH_NULL(N) \
2580 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2581 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2583 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2585 case tcc_unary:
2586 /* Two conversions are equal only if signedness and modes match. */
2587 switch (TREE_CODE (arg0))
2589 case NOP_EXPR:
2590 case CONVERT_EXPR:
2591 case FIX_CEIL_EXPR:
2592 case FIX_TRUNC_EXPR:
2593 case FIX_FLOOR_EXPR:
2594 case FIX_ROUND_EXPR:
2595 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2596 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2597 return 0;
2598 break;
2599 default:
2600 break;
2603 return OP_SAME (0);
2606 case tcc_comparison:
2607 case tcc_binary:
2608 if (OP_SAME (0) && OP_SAME (1))
2609 return 1;
2611 /* For commutative ops, allow the other order. */
2612 return (commutative_tree_code (TREE_CODE (arg0))
2613 && operand_equal_p (TREE_OPERAND (arg0, 0),
2614 TREE_OPERAND (arg1, 1), flags)
2615 && operand_equal_p (TREE_OPERAND (arg0, 1),
2616 TREE_OPERAND (arg1, 0), flags));
2618 case tcc_reference:
2619 /* If either of the pointer (or reference) expressions we are
2620 dereferencing contain a side effect, these cannot be equal. */
2621 if (TREE_SIDE_EFFECTS (arg0)
2622 || TREE_SIDE_EFFECTS (arg1))
2623 return 0;
2625 switch (TREE_CODE (arg0))
2627 case INDIRECT_REF:
2628 case ALIGN_INDIRECT_REF:
2629 case MISALIGNED_INDIRECT_REF:
2630 case REALPART_EXPR:
2631 case IMAGPART_EXPR:
2632 return OP_SAME (0);
2634 case ARRAY_REF:
2635 case ARRAY_RANGE_REF:
2636 /* Operands 2 and 3 may be null. */
2637 return (OP_SAME (0)
2638 && OP_SAME (1)
2639 && OP_SAME_WITH_NULL (2)
2640 && OP_SAME_WITH_NULL (3));
2642 case COMPONENT_REF:
2643 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2644 may be NULL when we're called to compare MEM_EXPRs. */
2645 return OP_SAME_WITH_NULL (0)
2646 && OP_SAME (1)
2647 && OP_SAME_WITH_NULL (2);
2649 case BIT_FIELD_REF:
2650 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2652 default:
2653 return 0;
2656 case tcc_expression:
2657 switch (TREE_CODE (arg0))
2659 case ADDR_EXPR:
2660 case TRUTH_NOT_EXPR:
2661 return OP_SAME (0);
2663 case TRUTH_ANDIF_EXPR:
2664 case TRUTH_ORIF_EXPR:
2665 return OP_SAME (0) && OP_SAME (1);
2667 case TRUTH_AND_EXPR:
2668 case TRUTH_OR_EXPR:
2669 case TRUTH_XOR_EXPR:
2670 if (OP_SAME (0) && OP_SAME (1))
2671 return 1;
2673 /* Otherwise take into account this is a commutative operation. */
2674 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2675 TREE_OPERAND (arg1, 1), flags)
2676 && operand_equal_p (TREE_OPERAND (arg0, 1),
2677 TREE_OPERAND (arg1, 0), flags));
2679 case CALL_EXPR:
2680 /* If the CALL_EXPRs call different functions, then they
2681 clearly can not be equal. */
2682 if (!OP_SAME (0))
2683 return 0;
2686 unsigned int cef = call_expr_flags (arg0);
2687 if (flags & OEP_PURE_SAME)
2688 cef &= ECF_CONST | ECF_PURE;
2689 else
2690 cef &= ECF_CONST;
2691 if (!cef)
2692 return 0;
2695 /* Now see if all the arguments are the same. operand_equal_p
2696 does not handle TREE_LIST, so we walk the operands here
2697 feeding them to operand_equal_p. */
2698 arg0 = TREE_OPERAND (arg0, 1);
2699 arg1 = TREE_OPERAND (arg1, 1);
2700 while (arg0 && arg1)
2702 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2703 flags))
2704 return 0;
2706 arg0 = TREE_CHAIN (arg0);
2707 arg1 = TREE_CHAIN (arg1);
2710 /* If we get here and both argument lists are exhausted
2711 then the CALL_EXPRs are equal. */
2712 return ! (arg0 || arg1);
2714 default:
2715 return 0;
2718 case tcc_declaration:
2719 /* Consider __builtin_sqrt equal to sqrt. */
2720 return (TREE_CODE (arg0) == FUNCTION_DECL
2721 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2722 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2723 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2725 default:
2726 return 0;
2729 #undef OP_SAME
2730 #undef OP_SAME_WITH_NULL
2733 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2734 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2736 When in doubt, return 0. */
2738 static int
2739 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2741 int unsignedp1, unsignedpo;
2742 tree primarg0, primarg1, primother;
2743 unsigned int correct_width;
2745 if (operand_equal_p (arg0, arg1, 0))
2746 return 1;
2748 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2749 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2750 return 0;
2752 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2753 and see if the inner values are the same. This removes any
2754 signedness comparison, which doesn't matter here. */
2755 primarg0 = arg0, primarg1 = arg1;
2756 STRIP_NOPS (primarg0);
2757 STRIP_NOPS (primarg1);
2758 if (operand_equal_p (primarg0, primarg1, 0))
2759 return 1;
2761 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2762 actual comparison operand, ARG0.
2764 First throw away any conversions to wider types
2765 already present in the operands. */
2767 primarg1 = get_narrower (arg1, &unsignedp1);
2768 primother = get_narrower (other, &unsignedpo);
2770 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2771 if (unsignedp1 == unsignedpo
2772 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2773 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2775 tree type = TREE_TYPE (arg0);
2777 /* Make sure shorter operand is extended the right way
2778 to match the longer operand. */
2779 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2780 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2782 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2783 return 1;
2786 return 0;
2789 /* See if ARG is an expression that is either a comparison or is performing
2790 arithmetic on comparisons. The comparisons must only be comparing
2791 two different values, which will be stored in *CVAL1 and *CVAL2; if
2792 they are nonzero it means that some operands have already been found.
2793 No variables may be used anywhere else in the expression except in the
2794 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2795 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2797 If this is true, return 1. Otherwise, return zero. */
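/* Example (sketch): for ARG = (a < b) | (a == b) this returns 1 with
   *CVAL1 = a and *CVAL2 = b, since every comparison in the tree tests
   the same two values; (a < b) | (a == c) fails because a third
   value, c, appears in a comparison.  */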
2799 static int
2800 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2802 enum tree_code code = TREE_CODE (arg);
2803 enum tree_code_class class = TREE_CODE_CLASS (code);
2805 /* We can handle some of the tcc_expression cases here. */
2806 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2807 class = tcc_unary;
2808 else if (class == tcc_expression
2809 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2810 || code == COMPOUND_EXPR))
2811 class = tcc_binary;
2813 else if (class == tcc_expression && code == SAVE_EXPR
2814 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2816 /* If we've already found a CVAL1 or CVAL2, this expression is
2817 too complex to handle. */
2818 if (*cval1 || *cval2)
2819 return 0;
2821 class = tcc_unary;
2822 *save_p = 1;
2825 switch (class)
2827 case tcc_unary:
2828 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2830 case tcc_binary:
2831 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2832 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2833 cval1, cval2, save_p));
2835 case tcc_constant:
2836 return 1;
2838 case tcc_expression:
2839 if (code == COND_EXPR)
2840 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2841 cval1, cval2, save_p)
2842 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2843 cval1, cval2, save_p)
2844 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2845 cval1, cval2, save_p));
2846 return 0;
2848 case tcc_comparison:
2849 /* First see if we can handle the first operand, then the second. For
2850 the second operand, we know *CVAL1 can't be zero. It must be that
2851 one side of the comparison is each of the values; test for the
2852 case where this isn't true by failing if the two operands
2853 are the same. */
2855 if (operand_equal_p (TREE_OPERAND (arg, 0),
2856 TREE_OPERAND (arg, 1), 0))
2857 return 0;
2859 if (*cval1 == 0)
2860 *cval1 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2862 ;
2863 else if (*cval2 == 0)
2864 *cval2 = TREE_OPERAND (arg, 0);
2865 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2866 ;
2867 else
2868 return 0;
2870 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2871 ;
2872 else if (*cval2 == 0)
2873 *cval2 = TREE_OPERAND (arg, 1);
2874 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2875 ;
2876 else
2877 return 0;
2879 return 1;
2881 default:
2882 return 0;
2886 /* ARG is a tree that is known to contain just arithmetic operations and
2887 comparisons. Evaluate the operations in the tree substituting NEW0 for
2888 any occurrence of OLD0 as an operand of a comparison and likewise for
2889 NEW1 and OLD1. */
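/* Example (sketch): eval_subst ((a < b) && (b < c), a, x, b, y)
   rebuilds the tree as (x < y) && (y < c) -- OLD0/OLD1 are replaced
   only where they occur as comparison operands, as required above.  */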
2891 static tree
2892 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2894 tree type = TREE_TYPE (arg);
2895 enum tree_code code = TREE_CODE (arg);
2896 enum tree_code_class class = TREE_CODE_CLASS (code);
2898 /* We can handle some of the tcc_expression cases here. */
2899 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2900 class = tcc_unary;
2901 else if (class == tcc_expression
2902 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2903 class = tcc_binary;
2905 switch (class)
2907 case tcc_unary:
2908 return fold_build1 (code, type,
2909 eval_subst (TREE_OPERAND (arg, 0),
2910 old0, new0, old1, new1));
2912 case tcc_binary:
2913 return fold_build2 (code, type,
2914 eval_subst (TREE_OPERAND (arg, 0),
2915 old0, new0, old1, new1),
2916 eval_subst (TREE_OPERAND (arg, 1),
2917 old0, new0, old1, new1));
2919 case tcc_expression:
2920 switch (code)
2922 case SAVE_EXPR:
2923 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2925 case COMPOUND_EXPR:
2926 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2928 case COND_EXPR:
2929 return fold_build3 (code, type,
2930 eval_subst (TREE_OPERAND (arg, 0),
2931 old0, new0, old1, new1),
2932 eval_subst (TREE_OPERAND (arg, 1),
2933 old0, new0, old1, new1),
2934 eval_subst (TREE_OPERAND (arg, 2),
2935 old0, new0, old1, new1));
2936 default:
2937 break;
2939 /* Fall through - ??? */
2941 case tcc_comparison:
2943 tree arg0 = TREE_OPERAND (arg, 0);
2944 tree arg1 = TREE_OPERAND (arg, 1);
2946 /* We need to check both for exact equality and tree equality. The
2947 former will be true if the operand has a side-effect. In that
2948 case, we know the operand occurred exactly once. */
2950 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2951 arg0 = new0;
2952 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2953 arg0 = new1;
2955 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2956 arg1 = new0;
2957 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2958 arg1 = new1;
2960 return fold_build2 (code, type, arg0, arg1);
2963 default:
2964 return arg;
2968 /* Return a tree for the case when the result of an expression is RESULT
2969 converted to TYPE and OMITTED was previously an operand of the expression
2970 but is now not needed (e.g., we folded OMITTED * 0).
2972 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2973 the conversion of RESULT to TYPE. */
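/* Example (illustrative): folding f () * 0 must still evaluate the
   call, so omit_one_operand (type, zero, call) produces the
   equivalent of the C expression (f (), 0); when OMITTED has no side
   effects the result is simply RESULT converted and marked
   non-lvalue.  */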
2975 tree
2976 omit_one_operand (tree type, tree result, tree omitted)
2978 tree t = fold_convert (type, result);
2980 if (TREE_SIDE_EFFECTS (omitted))
2981 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2983 return non_lvalue (t);
2986 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2988 static tree
2989 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2991 tree t = fold_convert (type, result);
2993 if (TREE_SIDE_EFFECTS (omitted))
2994 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2996 return pedantic_non_lvalue (t);
2999 /* Return a tree for the case when the result of an expression is RESULT
3000 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3001 of the expression but are now not needed.
3003 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3004 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3005 evaluated before OMITTED2. Otherwise, if neither has side effects,
3006 just do the conversion of RESULT to TYPE. */
3008 tree
3009 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3011 tree t = fold_convert (type, result);
3013 if (TREE_SIDE_EFFECTS (omitted2))
3014 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3015 if (TREE_SIDE_EFFECTS (omitted1))
3016 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3018 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3022 /* Return a simplified tree node for the truth-negation of ARG. This
3023 never alters ARG itself. We assume that ARG is an operation that
3024 returns a truth value (0 or 1).
3026 FIXME: one would think we would fold the result, but it causes
3027 problems with the dominator optimizer. */
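/* Example (sketch): invert_truthvalue (a && b) takes the
   TRUTH_ANDIF_EXPR case below and produces !a || !b by De Morgan;
   a float a < b becomes a >= b when NaNs need not be honored,
   UNGE_EXPR when they must, and is merely wrapped in TRUTH_NOT_EXPR
   when trapping math makes the inversion unsafe.  */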
3028 tree
3029 invert_truthvalue (tree arg)
3031 tree type = TREE_TYPE (arg);
3032 enum tree_code code = TREE_CODE (arg);
3034 if (code == ERROR_MARK)
3035 return arg;
3037 /* If this is a comparison, we can simply invert it, except for
3038 floating-point non-equality comparisons, in which case we just
3039 enclose a TRUTH_NOT_EXPR around what we have. */
3041 if (TREE_CODE_CLASS (code) == tcc_comparison)
3043 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3044 if (FLOAT_TYPE_P (op_type)
3045 && flag_trapping_math
3046 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3047 && code != NE_EXPR && code != EQ_EXPR)
3048 return build1 (TRUTH_NOT_EXPR, type, arg);
3049 else
3051 code = invert_tree_comparison (code,
3052 HONOR_NANS (TYPE_MODE (op_type)));
3053 if (code == ERROR_MARK)
3054 return build1 (TRUTH_NOT_EXPR, type, arg);
3055 else
3056 return build2 (code, type,
3057 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3061 switch (code)
3063 case INTEGER_CST:
3064 return constant_boolean_node (integer_zerop (arg), type);
3066 case TRUTH_AND_EXPR:
3067 return build2 (TRUTH_OR_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)),
3069 invert_truthvalue (TREE_OPERAND (arg, 1)));
3071 case TRUTH_OR_EXPR:
3072 return build2 (TRUTH_AND_EXPR, type,
3073 invert_truthvalue (TREE_OPERAND (arg, 0)),
3074 invert_truthvalue (TREE_OPERAND (arg, 1)));
3076 case TRUTH_XOR_EXPR:
3077 /* Here we can invert either operand. We invert the first operand
3078 unless the second operand is a TRUTH_NOT_EXPR in which case our
3079 result is the XOR of the first operand with the inside of the
3080 negation of the second operand. */
3082 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3083 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3084 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3085 else
3086 return build2 (TRUTH_XOR_EXPR, type,
3087 invert_truthvalue (TREE_OPERAND (arg, 0)),
3088 TREE_OPERAND (arg, 1));
3090 case TRUTH_ANDIF_EXPR:
3091 return build2 (TRUTH_ORIF_EXPR, type,
3092 invert_truthvalue (TREE_OPERAND (arg, 0)),
3093 invert_truthvalue (TREE_OPERAND (arg, 1)));
3095 case TRUTH_ORIF_EXPR:
3096 return build2 (TRUTH_ANDIF_EXPR, type,
3097 invert_truthvalue (TREE_OPERAND (arg, 0)),
3098 invert_truthvalue (TREE_OPERAND (arg, 1)));
3100 case TRUTH_NOT_EXPR:
3101 return TREE_OPERAND (arg, 0);
3103 case COND_EXPR:
3105 tree arg1 = TREE_OPERAND (arg, 1);
3106 tree arg2 = TREE_OPERAND (arg, 2);
3107 /* A COND_EXPR may have a throw as one operand, which
3108 then has void type. Just leave void operands
3109 as they are. */
3110 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3111 VOID_TYPE_P (TREE_TYPE (arg1))
3112 ? arg1 : invert_truthvalue (arg1),
3113 VOID_TYPE_P (TREE_TYPE (arg2))
3114 ? arg2 : invert_truthvalue (arg2));
3117 case COMPOUND_EXPR:
3118 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3119 invert_truthvalue (TREE_OPERAND (arg, 1)));
3121 case NON_LVALUE_EXPR:
3122 return invert_truthvalue (TREE_OPERAND (arg, 0));
3124 case NOP_EXPR:
3125 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3126 break;
3128 case CONVERT_EXPR:
3129 case FLOAT_EXPR:
3130 return build1 (TREE_CODE (arg), type,
3131 invert_truthvalue (TREE_OPERAND (arg, 0)));
3133 case BIT_AND_EXPR:
3134 if (!integer_onep (TREE_OPERAND (arg, 1)))
3135 break;
3136 return build2 (EQ_EXPR, type, arg,
3137 build_int_cst (type, 0));
3139 case SAVE_EXPR:
3140 return build1 (TRUTH_NOT_EXPR, type, arg);
3142 case CLEANUP_POINT_EXPR:
3143 return build1 (CLEANUP_POINT_EXPR, type,
3144 invert_truthvalue (TREE_OPERAND (arg, 0)));
3146 default:
3147 break;
3149 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3150 return build1 (TRUTH_NOT_EXPR, type, arg);
3153 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3154 operands are another bit-wise operation with a common input. If so,
3155 distribute the bit operations to save an operation and possibly two if
3156 constants are involved. For example, convert
3157 (A | B) & (A | C) into A | (B & C)
3158 Further simplification will occur if B and C are constants.
3160 If this optimization cannot be done, 0 will be returned. */
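/* Example (illustrative): with constants the saving compounds.
   (x | 4) & (x | 8) distributes to x | (4 & 8); 4 & 8 then folds to
   0 and the whole expression reduces to x, so three bit operations
   disappear entirely.  */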
3162 static tree
3163 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3165 tree common;
3166 tree left, right;
3168 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3169 || TREE_CODE (arg0) == code
3170 || (TREE_CODE (arg0) != BIT_AND_EXPR
3171 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3172 return 0;
3174 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3176 common = TREE_OPERAND (arg0, 0);
3177 left = TREE_OPERAND (arg0, 1);
3178 right = TREE_OPERAND (arg1, 1);
3180 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3182 common = TREE_OPERAND (arg0, 0);
3183 left = TREE_OPERAND (arg0, 1);
3184 right = TREE_OPERAND (arg1, 0);
3186 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3188 common = TREE_OPERAND (arg0, 1);
3189 left = TREE_OPERAND (arg0, 0);
3190 right = TREE_OPERAND (arg1, 1);
3192 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3194 common = TREE_OPERAND (arg0, 1);
3195 left = TREE_OPERAND (arg0, 0);
3196 right = TREE_OPERAND (arg1, 0);
3198 else
3199 return 0;
3201 return fold_build2 (TREE_CODE (arg0), type, common,
3202 fold_build2 (code, type, left, right));
3205 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3206 with code CODE. This optimization is unsafe. */
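/* Example (sketch): a/c + b/c becomes (a + b)/c, and a/3.0 - a/6.0
   becomes a * (1/3.0 - 1/6.0), i.e. a * (1/6.0).  This is unsafe
   because 1/C need not be exactly representable, so the rounding of
   the result can differ from the original divisions; callers are
   expected to guard it behind the unsafe-math flags.  */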
3207 static tree
3208 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3210 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3211 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3213 /* (A / C) +- (B / C) -> (A +- B) / C. */
3214 if (mul0 == mul1
3215 && operand_equal_p (TREE_OPERAND (arg0, 1),
3216 TREE_OPERAND (arg1, 1), 0))
3217 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3218 fold_build2 (code, type,
3219 TREE_OPERAND (arg0, 0),
3220 TREE_OPERAND (arg1, 0)),
3221 TREE_OPERAND (arg0, 1));
3223 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3224 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3225 TREE_OPERAND (arg1, 0), 0)
3226 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3227 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3229 REAL_VALUE_TYPE r0, r1;
3230 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3231 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3232 if (!mul0)
3233 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3234 if (!mul1)
3235 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3236 real_arithmetic (&r0, code, &r0, &r1);
3237 return fold_build2 (MULT_EXPR, type,
3238 TREE_OPERAND (arg0, 0),
3239 build_real (type, r0));
3242 return NULL_TREE;
3245 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3246 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3248 static tree
3249 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3250 int unsignedp)
3252 tree result;
3254 if (bitpos == 0)
3256 tree size = TYPE_SIZE (TREE_TYPE (inner));
3257 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3258 || POINTER_TYPE_P (TREE_TYPE (inner)))
3259 && host_integerp (size, 0)
3260 && tree_low_cst (size, 0) == bitsize)
3261 return fold_convert (type, inner);
3264 result = build3 (BIT_FIELD_REF, type, inner,
3265 size_int (bitsize), bitsize_int (bitpos));
3267 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3269 return result;
3272 /* Optimize a bit-field compare.
3274 There are two cases: First is a compare against a constant and the
3275 second is a comparison of two items where the fields are at the same
3276 bit position relative to the start of a chunk (byte, halfword, word)
3277 large enough to contain it. In these cases we can avoid the shift
3278 implicit in bitfield extractions.
3280 For constants, we emit a compare of the shifted constant with the
3281 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3282 compared. For two fields at the same position, we do the ANDs with the
3283 similar mask and compare the result of the ANDs.
3285 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3286 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3287 are the left and right operands of the comparison, respectively.
3289 If the optimization described above can be done, we return the resulting
3290 tree. Otherwise we return zero. */
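/* Example (illustrative; the struct and shift counts are schematic):
   given

       struct s { unsigned pad : 3, f : 4; } x;

   the test x.f == 5 can become, on a little-endian target, roughly

       (word_of (x) & (0xF << 3)) == (5 << 3)

   i.e. one masked word comparison instead of an extract, shift and
   compare of the bit-field.  */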
3292 static tree
3293 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3294 tree lhs, tree rhs)
3296 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3297 tree type = TREE_TYPE (lhs);
3298 tree signed_type, unsigned_type;
3299 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3300 enum machine_mode lmode, rmode, nmode;
3301 int lunsignedp, runsignedp;
3302 int lvolatilep = 0, rvolatilep = 0;
3303 tree linner, rinner = NULL_TREE;
3304 tree mask;
3305 tree offset;
3307 /* Get all the information about the extractions being done. If the bit size
3308 is the same as the size of the underlying object, we aren't doing an
3309 extraction at all and so can do nothing. We also don't want to
3310 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3311 then will no longer be able to replace it. */
3312 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3313 &lunsignedp, &lvolatilep, false);
3314 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3315 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3316 return 0;
3318 if (!const_p)
3320 /* If this is not a constant, we can only do something if bit positions,
3321 sizes, and signedness are the same. */
3322 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3323 &runsignedp, &rvolatilep, false);
3325 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3326 || lunsignedp != runsignedp || offset != 0
3327 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3328 return 0;
3331 /* See if we can find a mode to refer to this field. We should be able to,
3332 but fail if we can't. */
3333 nmode = get_best_mode (lbitsize, lbitpos,
3334 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3335 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3336 TYPE_ALIGN (TREE_TYPE (rinner))),
3337 word_mode, lvolatilep || rvolatilep);
3338 if (nmode == VOIDmode)
3339 return 0;
3341 /* Set signed and unsigned types of the precision of this mode for the
3342 shifts below. */
3343 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3344 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3346 /* Compute the bit position and size for the new reference and our offset
3347 within it. If the new reference is the same size as the original, we
3348 won't optimize anything, so return zero. */
3349 nbitsize = GET_MODE_BITSIZE (nmode);
3350 nbitpos = lbitpos & ~ (nbitsize - 1);
3351 lbitpos -= nbitpos;
3352 if (nbitsize == lbitsize)
3353 return 0;
3355 if (BYTES_BIG_ENDIAN)
3356 lbitpos = nbitsize - lbitsize - lbitpos;
3358 /* Make the mask to be used against the extracted field. */
3359 mask = build_int_cst (unsigned_type, -1);
3360 mask = force_fit_type (mask, 0, false, false);
3361 mask = fold_convert (unsigned_type, mask);
3362 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3363 mask = const_binop (RSHIFT_EXPR, mask,
3364 size_int (nbitsize - lbitsize - lbitpos), 0);
3366 if (! const_p)
3367 /* If not comparing with constant, just rework the comparison
3368 and return. */
3369 return build2 (code, compare_type,
3370 build2 (BIT_AND_EXPR, unsigned_type,
3371 make_bit_field_ref (linner, unsigned_type,
3372 nbitsize, nbitpos, 1),
3373 mask),
3374 build2 (BIT_AND_EXPR, unsigned_type,
3375 make_bit_field_ref (rinner, unsigned_type,
3376 nbitsize, nbitpos, 1),
3377 mask));
3379 /* Otherwise, we are handling the constant case. See if the constant is too
3380 big for the field. Warn and return a tree for 0 (false) if so. We do
3381 this not only for its own sake, but to avoid having to test for this
3382 error case below. If we didn't, we might generate wrong code.
3384 For unsigned fields, the constant shifted right by the field length should
3385 be all zero. For signed fields, the high-order bits should agree with
3386 the sign bit. */
3388 if (lunsignedp)
3390 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3391 fold_convert (unsigned_type, rhs),
3392 size_int (lbitsize), 0)))
3394 warning (0, "comparison is always %d due to width of bit-field",
3395 code == NE_EXPR);
3396 return constant_boolean_node (code == NE_EXPR, compare_type);
3399 else
3401 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3402 size_int (lbitsize - 1), 0);
3403 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3405 warning (0, "comparison is always %d due to width of bit-field",
3406 code == NE_EXPR);
3407 return constant_boolean_node (code == NE_EXPR, compare_type);
3411 /* Single-bit compares should always be against zero. */
3412 if (lbitsize == 1 && ! integer_zerop (rhs))
3414 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3415 rhs = build_int_cst (type, 0);
3418 /* Make a new bitfield reference, shift the constant over the
3419 appropriate number of bits and mask it with the computed mask
3420 (in case this was a signed field). If we changed it, make a new one. */
3421 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3422 if (lvolatilep)
3424 TREE_SIDE_EFFECTS (lhs) = 1;
3425 TREE_THIS_VOLATILE (lhs) = 1;
3428 rhs = const_binop (BIT_AND_EXPR,
3429 const_binop (LSHIFT_EXPR,
3430 fold_convert (unsigned_type, rhs),
3431 size_int (lbitpos), 0),
3432 mask, 0);
3434 return build2 (code, compare_type,
3435 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3436 rhs);
3439 /* Subroutine for fold_truthop: decode a field reference.
3441 If EXP is a comparison reference, we return the innermost reference.
3443 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3444 set to the starting bit number.
3446 If the innermost field can be completely contained in a mode-sized
3447 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3449 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3450 otherwise it is not changed.
3452 *PUNSIGNEDP is set to the signedness of the field.
3454 *PMASK is set to the mask used. This is either contained in a
3455 BIT_AND_EXPR or derived from the width of the field.
3457 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3459 Return 0 if this is not a component reference or is one that we can't
3460 do anything with. */
3462 static tree
3463 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3464 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3465 int *punsignedp, int *pvolatilep,
3466 tree *pmask, tree *pand_mask)
3468 tree outer_type = 0;
3469 tree and_mask = 0;
3470 tree mask, inner, offset;
3471 tree unsigned_type;
3472 unsigned int precision;
3474 /* All the optimizations using this function assume integer fields.
3475 There are problems with FP fields since the type_for_size call
3476 below can fail for, e.g., XFmode. */
3477 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3478 return 0;
3480 /* We are interested in the bare arrangement of bits, so strip everything
3481 that doesn't affect the machine mode. However, record the type of the
3482 outermost expression if it may matter below. */
3483 if (TREE_CODE (exp) == NOP_EXPR
3484 || TREE_CODE (exp) == CONVERT_EXPR
3485 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3486 outer_type = TREE_TYPE (exp);
3487 STRIP_NOPS (exp);
3489 if (TREE_CODE (exp) == BIT_AND_EXPR)
3491 and_mask = TREE_OPERAND (exp, 1);
3492 exp = TREE_OPERAND (exp, 0);
3493 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3494 if (TREE_CODE (and_mask) != INTEGER_CST)
3495 return 0;
3498 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3499 punsignedp, pvolatilep, false);
3500 if ((inner == exp && and_mask == 0)
3501 || *pbitsize < 0 || offset != 0
3502 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3503 return 0;
3505 /* If the number of bits in the reference is the same as the bitsize of
3506 the outer type, then the outer type gives the signedness. Otherwise
3507 (in case of a small bitfield) the signedness is unchanged. */
3508 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3509 *punsignedp = TYPE_UNSIGNED (outer_type);
3511 /* Compute the mask to access the bitfield. */
3512 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3513 precision = TYPE_PRECISION (unsigned_type);
3515 mask = build_int_cst (unsigned_type, -1);
3516 mask = force_fit_type (mask, 0, false, false);
3518 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3519 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3521 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3522 if (and_mask != 0)
3523 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3524 fold_convert (unsigned_type, and_mask), mask);
3526 *pmask = mask;
3527 *pand_mask = and_mask;
3528 return inner;
3531 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3532 bit positions. */
3534 static int
3535 all_ones_mask_p (tree mask, int size)
3537 tree type = TREE_TYPE (mask);
3538 unsigned int precision = TYPE_PRECISION (type);
3539 tree tmask;
3541 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3542 tmask = force_fit_type (tmask, 0, false, false);
3544 return
3545 tree_int_cst_equal (mask,
3546 const_binop (RSHIFT_EXPR,
3547 const_binop (LSHIFT_EXPR, tmask,
3548 size_int (precision - size),
3549 0),
3550 size_int (precision - size), 0));
3553 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3554 represents the sign bit of EXP's type. If EXP represents a sign
3555 or zero extension, also test VAL against the unextended type.
3556 The return value is the (sub)expression whose sign bit is VAL,
3557 or NULL_TREE otherwise. */
3559 static tree
3560 sign_bit_p (tree exp, tree val)
3562 unsigned HOST_WIDE_INT mask_lo, lo;
3563 HOST_WIDE_INT mask_hi, hi;
3564 int width;
3565 tree t;
3567 /* Tree EXP must have an integral type. */
3568 t = TREE_TYPE (exp);
3569 if (! INTEGRAL_TYPE_P (t))
3570 return NULL_TREE;
3572 /* Tree VAL must be an integer constant. */
3573 if (TREE_CODE (val) != INTEGER_CST
3574 || TREE_CONSTANT_OVERFLOW (val))
3575 return NULL_TREE;
3577 width = TYPE_PRECISION (t);
3578 if (width > HOST_BITS_PER_WIDE_INT)
3580 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3581 lo = 0;
3583 mask_hi = ((unsigned HOST_WIDE_INT) -1
3584 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3585 mask_lo = -1;
3587 else
3589 hi = 0;
3590 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3592 mask_hi = 0;
3593 mask_lo = ((unsigned HOST_WIDE_INT) -1
3594 >> (HOST_BITS_PER_WIDE_INT - width));
3597 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3598 treat VAL as if it were unsigned. */
3599 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3600 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3601 return exp;
3603 /* Handle extension from a narrower type. */
3604 if (TREE_CODE (exp) == NOP_EXPR
3605 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3606 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3608 return NULL_TREE;
3611 /* Subroutine for fold_truthop: determine if an operand is simple enough
3612 to be evaluated unconditionally. */
3614 static int
3615 simple_operand_p (tree exp)
3617 /* Strip any conversions that don't change the machine mode. */
3618 STRIP_NOPS (exp);
3620 return (CONSTANT_CLASS_P (exp)
3621 || TREE_CODE (exp) == SSA_NAME
3622 || (DECL_P (exp)
3623 && ! TREE_ADDRESSABLE (exp)
3624 && ! TREE_THIS_VOLATILE (exp)
3625 && ! DECL_NONLOCAL (exp)
3626 /* Don't regard global variables as simple. They may be
3627 allocated in ways unknown to the compiler (shared memory,
3628 #pragma weak, etc). */
3629 && ! TREE_PUBLIC (exp)
3630 && ! DECL_EXTERNAL (exp)
3631 /* Loading a static variable is unduly expensive, but global
3632 registers aren't expensive. */
3633 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3636 /* The following functions are subroutines to fold_range_test and allow it to
3637 try to change a logical combination of comparisons into a range test.
3639 For example, both
3640 X == 2 || X == 3 || X == 4 || X == 5
3641 and
3642 X >= 2 && X <= 5
3643 are converted to
3644 (unsigned) (X - 2) <= 3
3646 We describe each set of comparisons as being either inside or outside
3647 a range, using a variable named like IN_P, and then describe the
3648 range with a lower and upper bound. If one of the bounds is omitted,
3649 it represents either the highest or lowest value of the type.
3651 In the comments below, we represent a range by two numbers in brackets
3652 preceded by a "+" to designate being inside that range, or a "-" to
3653 designate being outside that range, so the condition can be inverted by
3654 flipping the prefix. An omitted bound is represented by a "-". For
3655 example, "- [-, 10]" means being outside the range starting at the lowest
3656 possible value and ending at 10, in other words, being greater than 10.
3657 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3658 always false.
3660 We set up things so that the missing bounds are handled in a consistent
3661 manner so neither a missing bound nor "true" and "false" need to be
3662 handled using a special case. */
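/* Worked instance (illustrative): X == 2 || X == 3 || X == 4 || X == 5
   merges into the single range + [2, 5].  Rewriting it as
   (unsigned) (X - 2) <= 3 works because the subtraction maps [2, 5]
   onto [0, 3], while any X outside the range wraps around to an
   unsigned value greater than 3.  */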
3664 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3665 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3666 and UPPER1_P are nonzero if the respective argument is an upper bound
3667 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3668 must be specified for a comparison. ARG1 will be converted to ARG0's
3669 type if both are specified. */
3671 static tree
3672 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3673 tree arg1, int upper1_p)
3675 tree tem;
3676 int result;
3677 int sgn0, sgn1;
3679 /* If neither arg represents infinity, do the normal operation.
3680 Else, if not a comparison, return infinity. Else handle the special
3681 comparison rules. Note that most of the cases below won't occur, but
3682 are handled for consistency. */
3684 if (arg0 != 0 && arg1 != 0)
3686 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3687 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3688 STRIP_NOPS (tem);
3689 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3692 if (TREE_CODE_CLASS (code) != tcc_comparison)
3693 return 0;
3695 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3696 for neither. In real maths, we cannot assume open ended ranges are
3697 the same. But, this is computer arithmetic, where numbers are finite.
3698 We can therefore make the transformation of any unbounded range with
3699 the value Z, Z being greater than any representable number. This permits
3700 us to treat unbounded ranges as equal. */
3701 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3702 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3703 switch (code)
3705 case EQ_EXPR:
3706 result = sgn0 == sgn1;
3707 break;
3708 case NE_EXPR:
3709 result = sgn0 != sgn1;
3710 break;
3711 case LT_EXPR:
3712 result = sgn0 < sgn1;
3713 break;
3714 case LE_EXPR:
3715 result = sgn0 <= sgn1;
3716 break;
3717 case GT_EXPR:
3718 result = sgn0 > sgn1;
3719 break;
3720 case GE_EXPR:
3721 result = sgn0 >= sgn1;
3722 break;
3723 default:
3724 gcc_unreachable ();
3727 return constant_boolean_node (result, type);
3730 /* Given EXP, a logical expression, set the range it is testing into
3731 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3732 actually being tested. *PLOW and *PHIGH will be made of the same type
3733 as the returned expression. If EXP is not a comparison, we will most
3734 likely not be returning a useful value and range. */
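/* Example (sketch): for EXP = (x > 9) the GT_EXPR case below records
   the range "- [-, 9]" -- x lies outside the range running from the
   type minimum to 9 -- and returns x itself; callers such as
   fold_range_test can then merge this with, say, x <= 20 into the
   single test + [10, 20].  */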
3736 static tree
3737 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3739 enum tree_code code;
3740 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3741 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3742 int in_p, n_in_p;
3743 tree low, high, n_low, n_high;
3745 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3746 and see if we can refine the range. Some of the cases below may not
3747 happen, but it doesn't seem worth worrying about this. We "continue"
3748 the outer loop when we've changed something; otherwise we "break"
3749 the switch, which will "break" the while. */
3751 in_p = 0;
3752 low = high = build_int_cst (TREE_TYPE (exp), 0);
3754 while (1)
3756 code = TREE_CODE (exp);
3757 exp_type = TREE_TYPE (exp);
3759 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3761 if (TREE_CODE_LENGTH (code) > 0)
3762 arg0 = TREE_OPERAND (exp, 0);
3763 if (TREE_CODE_CLASS (code) == tcc_comparison
3764 || TREE_CODE_CLASS (code) == tcc_unary
3765 || TREE_CODE_CLASS (code) == tcc_binary)
3766 arg0_type = TREE_TYPE (arg0);
3767 if (TREE_CODE_CLASS (code) == tcc_binary
3768 || TREE_CODE_CLASS (code) == tcc_comparison
3769 || (TREE_CODE_CLASS (code) == tcc_expression
3770 && TREE_CODE_LENGTH (code) > 1))
3771 arg1 = TREE_OPERAND (exp, 1);
3774 switch (code)
3776 case TRUTH_NOT_EXPR:
3777 in_p = ! in_p, exp = arg0;
3778 continue;
3780 case EQ_EXPR: case NE_EXPR:
3781 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3782 /* We can only do something if the range is testing for zero
3783 and if the second operand is an integer constant. Note that
3784 saying something is "in" the range we make is done by
3785 complementing IN_P since it will set in the initial case of
3786 being not equal to zero; "out" is leaving it alone. */
3787 if (low == 0 || high == 0
3788 || ! integer_zerop (low) || ! integer_zerop (high)
3789 || TREE_CODE (arg1) != INTEGER_CST)
3790 break;
3792 switch (code)
3794 case NE_EXPR: /* - [c, c] */
3795 low = high = arg1;
3796 break;
3797 case EQ_EXPR: /* + [c, c] */
3798 in_p = ! in_p, low = high = arg1;
3799 break;
3800 case GT_EXPR: /* - [-, c] */
3801 low = 0, high = arg1;
3802 break;
3803 case GE_EXPR: /* + [c, -] */
3804 in_p = ! in_p, low = arg1, high = 0;
3805 break;
3806 case LT_EXPR: /* - [c, -] */
3807 low = arg1, high = 0;
3808 break;
3809 case LE_EXPR: /* + [-, c] */
3810 in_p = ! in_p, low = 0, high = arg1;
3811 break;
3812 default:
3813 gcc_unreachable ();
3816 /* If this is an unsigned comparison, we also know that EXP is
3817 greater than or equal to zero. We base the range tests we make
3818 on that fact, so we record it here so we can parse existing
3819 range tests. We test arg0_type since often the return type
3820 of, e.g. EQ_EXPR, is boolean. */
3821 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3823 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3824 in_p, low, high, 1,
3825 build_int_cst (arg0_type, 0),
3826 NULL_TREE))
3827 break;
3829 in_p = n_in_p, low = n_low, high = n_high;
3831 /* If the high bound is missing, but we have a nonzero low
3832 bound, reverse the range so it goes from zero to the low bound
3833 minus 1. */
3834 if (high == 0 && low && ! integer_zerop (low))
3836 in_p = ! in_p;
3837 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3838 integer_one_node, 0);
3839 low = build_int_cst (arg0_type, 0);
3843 exp = arg0;
3844 continue;
3846 case NEGATE_EXPR:
3847 /* (-x) IN [a,b] -> x in [-b, -a] */
3848 n_low = range_binop (MINUS_EXPR, exp_type,
3849 build_int_cst (exp_type, 0),
3850 0, high, 1);
3851 n_high = range_binop (MINUS_EXPR, exp_type,
3852 build_int_cst (exp_type, 0),
3853 0, low, 0);
3854 low = n_low, high = n_high;
3855 exp = arg0;
3856 continue;
3858 case BIT_NOT_EXPR:
3859 /* ~ X -> -X - 1 */
3860 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3861 build_int_cst (exp_type, 1));
3862 continue;
3864 case PLUS_EXPR: case MINUS_EXPR:
3865 if (TREE_CODE (arg1) != INTEGER_CST)
3866 break;
3868 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3869 move a constant to the other side. */
3870 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3871 break;
3873 /* If EXP is signed, any overflow in the computation is undefined,
3874 so we don't worry about it so long as our computations on
3875 the bounds don't overflow. For unsigned, overflow is defined
3876 and this is exactly the right thing. */
3877 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3878 arg0_type, low, 0, arg1, 0);
3879 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3880 arg0_type, high, 1, arg1, 0);
3881 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3882 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3883 break;
3885 /* Check for an unsigned range which has wrapped around the maximum
3886 value thus making n_high < n_low, and normalize it. */
3887 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3889 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3890 integer_one_node, 0);
3891 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3892 integer_one_node, 0);
3894 /* If the range is of the form +/- [ x+1, x ], we won't
3895 be able to normalize it. But then, it represents the
3896 whole range or the empty set, so make it
3897 +/- [ -, - ]. */
3898 if (tree_int_cst_equal (n_low, low)
3899 && tree_int_cst_equal (n_high, high))
3900 low = high = 0;
3901 else
3902 in_p = ! in_p;
3904 else
3905 low = n_low, high = n_high;
3907 exp = arg0;
3908 continue;
3910 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3911 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3912 break;
3914 if (! INTEGRAL_TYPE_P (arg0_type)
3915 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3916 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3917 break;
3919 n_low = low, n_high = high;
3921 if (n_low != 0)
3922 n_low = fold_convert (arg0_type, n_low);
3924 if (n_high != 0)
3925 n_high = fold_convert (arg0_type, n_high);
3928 /* If we're converting arg0 from an unsigned type to the signed
3929 type of exp, we will be doing the comparison as unsigned.
3930 The tests above have already verified that LOW and HIGH
3931 are both positive.
3933 So we have to ensure that we will handle large unsigned
3934 values the same way that the current signed bounds treat
3935 negative values. */
3937 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3939 tree high_positive;
3940 tree equiv_type = lang_hooks.types.type_for_mode
3941 (TYPE_MODE (arg0_type), 1);
3943 /* A range without an upper bound is, naturally, unbounded.
3944 Since convert would have cropped a very large value, use
3945 the max value for the destination type. */
3946 high_positive
3947 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3948 : TYPE_MAX_VALUE (arg0_type);
3950 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3951 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3952 fold_convert (arg0_type,
3953 high_positive),
3954 fold_convert (arg0_type,
3955 integer_one_node));
3957 /* If the low bound is specified, "and" the range with the
3958 range for which the original unsigned value will be
3959 positive. */
3960 if (low != 0)
3962 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3963 1, n_low, n_high, 1,
3964 fold_convert (arg0_type,
3965 integer_zero_node),
3966 high_positive))
3967 break;
3969 in_p = (n_in_p == in_p);
3971 else
3973 /* Otherwise, "or" the range with the range of the input
3974 that will be interpreted as negative. */
3975 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3976 0, n_low, n_high, 1,
3977 fold_convert (arg0_type,
3978 integer_zero_node),
3979 high_positive))
3980 break;
3982 in_p = (in_p != n_in_p);
3986 exp = arg0;
3987 low = n_low, high = n_high;
3988 continue;
3990 default:
3991 break;
3994 break;
3997 /* If EXP is a constant, we can evaluate whether this is true or false. */
3998 if (TREE_CODE (exp) == INTEGER_CST)
4000 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4001 exp, 0, low, 0))
4002 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4003 exp, 1, high, 1)));
4004 low = high = 0;
4005 exp = 0;
4008 *pin_p = in_p, *plow = low, *phigh = high;
4009 return exp;
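/* For example, given EXP = (x + 2 > 5) for signed x (and no -fwrapv),
   the GT_EXPR case first records "out of [-, 5]" with x + 2 left to
   test; the PLUS_EXPR case then moves the constant across the bounds,
   so we return x with *PIN_P = 0 and the range [-, 3], i.e. x > 3.  */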
4012 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4013 type, TYPE, return an expression to test if EXP is in (or out of, depending
4014 on IN_P) the range. Return 0 if the test couldn't be created. */
4016 static tree
4017 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4019 tree etype = TREE_TYPE (exp);
4020 tree value;
4022 #ifdef HAVE_canonicalize_funcptr_for_compare
4023 /* Disable this optimization for function pointer expressions
4024 on targets that require function pointer canonicalization. */
4025 if (HAVE_canonicalize_funcptr_for_compare
4026 && TREE_CODE (etype) == POINTER_TYPE
4027 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4028 return NULL_TREE;
4029 #endif
4031 if (! in_p)
4033 value = build_range_check (type, exp, 1, low, high);
4034 if (value != 0)
4035 return invert_truthvalue (value);
4037 return 0;
4040 if (low == 0 && high == 0)
4041 return build_int_cst (type, 1);
4043 if (low == 0)
4044 return fold_build2 (LE_EXPR, type, exp,
4045 fold_convert (etype, high));
4047 if (high == 0)
4048 return fold_build2 (GE_EXPR, type, exp,
4049 fold_convert (etype, low));
4051 if (operand_equal_p (low, high, 0))
4052 return fold_build2 (EQ_EXPR, type, exp,
4053 fold_convert (etype, low));
4055 if (integer_zerop (low))
4057 if (! TYPE_UNSIGNED (etype))
4059 etype = lang_hooks.types.unsigned_type (etype);
4060 high = fold_convert (etype, high);
4061 exp = fold_convert (etype, exp);
4063 return build_range_check (type, exp, 1, 0, high);
4066 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4067 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4069 unsigned HOST_WIDE_INT lo;
4070 HOST_WIDE_INT hi;
4071 int prec;
4073 prec = TYPE_PRECISION (etype);
4074 if (prec <= HOST_BITS_PER_WIDE_INT)
4076 hi = 0;
4077 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4079 else
4081 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4082 lo = (unsigned HOST_WIDE_INT) -1;
4085 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4087 if (TYPE_UNSIGNED (etype))
4089 etype = lang_hooks.types.signed_type (etype);
4090 exp = fold_convert (etype, exp);
4092 return fold_build2 (GT_EXPR, type, exp,
4093 build_int_cst (etype, 0));
4097 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4098 This requires wrap-around arithmetic for the type of the expression. */
4099 switch (TREE_CODE (etype))
4101 case INTEGER_TYPE:
4102 /* There is no requirement that LOW be within the range of ETYPE
4103 if the latter is a subtype. It must, however, be within the base
4104 type of ETYPE. So be sure we do the subtraction in that type. */
4105 if (TREE_TYPE (etype))
4106 etype = TREE_TYPE (etype);
4107 break;
4109 case ENUMERAL_TYPE:
4110 case BOOLEAN_TYPE:
4111 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4112 TYPE_UNSIGNED (etype));
4113 break;
4115 default:
4116 break;
4119 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4120 if (TREE_CODE (etype) == INTEGER_TYPE
4121 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4123 tree utype, minv, maxv;
4125 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4126 for the type in question, as we rely on this here. */
4127 utype = lang_hooks.types.unsigned_type (etype);
4128 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4129 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4130 integer_one_node, 1);
4131 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4133 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4134 minv, 1, maxv, 1)))
4135 etype = utype;
4136 else
4137 return 0;
4140 high = fold_convert (etype, high);
4141 low = fold_convert (etype, low);
4142 exp = fold_convert (etype, exp);
4144 value = const_binop (MINUS_EXPR, high, low, 0);
4146 if (value != 0 && !TREE_OVERFLOW (value))
4147 return build_range_check (type,
4148 fold_build2 (MINUS_EXPR, etype, exp, low),
4149 1, build_int_cst (etype, 0), value);
4151 return 0;
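/* E.g. for LOW = '0', HIGH = '9' and a signed char EXP, the
   INTEGER_TYPE handling above switches to the unsigned flavor of the
   type, and the final recursive call emits the single test
   (unsigned char) (EXP - '0') <= 9, relying on wrap-around to send
   values below '0' above the range.  */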
4154 /* Return the predecessor of VAL in its type, handling the infinite case. */
4156 static tree
4157 range_predecessor (tree val)
4159 tree type = TREE_TYPE (val);
4161 if (INTEGRAL_TYPE_P (type)
4162 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4163 return 0;
4164 else
4165 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4168 /* Return the successor of VAL in its type, handling the infinite case. */
4170 static tree
4171 range_successor (tree val)
4173 tree type = TREE_TYPE (val);
4175 if (INTEGRAL_TYPE_P (type)
4176 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4177 return 0;
4178 else
4179 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
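/* Both helpers return 0, meaning "infinite", exactly when stepping
   past the extreme value of an integral type: range_predecessor of
   TYPE_MIN_VALUE and range_successor of TYPE_MAX_VALUE yield 0, while
   for any other VAL they are simply VAL - 1 and VAL + 1.  */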
4182 /* Given two ranges, see if we can merge them into one. Return 1 if we
4183 can, 0 if we can't. Set the output range into the specified parameters. */
4185 static int
4186 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4187 tree high0, int in1_p, tree low1, tree high1)
4189 int no_overlap;
4190 int subset;
4191 int temp;
4192 tree tem;
4193 int in_p;
4194 tree low, high;
4195 int lowequal = ((low0 == 0 && low1 == 0)
4196 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4197 low0, 0, low1, 0)));
4198 int highequal = ((high0 == 0 && high1 == 0)
4199 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4200 high0, 1, high1, 1)));
4202 /* Make range 0 be the range that starts first, or ends last if they
4203 start at the same value. Swap them if that is not already the case. */
4204 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4205 low0, 0, low1, 0))
4206 || (lowequal
4207 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4208 high1, 1, high0, 1))))
4210 temp = in0_p, in0_p = in1_p, in1_p = temp;
4211 tem = low0, low0 = low1, low1 = tem;
4212 tem = high0, high0 = high1, high1 = tem;
4215 /* Now flag two cases, whether the ranges are disjoint or whether the
4216 second range is totally subsumed in the first. Note that the tests
4217 below are simplified by the ones above. */
4218 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4219 high0, 1, low1, 0));
4220 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4221 high1, 1, high0, 1));
4223 /* We now have four cases, depending on whether we are including or
4224 excluding the two ranges. */
4225 if (in0_p && in1_p)
4227 /* If they don't overlap, the result is false. If the second range
4228 is a subset it is the result. Otherwise, the range is from the start
4229 of the second to the end of the first. */
4230 if (no_overlap)
4231 in_p = 0, low = high = 0;
4232 else if (subset)
4233 in_p = 1, low = low1, high = high1;
4234 else
4235 in_p = 1, low = low1, high = high0;
4238 else if (in0_p && ! in1_p)
4240 /* If they don't overlap, the result is the first range. If they are
4241 equal, the result is false. If the second range is a subset of the
4242 first, and the ranges begin at the same place, we go from just after
4243 the end of the second range to the end of the first. If the second
4244 range is not a subset of the first, or if it is a subset and both
4245 ranges end at the same place, the range starts at the start of the
4246 first range and ends just before the second range.
4247 Otherwise, we can't describe this as a single range. */
4248 if (no_overlap)
4249 in_p = 1, low = low0, high = high0;
4250 else if (lowequal && highequal)
4251 in_p = 0, low = high = 0;
4252 else if (subset && lowequal)
4254 low = range_successor (high1);
4255 high = high0;
4256 in_p = (low != 0);
4258 else if (! subset || highequal)
4260 low = low0;
4261 high = range_predecessor (low1);
4262 in_p = (high != 0);
4264 else
4265 return 0;
4268 else if (! in0_p && in1_p)
4270 /* If they don't overlap, the result is the second range. If the second
4271 is a subset of the first, the result is false. Otherwise,
4272 the range starts just after the first range and ends at the
4273 end of the second. */
4274 if (no_overlap)
4275 in_p = 1, low = low1, high = high1;
4276 else if (subset || highequal)
4277 in_p = 0, low = high = 0;
4278 else
4280 low = range_successor (high0);
4281 high = high1;
4282 in_p = (low != 0);
4286 else
4288 /* The case where we are excluding both ranges. Here the complex case
4289 is if they don't overlap. In that case, the only time we have a
4290 range is if they are adjacent. If the second is a subset of the
4291 first, the result is the first. Otherwise, the range to exclude
4292 starts at the beginning of the first range and ends at the end of the
4293 second. */
4294 if (no_overlap)
4296 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4297 range_successor (high0),
4298 1, low1, 0)))
4299 in_p = 0, low = low0, high = high1;
4300 else
4302 /* Canonicalize - [min, x] into - [-, x]. */
4303 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4304 switch (TREE_CODE (TREE_TYPE (low0)))
4306 case ENUMERAL_TYPE:
4307 if (TYPE_PRECISION (TREE_TYPE (low0))
4308 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4309 break;
4310 /* FALLTHROUGH */
4311 case INTEGER_TYPE:
4312 if (tree_int_cst_equal (low0,
4313 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4314 low0 = 0;
4315 break;
4316 case POINTER_TYPE:
4317 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4318 && integer_zerop (low0))
4319 low0 = 0;
4320 break;
4321 default:
4322 break;
4325 /* Canonicalize - [x, max] into - [x, -]. */
4326 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4327 switch (TREE_CODE (TREE_TYPE (high1)))
4329 case ENUMERAL_TYPE:
4330 if (TYPE_PRECISION (TREE_TYPE (high1))
4331 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4332 break;
4333 /* FALLTHROUGH */
4334 case INTEGER_TYPE:
4335 if (tree_int_cst_equal (high1,
4336 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4337 high1 = 0;
4338 break;
4339 case POINTER_TYPE:
4340 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4341 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4342 high1, 1,
4343 integer_one_node, 1)))
4344 high1 = 0;
4345 break;
4346 default:
4347 break;
4350 /* The ranges might be also adjacent between the maximum and
4351 minimum values of the given type. For
4352 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4353 return + [x + 1, y - 1]. */
4354 if (low0 == 0 && high1 == 0)
4356 low = range_successor (high0);
4357 high = range_predecessor (low1);
4358 if (low == 0 || high == 0)
4359 return 0;
4361 in_p = 1;
4363 else
4364 return 0;
4367 else if (subset)
4368 in_p = 0, low = low0, high = high0;
4369 else
4370 in_p = 0, low = low0, high = high1;
4373 *pin_p = in_p, *plow = low, *phigh = high;
4374 return 1;
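/* A worked case of the in0_p && in1_p branch above: merging + [0, 9]
   with + [5, 20] finds overlap but no subset, so the result runs from
   the start of the second range to the end of the first: + [5, 9].  */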
4378 /* Subroutine of fold, looking inside expressions of the form
4379 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4380 of the COND_EXPR. This function is being used also to optimize
4381 A op B ? C : A, by reversing the comparison first.
4383 Return a folded expression whose code is not a COND_EXPR
4384 anymore, or NULL_TREE if no folding opportunity is found. */
4386 static tree
4387 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4389 enum tree_code comp_code = TREE_CODE (arg0);
4390 tree arg00 = TREE_OPERAND (arg0, 0);
4391 tree arg01 = TREE_OPERAND (arg0, 1);
4392 tree arg1_type = TREE_TYPE (arg1);
4393 tree tem;
4395 STRIP_NOPS (arg1);
4396 STRIP_NOPS (arg2);
4398 /* If we have A op 0 ? A : -A, consider applying the following
4399 transformations:
4401 A == 0? A : -A same as -A
4402 A != 0? A : -A same as A
4403 A >= 0? A : -A same as abs (A)
4404 A > 0? A : -A same as abs (A)
4405 A <= 0? A : -A same as -abs (A)
4406 A < 0? A : -A same as -abs (A)
4408 None of these transformations work for modes with signed
4409 zeros. If A is +/-0, the first two transformations will
4410 change the sign of the result (from +0 to -0, or vice
4411 versa). The last four will fix the sign of the result,
4412 even though the original expressions could be positive or
4413 negative, depending on the sign of A.
4415 Note that all these transformations are correct if A is
4416 NaN, since the two alternatives (A and -A) are also NaNs. */
4417 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4418 ? real_zerop (arg01)
4419 : integer_zerop (arg01))
4420 && ((TREE_CODE (arg2) == NEGATE_EXPR
4421 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4422 /* In the case that A is of the form X-Y, '-A' (arg2) may
4423 have already been folded to Y-X, check for that. */
4424 || (TREE_CODE (arg1) == MINUS_EXPR
4425 && TREE_CODE (arg2) == MINUS_EXPR
4426 && operand_equal_p (TREE_OPERAND (arg1, 0),
4427 TREE_OPERAND (arg2, 1), 0)
4428 && operand_equal_p (TREE_OPERAND (arg1, 1),
4429 TREE_OPERAND (arg2, 0), 0))))
4430 switch (comp_code)
4432 case EQ_EXPR:
4433 case UNEQ_EXPR:
4434 tem = fold_convert (arg1_type, arg1);
4435 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4436 case NE_EXPR:
4437 case LTGT_EXPR:
4438 return pedantic_non_lvalue (fold_convert (type, arg1));
4439 case UNGE_EXPR:
4440 case UNGT_EXPR:
4441 if (flag_trapping_math)
4442 break;
4443 /* Fall through. */
4444 case GE_EXPR:
4445 case GT_EXPR:
4446 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4447 arg1 = fold_convert (lang_hooks.types.signed_type
4448 (TREE_TYPE (arg1)), arg1);
4449 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4450 return pedantic_non_lvalue (fold_convert (type, tem));
4451 case UNLE_EXPR:
4452 case UNLT_EXPR:
4453 if (flag_trapping_math)
4454 break;
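/* Fall through.  */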
4455 case LE_EXPR:
4456 case LT_EXPR:
4457 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4458 arg1 = fold_convert (lang_hooks.types.signed_type
4459 (TREE_TYPE (arg1)), arg1);
4460 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4461 return negate_expr (fold_convert (type, tem));
4462 default:
4463 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4464 break;
4467 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4468 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4469 both transformations are correct when A is NaN: A != 0
4470 is then true, and A == 0 is false. */
4472 if (integer_zerop (arg01) && integer_zerop (arg2))
4474 if (comp_code == NE_EXPR)
4475 return pedantic_non_lvalue (fold_convert (type, arg1));
4476 else if (comp_code == EQ_EXPR)
4477 return build_int_cst (type, 0);
4480 /* Try some transformations of A op B ? A : B.
4482 A == B? A : B same as B
4483 A != B? A : B same as A
4484 A >= B? A : B same as max (A, B)
4485 A > B? A : B same as max (B, A)
4486 A <= B? A : B same as min (A, B)
4487 A < B? A : B same as min (B, A)
4489 As above, these transformations don't work in the presence
4490 of signed zeros. For example, if A and B are zeros of
4491 opposite sign, the first two transformations will change
4492 the sign of the result. In the last four, the original
4493 expressions give different results for (A=+0, B=-0) and
4494 (A=-0, B=+0), but the transformed expressions do not.
4496 The first two transformations are correct if either A or B
4497 is a NaN. In the first transformation, the condition will
4498 be false, and B will indeed be chosen. In the case of the
4499 second transformation, the condition A != B will be true,
4500 and A will be chosen.
4502 The conversions to max() and min() are not correct if B is
4503 a number and A is not. The conditions in the original
4504 expressions will be false, so all four give B. The min()
4505 and max() versions would give a NaN instead. */
4506 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4507 /* Avoid these transformations if the COND_EXPR may be used
4508 as an lvalue in the C++ front-end. PR c++/19199. */
4509 && (in_gimple_form
4510 || strcmp (lang_hooks.name, "GNU C++") != 0
4511 || ! maybe_lvalue_p (arg1)
4512 || ! maybe_lvalue_p (arg2)))
4514 tree comp_op0 = arg00;
4515 tree comp_op1 = arg01;
4516 tree comp_type = TREE_TYPE (comp_op0);
4518 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4519 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4521 comp_type = type;
4522 comp_op0 = arg1;
4523 comp_op1 = arg2;
4526 switch (comp_code)
4528 case EQ_EXPR:
4529 return pedantic_non_lvalue (fold_convert (type, arg2));
4530 case NE_EXPR:
4531 return pedantic_non_lvalue (fold_convert (type, arg1));
4532 case LE_EXPR:
4533 case LT_EXPR:
4534 case UNLE_EXPR:
4535 case UNLT_EXPR:
4536 /* In C++ a ?: expression can be an lvalue, so put the
4537 operand which will be used if they are equal first
4538 so that we can convert this back to the
4539 corresponding COND_EXPR. */
4540 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4542 comp_op0 = fold_convert (comp_type, comp_op0);
4543 comp_op1 = fold_convert (comp_type, comp_op1);
4544 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4545 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4546 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4547 return pedantic_non_lvalue (fold_convert (type, tem));
4549 break;
4550 case GE_EXPR:
4551 case GT_EXPR:
4552 case UNGE_EXPR:
4553 case UNGT_EXPR:
4554 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4556 comp_op0 = fold_convert (comp_type, comp_op0);
4557 comp_op1 = fold_convert (comp_type, comp_op1);
4558 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4559 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4560 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4561 return pedantic_non_lvalue (fold_convert (type, tem));
4563 break;
4564 case UNEQ_EXPR:
4565 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4566 return pedantic_non_lvalue (fold_convert (type, arg2));
4567 break;
4568 case LTGT_EXPR:
4569 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4570 return pedantic_non_lvalue (fold_convert (type, arg1));
4571 break;
4572 default:
4573 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4574 break;
4578 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4579 we might still be able to simplify this. For example,
4580 if C1 is one less or one more than C2, this might have started
4581 out as a MIN or MAX and been transformed by this function.
4582 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4584 if (INTEGRAL_TYPE_P (type)
4585 && TREE_CODE (arg01) == INTEGER_CST
4586 && TREE_CODE (arg2) == INTEGER_CST)
4587 switch (comp_code)
4589 case EQ_EXPR:
4590 /* We can replace A with C1 in this case. */
4591 arg1 = fold_convert (type, arg01);
4592 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4594 case LT_EXPR:
4595 /* If C1 is C2 + 1, this is min(A, C2). */
4596 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4597 OEP_ONLY_CONST)
4598 && operand_equal_p (arg01,
4599 const_binop (PLUS_EXPR, arg2,
4600 integer_one_node, 0),
4601 OEP_ONLY_CONST))
4602 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4603 type, arg1, arg2));
4604 break;
4606 case LE_EXPR:
4607 /* If C1 is C2 - 1, this is min(A, C2). */
4608 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4609 OEP_ONLY_CONST)
4610 && operand_equal_p (arg01,
4611 const_binop (MINUS_EXPR, arg2,
4612 integer_one_node, 0),
4613 OEP_ONLY_CONST))
4614 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4615 type, arg1, arg2));
4616 break;
4618 case GT_EXPR:
4619 /* If C1 is C2 - 1, this is max(A, C2). */
4620 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4621 OEP_ONLY_CONST)
4622 && operand_equal_p (arg01,
4623 const_binop (MINUS_EXPR, arg2,
4624 integer_one_node, 0),
4625 OEP_ONLY_CONST))
4626 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4627 type, arg1, arg2));
4628 break;
4630 case GE_EXPR:
4631 /* If C1 is C2 + 1, this is max(A, C2). */
4632 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4633 OEP_ONLY_CONST)
4634 && operand_equal_p (arg01,
4635 const_binop (PLUS_EXPR, arg2,
4636 integer_one_node, 0),
4637 OEP_ONLY_CONST))
4638 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4639 type, arg1, arg2));
4640 break;
4641 case NE_EXPR:
4642 break;
4643 default:
4644 gcc_unreachable ();
4647 return NULL_TREE;
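/* Two representative folds from the tables above: x >= 0 ? x : -x
   becomes ABS_EXPR <x> via the GE_EXPR case, and x <= y ? x : y
   becomes MIN_EXPR <x, y>; the latter happens only when NaNs need not
   be honored for the operand mode, as checked above.  */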
4652 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4653 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4654 #endif
4656 /* EXP is some logical combination of boolean tests. See if we can
4657 merge it into some range test. Return the new tree if so. */
4659 static tree
4660 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4662 int or_op = (code == TRUTH_ORIF_EXPR
4663 || code == TRUTH_OR_EXPR);
4664 int in0_p, in1_p, in_p;
4665 tree low0, low1, low, high0, high1, high;
4666 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4667 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4668 tree tem;
4670 /* If this is an OR operation, invert both sides; we will invert
4671 again at the end. */
4672 if (or_op)
4673 in0_p = ! in0_p, in1_p = ! in1_p;
4675 /* If both expressions are the same, if we can merge the ranges, and we
4676 can build the range test, return it or it inverted. If one of the
4677 ranges is always true or always false, consider it to be the same
4678 expression as the other. */
4679 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4680 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4681 in1_p, low1, high1)
4682 && 0 != (tem = (build_range_check (type,
4683 lhs != 0 ? lhs
4684 : rhs != 0 ? rhs : integer_zero_node,
4685 in_p, low, high))))
4686 return or_op ? invert_truthvalue (tem) : tem;
4688 /* On machines where the branch cost is expensive, if this is a
4689 short-circuited branch and the underlying object on both sides
4690 is the same, make a non-short-circuit operation. */
4691 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4692 && lhs != 0 && rhs != 0
4693 && (code == TRUTH_ANDIF_EXPR
4694 || code == TRUTH_ORIF_EXPR)
4695 && operand_equal_p (lhs, rhs, 0))
4697 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4698 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4699 which cases we can't do this. */
4700 if (simple_operand_p (lhs))
4701 return build2 (code == TRUTH_ANDIF_EXPR
4702 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4703 type, op0, op1);
4705 else if (lang_hooks.decls.global_bindings_p () == 0
4706 && ! CONTAINS_PLACEHOLDER_P (lhs))
4708 tree common = save_expr (lhs);
4710 if (0 != (lhs = build_range_check (type, common,
4711 or_op ? ! in0_p : in0_p,
4712 low0, high0))
4713 && (0 != (rhs = build_range_check (type, common,
4714 or_op ? ! in1_p : in1_p,
4715 low1, high1))))
4716 return build2 (code == TRUTH_ANDIF_EXPR
4717 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4718 type, lhs, rhs);
4722 return 0;
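/* E.g. for the classic ch >= '0' && ch <= '9', both operands yield
   ranges over CH, merge_ranges combines them into + ['0', '9'], and
   build_range_check then emits one unsigned comparison in place of
   two short-circuited tests.  */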
4725 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4726 bit value. Arrange things so the extra bits will be set to zero if and
4727 only if C is sign-extended to its full width. If MASK is nonzero,
4728 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4730 static tree
4731 unextend (tree c, int p, int unsignedp, tree mask)
4733 tree type = TREE_TYPE (c);
4734 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4735 tree temp;
4737 if (p == modesize || unsignedp)
4738 return c;
4740 /* We work by getting just the sign bit into the low-order bit, then
4741 into the high-order bit, then sign-extend. We then XOR that value
4742 with C. */
4743 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4744 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4746 /* We must use a signed type in order to get an arithmetic right shift.
4747 However, we must also avoid introducing accidental overflows, so that
4748 a subsequent call to integer_zerop will work. Hence we must
4749 do the type conversion here. At this point, the constant is either
4750 zero or one, and the conversion to a signed type can never overflow.
4751 We could get an overflow if this conversion is done anywhere else. */
4752 if (TYPE_UNSIGNED (type))
4753 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4755 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4756 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4757 if (mask != 0)
4758 temp = const_binop (BIT_AND_EXPR, temp,
4759 fold_convert (TREE_TYPE (c), mask), 0);
4760 /* If necessary, convert the type back to match the type of C. */
4761 if (TYPE_UNSIGNED (type))
4762 temp = fold_convert (type, temp);
4764 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
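/* Worked example, assuming a 32-bit mode: for P = 8, UNSIGNEDP = 0 and
   C = 0xff (the 8-bit value -1), the shifts isolate the sign bit and
   smear it across bits 8..31, and the final XOR yields 0xffffffff,
   the constant as it will appear once the field is sign-extended.  */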
4767 /* Find ways of folding logical expressions of LHS and RHS:
4768 Try to merge two comparisons to the same innermost item.
4769 Look for range tests like "ch >= '0' && ch <= '9'".
4770 Look for combinations of simple terms on machines with expensive branches
4771 and evaluate the RHS unconditionally.
4773 For example, if we have p->a == 2 && p->b == 4 and we can make an
4774 object large enough to span both A and B, we can do this with a comparison
4775 against the object ANDed with a mask.
4777 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4778 operations to do this with one comparison.
4780 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4781 function and the one above.
4783 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4784 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4786 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4787 two operands.
4789 We return the simplified tree or 0 if no optimization is possible. */
4791 static tree
4792 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4794 /* If this is the "or" of two comparisons, we can do something if
4795 the comparisons are NE_EXPR. If this is the "and", we can do something
4796 if the comparisons are EQ_EXPR. I.e.,
4797 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4799 WANTED_CODE is this operation code. For single bit fields, we can
4800 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4801 comparison for one-bit fields. */
4803 enum tree_code wanted_code;
4804 enum tree_code lcode, rcode;
4805 tree ll_arg, lr_arg, rl_arg, rr_arg;
4806 tree ll_inner, lr_inner, rl_inner, rr_inner;
4807 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4808 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4809 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4810 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4811 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4812 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4813 enum machine_mode lnmode, rnmode;
4814 tree ll_mask, lr_mask, rl_mask, rr_mask;
4815 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4816 tree l_const, r_const;
4817 tree lntype, rntype, result;
4818 int first_bit, end_bit;
4819 int volatilep;
4821 /* Start by getting the comparison codes. Fail if anything is volatile.
4822 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4823 it were surrounded with a NE_EXPR. */
4825 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4826 return 0;
4828 lcode = TREE_CODE (lhs);
4829 rcode = TREE_CODE (rhs);
4831 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4833 lhs = build2 (NE_EXPR, truth_type, lhs,
4834 build_int_cst (TREE_TYPE (lhs), 0));
4835 lcode = NE_EXPR;
4838 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4840 rhs = build2 (NE_EXPR, truth_type, rhs,
4841 build_int_cst (TREE_TYPE (rhs), 0));
4842 rcode = NE_EXPR;
4845 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4846 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4847 return 0;
4849 ll_arg = TREE_OPERAND (lhs, 0);
4850 lr_arg = TREE_OPERAND (lhs, 1);
4851 rl_arg = TREE_OPERAND (rhs, 0);
4852 rr_arg = TREE_OPERAND (rhs, 1);
4854 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4855 if (simple_operand_p (ll_arg)
4856 && simple_operand_p (lr_arg))
4858 tree result;
4859 if (operand_equal_p (ll_arg, rl_arg, 0)
4860 && operand_equal_p (lr_arg, rr_arg, 0))
4862 result = combine_comparisons (code, lcode, rcode,
4863 truth_type, ll_arg, lr_arg);
4864 if (result)
4865 return result;
4867 else if (operand_equal_p (ll_arg, rr_arg, 0)
4868 && operand_equal_p (lr_arg, rl_arg, 0))
4870 result = combine_comparisons (code, lcode,
4871 swap_tree_comparison (rcode),
4872 truth_type, ll_arg, lr_arg);
4873 if (result)
4874 return result;
4878 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4879 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4881 /* If the RHS can be evaluated unconditionally and its operands are
4882 simple, it wins to evaluate the RHS unconditionally on machines
4883 with expensive branches. In this case, this isn't a comparison
4884 that can be merged. Avoid doing this if the RHS is a floating-point
4885 comparison since those can trap. */
4887 if (BRANCH_COST >= 2
4888 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4889 && simple_operand_p (rl_arg)
4890 && simple_operand_p (rr_arg))
4892 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4893 if (code == TRUTH_OR_EXPR
4894 && lcode == NE_EXPR && integer_zerop (lr_arg)
4895 && rcode == NE_EXPR && integer_zerop (rr_arg)
4896 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4897 return build2 (NE_EXPR, truth_type,
4898 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4899 ll_arg, rl_arg),
4900 build_int_cst (TREE_TYPE (ll_arg), 0));
4902 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4903 if (code == TRUTH_AND_EXPR
4904 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4905 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4906 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4907 return build2 (EQ_EXPR, truth_type,
4908 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4909 ll_arg, rl_arg),
4910 build_int_cst (TREE_TYPE (ll_arg), 0));
4912 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4913 return build2 (code, truth_type, lhs, rhs);
4916 /* See if the comparisons can be merged. Then get all the parameters for
4917 each side. */
4919 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4920 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4921 return 0;
4923 volatilep = 0;
4924 ll_inner = decode_field_reference (ll_arg,
4925 &ll_bitsize, &ll_bitpos, &ll_mode,
4926 &ll_unsignedp, &volatilep, &ll_mask,
4927 &ll_and_mask);
4928 lr_inner = decode_field_reference (lr_arg,
4929 &lr_bitsize, &lr_bitpos, &lr_mode,
4930 &lr_unsignedp, &volatilep, &lr_mask,
4931 &lr_and_mask);
4932 rl_inner = decode_field_reference (rl_arg,
4933 &rl_bitsize, &rl_bitpos, &rl_mode,
4934 &rl_unsignedp, &volatilep, &rl_mask,
4935 &rl_and_mask);
4936 rr_inner = decode_field_reference (rr_arg,
4937 &rr_bitsize, &rr_bitpos, &rr_mode,
4938 &rr_unsignedp, &volatilep, &rr_mask,
4939 &rr_and_mask);
4941 /* It must be true that the inner operation on the lhs of each
4942 comparison must be the same if we are to be able to do anything.
4943 Then see if we have constants. If not, the same must be true for
4944 the rhs's. */
4945 if (volatilep || ll_inner == 0 || rl_inner == 0
4946 || ! operand_equal_p (ll_inner, rl_inner, 0))
4947 return 0;
4949 if (TREE_CODE (lr_arg) == INTEGER_CST
4950 && TREE_CODE (rr_arg) == INTEGER_CST)
4951 l_const = lr_arg, r_const = rr_arg;
4952 else if (lr_inner == 0 || rr_inner == 0
4953 || ! operand_equal_p (lr_inner, rr_inner, 0))
4954 return 0;
4955 else
4956 l_const = r_const = 0;
4958 /* If either comparison code is not correct for our logical operation,
4959 fail. However, we can convert a one-bit comparison against zero into
4960 the opposite comparison against that bit being set in the field. */
4962 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4963 if (lcode != wanted_code)
4965 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4967 /* Make the left operand unsigned, since we are only interested
4968 in the value of one bit. Otherwise we are doing the wrong
4969 thing below. */
4970 ll_unsignedp = 1;
4971 l_const = ll_mask;
4973 else
4974 return 0;
4977 /* This is analogous to the code for l_const above. */
4978 if (rcode != wanted_code)
4980 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4982 rl_unsignedp = 1;
4983 r_const = rl_mask;
4985 else
4986 return 0;
4989 /* After this point all optimizations will generate bit-field
4990 references, which we might not want. */
4991 if (! lang_hooks.can_use_bit_fields_p ())
4992 return 0;
4994 /* See if we can find a mode that contains both fields being compared on
4995 the left. If we can't, fail. Otherwise, update all constants and masks
4996 to be relative to a field of that size. */
4997 first_bit = MIN (ll_bitpos, rl_bitpos);
4998 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4999 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5000 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5001 volatilep);
5002 if (lnmode == VOIDmode)
5003 return 0;
5005 lnbitsize = GET_MODE_BITSIZE (lnmode);
5006 lnbitpos = first_bit & ~ (lnbitsize - 1);
5007 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5008 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5010 if (BYTES_BIG_ENDIAN)
5012 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5013 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5016 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5017 size_int (xll_bitpos), 0);
5018 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5019 size_int (xrl_bitpos), 0);
5021 if (l_const)
5023 l_const = fold_convert (lntype, l_const);
5024 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5025 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5026 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5027 fold_build1 (BIT_NOT_EXPR,
5028 lntype, ll_mask),
5029 0)))
5031 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5033 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5036 if (r_const)
5038 r_const = fold_convert (lntype, r_const);
5039 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5040 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5041 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5042 fold_build1 (BIT_NOT_EXPR,
5043 lntype, rl_mask),
5044 0)))
5046 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5048 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5052 /* If the right sides are not constant, do the same for them. Also,
5053 disallow this optimization if a size or signedness mismatch occurs
5054 between the left and right sides. */
5055 if (l_const == 0)
5057 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5058 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5059 /* Make sure the two fields on the right
5060 correspond to the left without being swapped. */
5061 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5062 return 0;
5064 first_bit = MIN (lr_bitpos, rr_bitpos);
5065 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5066 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5067 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5068 volatilep);
5069 if (rnmode == VOIDmode)
5070 return 0;
5072 rnbitsize = GET_MODE_BITSIZE (rnmode);
5073 rnbitpos = first_bit & ~ (rnbitsize - 1);
5074 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5075 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5077 if (BYTES_BIG_ENDIAN)
5079 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5080 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5083 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5084 size_int (xlr_bitpos), 0);
5085 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5086 size_int (xrr_bitpos), 0);
5088 /* Make a mask that corresponds to both fields being compared.
5089 Do this for both items being compared. If the operands are the
5090 same size and the bits being compared are in the same position
5091 then we can do this by masking both and comparing the masked
5092 results. */
5093 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5094 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5095 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5097 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5098 ll_unsignedp || rl_unsignedp);
5099 if (! all_ones_mask_p (ll_mask, lnbitsize))
5100 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5102 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5103 lr_unsignedp || rr_unsignedp);
5104 if (! all_ones_mask_p (lr_mask, rnbitsize))
5105 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5107 return build2 (wanted_code, truth_type, lhs, rhs);
5110 /* There is still another way we can do something: If both pairs of
5111 fields being compared are adjacent, we may be able to make a wider
5112 field containing them both.
5114 Note that we still must mask the lhs/rhs expressions. Furthermore,
5115 the mask must be shifted to account for the shift done by
5116 make_bit_field_ref. */
5117 if ((ll_bitsize + ll_bitpos == rl_bitpos
5118 && lr_bitsize + lr_bitpos == rr_bitpos)
5119 || (ll_bitpos == rl_bitpos + rl_bitsize
5120 && lr_bitpos == rr_bitpos + rr_bitsize))
5122 tree type;
5124 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5125 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5126 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5127 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5129 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5130 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5131 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5132 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5134 /* Convert to the smaller type before masking out unwanted bits. */
5135 type = lntype;
5136 if (lntype != rntype)
5138 if (lnbitsize > rnbitsize)
5140 lhs = fold_convert (rntype, lhs);
5141 ll_mask = fold_convert (rntype, ll_mask);
5142 type = rntype;
5144 else if (lnbitsize < rnbitsize)
5146 rhs = fold_convert (lntype, rhs);
5147 lr_mask = fold_convert (lntype, lr_mask);
5148 type = lntype;
5152 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5153 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5155 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5156 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5158 return build2 (wanted_code, truth_type, lhs, rhs);
5161 return 0;
5164 /* Handle the case of comparisons with constants. If there is something in
5165 common between the masks, those bits of the constants must be the same.
5166 If not, the condition is always false. Test for this to avoid generating
5167 incorrect code below. */
5168 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5169 if (! integer_zerop (result)
5170 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5171 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5173 if (wanted_code == NE_EXPR)
5175 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5176 return constant_boolean_node (true, truth_type);
5178 else
5180 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5181 return constant_boolean_node (false, truth_type);
5185 /* Construct the expression we will return. First get the component
5186 reference we will make. Unless the mask is all ones the width of
5187 that field, perform the mask operation. Then compare with the
5188 merged constant. */
5189 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5190 ll_unsignedp || rl_unsignedp);
5192 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5193 if (! all_ones_mask_p (ll_mask, lnbitsize))
5194 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5196 return build2 (wanted_code, truth_type, result,
5197 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
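/* E.g. for struct s { unsigned a : 4; unsigned b : 4; } *p, the test
   p->a == 2 && p->b == 4 reaches this point and becomes a single load
   of the containing byte, masked and compared against the merged
   constant, (2 | 4 << 4) on a little-endian target.  */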
5200 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5201 constant. */
5203 static tree
5204 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5206 tree arg0 = op0;
5207 enum tree_code op_code;
5208 tree comp_const = op1;
5209 tree minmax_const;
5210 int consts_equal, consts_lt;
5211 tree inner;
5213 STRIP_SIGN_NOPS (arg0);
5215 op_code = TREE_CODE (arg0);
5216 minmax_const = TREE_OPERAND (arg0, 1);
5217 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5218 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5219 inner = TREE_OPERAND (arg0, 0);
5221 /* If something does not permit us to optimize, return the original tree. */
5222 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5223 || TREE_CODE (comp_const) != INTEGER_CST
5224 || TREE_CONSTANT_OVERFLOW (comp_const)
5225 || TREE_CODE (minmax_const) != INTEGER_CST
5226 || TREE_CONSTANT_OVERFLOW (minmax_const))
5227 return NULL_TREE;
5229 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5230 and GT_EXPR, doing the rest with recursive calls using logical
5231 simplifications. */
5232 switch (code)
5234 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5236 /* FIXME: We should be able to invert code without building a
5237 scratch tree node, but doing so would require us to
5238 duplicate a part of invert_truthvalue here. */
5239 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5240 tem = optimize_minmax_comparison (TREE_CODE (tem),
5241 TREE_TYPE (tem),
5242 TREE_OPERAND (tem, 0),
5243 TREE_OPERAND (tem, 1));
5244 return invert_truthvalue (tem);
5247 case GE_EXPR:
5248 return
5249 fold_build2 (TRUTH_ORIF_EXPR, type,
5250 optimize_minmax_comparison
5251 (EQ_EXPR, type, arg0, comp_const),
5252 optimize_minmax_comparison
5253 (GT_EXPR, type, arg0, comp_const));
5255 case EQ_EXPR:
5256 if (op_code == MAX_EXPR && consts_equal)
5257 /* MAX (X, 0) == 0 -> X <= 0 */
5258 return fold_build2 (LE_EXPR, type, inner, comp_const);
5260 else if (op_code == MAX_EXPR && consts_lt)
5261 /* MAX (X, 0) == 5 -> X == 5 */
5262 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5264 else if (op_code == MAX_EXPR)
5265 /* MAX (X, 0) == -1 -> false */
5266 return omit_one_operand (type, integer_zero_node, inner);
5268 else if (consts_equal)
5269 /* MIN (X, 0) == 0 -> X >= 0 */
5270 return fold_build2 (GE_EXPR, type, inner, comp_const);
5272 else if (consts_lt)
5273 /* MIN (X, 0) == 5 -> false */
5274 return omit_one_operand (type, integer_zero_node, inner);
5276 else
5277 /* MIN (X, 0) == -1 -> X == -1 */
5278 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5280 case GT_EXPR:
5281 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5282 /* MAX (X, 0) > 0 -> X > 0
5283 MAX (X, 0) > 5 -> X > 5 */
5284 return fold_build2 (GT_EXPR, type, inner, comp_const);
5286 else if (op_code == MAX_EXPR)
5287 /* MAX (X, 0) > -1 -> true */
5288 return omit_one_operand (type, integer_one_node, inner);
5290 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5291 /* MIN (X, 0) > 0 -> false
5292 MIN (X, 0) > 5 -> false */
5293 return omit_one_operand (type, integer_zero_node, inner);
5295 else
5296 /* MIN (X, 0) > -1 -> X > -1 */
5297 return fold_build2 (GT_EXPR, type, inner, comp_const);
5299 default:
5300 return NULL_TREE;
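/* The NE/LT/LE cases above bounce through invert_truthvalue: e.g.
   MIN (x, 5) <= 4 is inverted to MIN (x, 5) > 4, handled by the
   GT_EXPR logic as x > 4, and inverted back to x <= 4; thus only
   EQ_EXPR and GT_EXPR need direct case analysis.  */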
5304 /* T is an integer expression that is being multiplied, divided, or taken a
5305 modulus (CODE says which and what kind of divide or modulus) by a
5306 constant C. See if we can eliminate that operation by folding it with
5307 other operations already in T. WIDE_TYPE, if non-null, is a type that
5308 should be used for the computation if wider than our type.
5310 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5311 (X * 2) + (Y * 4). We must, however, be assured that either the original
5312 expression would not overflow or that overflow is undefined for the type
5313 in the language in question.
5315 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5316 the machine has a multiply-accumulate insn or that this is part of an
5317 addressing calculation.
5319 If we return a non-null expression, it is an equivalent form of the
5320 original computation, but need not be in the original type. */
5322 static tree
5323 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5325 /* To avoid exponential search depth, refuse to allow recursion past
5326 three levels. Beyond that (1) it's highly unlikely that we'll find
5327 something interesting and (2) we've probably processed it before
5328 when we built the inner expression. */
5330 static int depth;
5331 tree ret;
5333 if (depth > 3)
5334 return NULL;
5336 depth++;
5337 ret = extract_muldiv_1 (t, c, code, wide_type);
5338 depth--;
5340 return ret;
5343 static tree
5344 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5346 tree type = TREE_TYPE (t);
5347 enum tree_code tcode = TREE_CODE (t);
5348 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5349 > GET_MODE_SIZE (TYPE_MODE (type)))
5350 ? wide_type : type);
5351 tree t1, t2;
5352 int same_p = tcode == code;
5353 tree op0 = NULL_TREE, op1 = NULL_TREE;
5355 /* Don't deal with constants of zero here; they confuse the code below. */
5356 if (integer_zerop (c))
5357 return NULL_TREE;
5359 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5360 op0 = TREE_OPERAND (t, 0);
5362 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5363 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5365 /* Note that we need not handle conditional operations here since fold
5366 already handles those cases. So just do arithmetic here. */
5367 switch (tcode)
5369 case INTEGER_CST:
5370 /* For a constant, we can always simplify if we are a multiply
5371 or (for divide and modulus) if it is a multiple of our constant. */
5372 if (code == MULT_EXPR
5373 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5374 return const_binop (code, fold_convert (ctype, t),
5375 fold_convert (ctype, c), 0);
5376 break;
5378 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5379 /* If op0 is an expression ... */
5380 if ((COMPARISON_CLASS_P (op0)
5381 || UNARY_CLASS_P (op0)
5382 || BINARY_CLASS_P (op0)
5383 || EXPRESSION_CLASS_P (op0))
5384 /* ... and is unsigned, and its type is smaller than ctype,
5385 then we cannot pass through as widening. */
5386 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5387 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5388 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5389 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5390 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5391 /* ... or this is a truncation (t is narrower than op0),
5392 then we cannot pass through this narrowing. */
5393 || (GET_MODE_SIZE (TYPE_MODE (type))
5394 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5395 /* ... or signedness changes for division or modulus,
5396 then we cannot pass through this conversion. */
5397 || (code != MULT_EXPR
5398 && (TYPE_UNSIGNED (ctype)
5399 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5400 break;
5402 /* Pass the constant down and see if we can make a simplification. If
5403 we can, replace this expression with the inner simplification for
5404 possible later conversion to our or some other type. */
5405 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5406 && TREE_CODE (t2) == INTEGER_CST
5407 && ! TREE_CONSTANT_OVERFLOW (t2)
5408 && (0 != (t1 = extract_muldiv (op0, t2, code,
5409 code == MULT_EXPR
5410 ? ctype : NULL_TREE))))
5411 return t1;
5412 break;
5414 case ABS_EXPR:
5415 /* If widening the type changes it from signed to unsigned, then we
5416 must avoid building ABS_EXPR itself as unsigned. */
5417 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5419 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5420 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5422 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5423 return fold_convert (ctype, t1);
5425 break;
5427 /* FALLTHROUGH */
5428 case NEGATE_EXPR:
5429 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5430 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5431 break;
5433 case MIN_EXPR: case MAX_EXPR:
5434 /* If widening the type changes the signedness, then we can't perform
5435 this optimization as that changes the result. */
5436 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5437 break;
5439 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5440 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5441 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5443 if (tree_int_cst_sgn (c) < 0)
5444 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5446 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5447 fold_convert (ctype, t2));
5449 break;
5451 case LSHIFT_EXPR: case RSHIFT_EXPR:
5452 /* If the second operand is constant, this is a multiplication
5453 or floor division, by a power of two, so we can treat it that
5454 way unless the multiplier or divisor overflows. Signed
5455 left-shift overflow is implementation-defined rather than
5456 undefined in C90, so do not convert signed left shift into
5457 multiplication. */
5458 if (TREE_CODE (op1) == INTEGER_CST
5459 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5460 /* const_binop may not detect overflow correctly,
5461 so check for it explicitly here. */
5462 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5463 && TREE_INT_CST_HIGH (op1) == 0
5464 && 0 != (t1 = fold_convert (ctype,
5465 const_binop (LSHIFT_EXPR,
5466 size_one_node,
5467 op1, 0)))
5468 && ! TREE_OVERFLOW (t1))
5469 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5470 ? MULT_EXPR : FLOOR_DIV_EXPR,
5471 ctype, fold_convert (ctype, op0), t1),
5472 c, code, wide_type);
5473 break;
5475 case PLUS_EXPR: case MINUS_EXPR:
5476 /* See if we can eliminate the operation on both sides. If we can, we
5477 can return a new PLUS or MINUS. If we can't, the only remaining
5478 cases where we can do anything are if the second operand is a
5479 constant. */
5480 t1 = extract_muldiv (op0, c, code, wide_type);
5481 t2 = extract_muldiv (op1, c, code, wide_type);
5482 if (t1 != 0 && t2 != 0
5483 && (code == MULT_EXPR
5484 /* If not multiplication, we can only do this if both operands
5485 are divisible by c. */
5486 || (multiple_of_p (ctype, op0, c)
5487 && multiple_of_p (ctype, op1, c))))
5488 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5489 fold_convert (ctype, t2));
5491 /* If this was a subtraction, negate OP1 and set it to be an addition.
5492 This simplifies the logic below. */
5493 if (tcode == MINUS_EXPR)
5494 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5496 if (TREE_CODE (op1) != INTEGER_CST)
5497 break;
5499 /* If either OP1 or C is negative, this optimization is not safe for
5500 some of the division and remainder types while for others we need
5501 to change the code. */
5502 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5504 if (code == CEIL_DIV_EXPR)
5505 code = FLOOR_DIV_EXPR;
5506 else if (code == FLOOR_DIV_EXPR)
5507 code = CEIL_DIV_EXPR;
5508 else if (code != MULT_EXPR
5509 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5510 break;
5513 /* If it's a multiply or a division/modulus operation of a multiple
5514 of our constant, do the operation and verify it doesn't overflow. */
5515 if (code == MULT_EXPR
5516 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5518 op1 = const_binop (code, fold_convert (ctype, op1),
5519 fold_convert (ctype, c), 0);
5520 /* We allow the constant to overflow with wrapping semantics. */
5521 if (op1 == 0
5522 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5523 break;
5525 else
5526 break;
5528 /* If we have an unsigned type that is not a sizetype, we cannot widen
5529 the operation since it will change the result if the original
5530 computation overflowed. */
5531 if (TYPE_UNSIGNED (ctype)
5532 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5533 && ctype != type)
5534 break;
5536 /* If we were able to eliminate our operation from the first side,
5537 apply our operation to the second side and reform the PLUS. */
5538 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5539 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5541 /* The last case is if CODE is a multiply. In that case, we can
5542 apply the distributive law to commute the multiply and addition
5543 if the multiplication of the constants doesn't overflow. */
5544 if (code == MULT_EXPR)
5545 return fold_build2 (tcode, ctype,
5546 fold_build2 (code, ctype,
5547 fold_convert (ctype, op0),
5548 fold_convert (ctype, c)),
5549 op1);
5551 break;
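/* Editorial sketch, not part of the original file: when CODE is a
   multiply, the distributive step above amounts to, e.g.,
   (x + 4) * 2 -> x * 2 + 8, performed only when the constant product
   does not overflow.  A hypothetical stand-alone check (name invented):

     static int
     distribute_mult_example (int x)
     {
       return (x + 4) * 2 == x * 2 + 8;
     }

   which returns 1 whenever neither form overflows.  */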
5553 case MULT_EXPR:
5554 /* We have a special case here if we are doing something like
5555 (C * 8) % 4 since we know that's zero. */
5556 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5557 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5558 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5559 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5560 return omit_one_operand (type, integer_zero_node, op0);
5562 /* ... fall through ... */
5564 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5565 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5566 /* If we can extract our operation from the LHS, do so and return a
5567 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5568 do something only if the second operand is a constant. */
5569 if (same_p
5570 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5571 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5572 fold_convert (ctype, op1));
5573 else if (tcode == MULT_EXPR && code == MULT_EXPR
5574 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5575 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5576 fold_convert (ctype, t1));
5577 else if (TREE_CODE (op1) != INTEGER_CST)
5578 return 0;
5580 /* If these are the same operation types, we can associate them
5581 assuming no overflow. */
5582 if (tcode == code
5583 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5584 fold_convert (ctype, c), 0))
5585 && ! TREE_OVERFLOW (t1))
5586 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5588 /* If these operations "cancel" each other, we have the main
5589 optimizations of this pass, which occur when either constant is a
5590 multiple of the other, in which case we replace this with either an
5591 operation of CODE or TCODE.
5593 If we have an unsigned type that is not a sizetype, we cannot do
5594 this since it will change the result if the original computation
5595 overflowed. */
5596 if ((! TYPE_UNSIGNED (ctype)
5597 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5598 && ! flag_wrapv
5599 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5600 || (tcode == MULT_EXPR
5601 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5602 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5604 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5605 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5606 fold_convert (ctype,
5607 const_binop (TRUNC_DIV_EXPR,
5608 op1, c, 0)));
5609 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5610 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5611 fold_convert (ctype,
5612 const_binop (TRUNC_DIV_EXPR,
5613 c, op1, 0)));
5615 break;
5617 default:
5618 break;
5621 return 0;
5624 /* Return a node which has the indicated constant VALUE (either 0 or
5625 1), and is of the indicated TYPE. */
5627 tree
5628 constant_boolean_node (int value, tree type)
5630 if (type == integer_type_node)
5631 return value ? integer_one_node : integer_zero_node;
5632 else if (type == boolean_type_node)
5633 return value ? boolean_true_node : boolean_false_node;
5634 else
5635 return build_int_cst (type, value);
5639 /* Return true if EXPR looks like an ARRAY_REF and set BASE and
5640 OFFSET to the appropriate trees. If there is no offset,
5641 OFFSET is set to NULL_TREE. BASE will be canonicalized to
5642 something you can get the element type from using
5643 TREE_TYPE (TREE_TYPE (base)). OFFSET will be the offset
5644 in bytes from the base. */
5646 static bool
5647 extract_array_ref (tree expr, tree *base, tree *offset)
5649 /* One canonical form is a PLUS_EXPR with the first
5650 argument being an ADDR_EXPR with a possible NOP_EXPR
5651 attached. */
5652 if (TREE_CODE (expr) == PLUS_EXPR)
5654 tree op0 = TREE_OPERAND (expr, 0);
5655 tree inner_base, dummy1;
5656 /* Strip NOP_EXPRs here because the C front ends and/or
5657 folders may present us with (int *)&x.a + 4B. */
5658 STRIP_NOPS (op0);
5659 if (extract_array_ref (op0, &inner_base, &dummy1))
5661 *base = inner_base;
5662 if (dummy1 == NULL_TREE)
5663 *offset = TREE_OPERAND (expr, 1);
5664 else
5665 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5666 dummy1, TREE_OPERAND (expr, 1));
5667 return true;
5670 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5671 which we transform into an ADDR_EXPR with appropriate
5672 offset. For other arguments to the ADDR_EXPR we assume
5673 zero offset and as such do not care about the ADDR_EXPR
5674 type and strip possible nops from it. */
5675 else if (TREE_CODE (expr) == ADDR_EXPR)
5677 tree op0 = TREE_OPERAND (expr, 0);
5678 if (TREE_CODE (op0) == ARRAY_REF)
5680 tree idx = TREE_OPERAND (op0, 1);
5681 *base = TREE_OPERAND (op0, 0);
5682 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5683 array_ref_element_size (op0));
5685 else
5687 /* Handle array-to-pointer decay as &a. */
5688 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5689 *base = TREE_OPERAND (expr, 0);
5690 else
5691 *base = expr;
5692 *offset = NULL_TREE;
5694 return true;
5696 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5697 else if (SSA_VAR_P (expr)
5698 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5700 *base = expr;
5701 *offset = NULL_TREE;
5702 return true;
5705 return false;
5709 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5710 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5711 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5712 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5713 COND is the first argument to CODE; otherwise (as in the example
5714 given here), it is the second argument. TYPE is the type of the
5715 original expression. Return NULL_TREE if no simplification is
5716 possible. */
5718 static tree
5719 fold_binary_op_with_conditional_arg (enum tree_code code,
5720 tree type, tree op0, tree op1,
5721 tree cond, tree arg, int cond_first_p)
5723 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5724 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5725 tree test, true_value, false_value;
5726 tree lhs = NULL_TREE;
5727 tree rhs = NULL_TREE;
5729 /* This transformation is only worthwhile if we don't have to wrap
5730 arg in a SAVE_EXPR, and the operation can be simplified on at least
5731 one of the branches once it's pushed inside the COND_EXPR. */
5732 if (!TREE_CONSTANT (arg))
5733 return NULL_TREE;
5735 if (TREE_CODE (cond) == COND_EXPR)
5737 test = TREE_OPERAND (cond, 0);
5738 true_value = TREE_OPERAND (cond, 1);
5739 false_value = TREE_OPERAND (cond, 2);
5740 /* If this operand throws an exception, then it does not make
5741 sense to try to perform a logical or arithmetic operation
5742 involving it. */
5743 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5744 lhs = true_value;
5745 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5746 rhs = false_value;
5748 else
5750 tree testtype = TREE_TYPE (cond);
5751 test = cond;
5752 true_value = constant_boolean_node (true, testtype);
5753 false_value = constant_boolean_node (false, testtype);
5756 arg = fold_convert (arg_type, arg);
5757 if (lhs == 0)
5759 true_value = fold_convert (cond_type, true_value);
5760 if (cond_first_p)
5761 lhs = fold_build2 (code, type, true_value, arg);
5762 else
5763 lhs = fold_build2 (code, type, arg, true_value);
5765 if (rhs == 0)
5767 false_value = fold_convert (cond_type, false_value);
5768 if (cond_first_p)
5769 rhs = fold_build2 (code, type, false_value, arg);
5770 else
5771 rhs = fold_build2 (code, type, arg, false_value);
5774 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5775 return fold_convert (type, test);
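/* Editorial sketch, not part of the original file: at the source level
   the transformation above is, e.g.,

     10 + (b ? x : y)  ->  b ? 10 + x : 10 + y
     10 + (p < q)      ->  (p < q) ? 11 : 10

   It only fires for a constant ARG, so no SAVE_EXPR is needed, and at
   least one arm then has a chance to fold further.  */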
5779 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5781 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5782 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5783 ADDEND is the same as X.
5785 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5786 and finite. The problematic cases are when X is zero, and its mode
5787 has signed zeros. In the case of rounding towards -infinity,
5788 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5789 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5791 static bool
5792 fold_real_zero_addition_p (tree type, tree addend, int negate)
5794 if (!real_zerop (addend))
5795 return false;
5797 /* Don't allow the fold with -fsignaling-nans. */
5798 if (HONOR_SNANS (TYPE_MODE (type)))
5799 return false;
5801 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5802 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5803 return true;
5805 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5806 if (TREE_CODE (addend) == REAL_CST
5807 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5808 negate = !negate;
5810 /* The mode has signed zeros, and we have to honor their sign.
5811 In this situation, there is only one case we can return true for.
5812 X - 0 is the same as X unless rounding towards -infinity is
5813 supported. */
5814 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
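/* Editorial sketch, not part of the original file: concretely, with
   signed zeros honored, x + 0.0 is not an identity because
   -0.0 + 0.0 yields +0.0 under round-to-nearest, while x - 0.0
   preserves -0.0 and is safe unless rounding toward -infinity is in
   effect, where 0.0 - 0.0 yields -0.0.  */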
5817 /* Subroutine of fold() that checks comparisons of built-in math
5818 functions against real constants.
5820 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5821 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5822 is the type of the result and ARG0 and ARG1 are the operands of the
5823 comparison. ARG1 must be a TREE_REAL_CST.
5825 The function returns the constant folded tree if a simplification
5826 can be made, and NULL_TREE otherwise. */
5828 static tree
5829 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5830 tree type, tree arg0, tree arg1)
5832 REAL_VALUE_TYPE c;
5834 if (BUILTIN_SQRT_P (fcode))
5836 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5837 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5839 c = TREE_REAL_CST (arg1);
5840 if (REAL_VALUE_NEGATIVE (c))
5842 /* sqrt(x) < y is always false, if y is negative. */
5843 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5844 return omit_one_operand (type, integer_zero_node, arg);
5846 /* sqrt(x) > y is always true, if y is negative and we
5847 don't care about NaNs, i.e. negative values of x. */
5848 if (code == NE_EXPR || !HONOR_NANS (mode))
5849 return omit_one_operand (type, integer_one_node, arg);
5851 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5852 return fold_build2 (GE_EXPR, type, arg,
5853 build_real (TREE_TYPE (arg), dconst0));
5855 else if (code == GT_EXPR || code == GE_EXPR)
5857 REAL_VALUE_TYPE c2;
5859 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5860 real_convert (&c2, mode, &c2);
5862 if (REAL_VALUE_ISINF (c2))
5864 /* sqrt(x) > y is x == +Inf, when y is very large. */
5865 if (HONOR_INFINITIES (mode))
5866 return fold_build2 (EQ_EXPR, type, arg,
5867 build_real (TREE_TYPE (arg), c2));
5869 /* sqrt(x) > y is always false, when y is very large
5870 and we don't care about infinities. */
5871 return omit_one_operand (type, integer_zero_node, arg);
5874 /* sqrt(x) > c is the same as x > c*c. */
5875 return fold_build2 (code, type, arg,
5876 build_real (TREE_TYPE (arg), c2));
5878 else if (code == LT_EXPR || code == LE_EXPR)
5880 REAL_VALUE_TYPE c2;
5882 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5883 real_convert (&c2, mode, &c2);
5885 if (REAL_VALUE_ISINF (c2))
5887 /* sqrt(x) < y is always true, when y is a very large
5888 value and we don't care about NaNs or Infinities. */
5889 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5890 return omit_one_operand (type, integer_one_node, arg);
5892 /* sqrt(x) < y is x != +Inf when y is very large and we
5893 don't care about NaNs. */
5894 if (! HONOR_NANS (mode))
5895 return fold_build2 (NE_EXPR, type, arg,
5896 build_real (TREE_TYPE (arg), c2));
5898 /* sqrt(x) < y is x >= 0 when y is very large and we
5899 don't care about Infinities. */
5900 if (! HONOR_INFINITIES (mode))
5901 return fold_build2 (GE_EXPR, type, arg,
5902 build_real (TREE_TYPE (arg), dconst0));
5904 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5905 if (lang_hooks.decls.global_bindings_p () != 0
5906 || CONTAINS_PLACEHOLDER_P (arg))
5907 return NULL_TREE;
5909 arg = save_expr (arg);
5910 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5911 fold_build2 (GE_EXPR, type, arg,
5912 build_real (TREE_TYPE (arg),
5913 dconst0)),
5914 fold_build2 (NE_EXPR, type, arg,
5915 build_real (TREE_TYPE (arg),
5916 c2)));
5919 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5920 if (! HONOR_NANS (mode))
5921 return fold_build2 (code, type, arg,
5922 build_real (TREE_TYPE (arg), c2));
5924 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5925 if (lang_hooks.decls.global_bindings_p () == 0
5926 && ! CONTAINS_PLACEHOLDER_P (arg))
5928 arg = save_expr (arg);
5929 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5930 fold_build2 (GE_EXPR, type, arg,
5931 build_real (TREE_TYPE (arg),
5932 dconst0)),
5933 fold_build2 (code, type, arg,
5934 build_real (TREE_TYPE (arg),
5935 c2)));
5940 return NULL_TREE;
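/* Editorial sketch, not part of the original file: the net effect of
   the sqrt cases above is, e.g.,

     sqrt (x) > 2.0   ->  x > 4.0
     sqrt (x) < -1.0  ->  false
     sqrt (x) > -1.0  ->  true, or x >= 0.0 if NaNs are honored

   squaring the constant once at compile time rather than calling sqrt
   at run time.  */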
5943 /* Subroutine of fold() that optimizes comparisons against Infinities,
5944 either +Inf or -Inf.
5946 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5947 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5948 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5950 The function returns the constant folded tree if a simplification
5951 can be made, and NULL_TREE otherwise. */
5953 static tree
5954 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5956 enum machine_mode mode;
5957 REAL_VALUE_TYPE max;
5958 tree temp;
5959 bool neg;
5961 mode = TYPE_MODE (TREE_TYPE (arg0));
5963 /* For negative infinity swap the sense of the comparison. */
5964 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5965 if (neg)
5966 code = swap_tree_comparison (code);
5968 switch (code)
5970 case GT_EXPR:
5971 /* x > +Inf is always false, if we ignore sNaNs. */
5972 if (HONOR_SNANS (mode))
5973 return NULL_TREE;
5974 return omit_one_operand (type, integer_zero_node, arg0);
5976 case LE_EXPR:
5977 /* x <= +Inf is always true, if we don't care about NaNs. */
5978 if (! HONOR_NANS (mode))
5979 return omit_one_operand (type, integer_one_node, arg0);
5981 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5982 if (lang_hooks.decls.global_bindings_p () == 0
5983 && ! CONTAINS_PLACEHOLDER_P (arg0))
5985 arg0 = save_expr (arg0);
5986 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5988 break;
5990 case EQ_EXPR:
5991 case GE_EXPR:
5992 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5993 real_maxval (&max, neg, mode);
5994 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5995 arg0, build_real (TREE_TYPE (arg0), max));
5997 case LT_EXPR:
5998 /* x < +Inf is always equal to x <= DBL_MAX. */
5999 real_maxval (&max, neg, mode);
6000 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6001 arg0, build_real (TREE_TYPE (arg0), max));
6003 case NE_EXPR:
6004 /* x != +Inf is always equal to !(x > DBL_MAX). */
6005 real_maxval (&max, neg, mode);
6006 if (! HONOR_NANS (mode))
6007 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6008 arg0, build_real (TREE_TYPE (arg0), max));
6010 /* The transformation below creates non-gimple code and thus is
6011 not appropriate if we are in gimple form. */
6012 if (in_gimple_form)
6013 return NULL_TREE;
6015 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6016 arg0, build_real (TREE_TYPE (arg0), max));
6017 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6019 default:
6020 break;
6023 return NULL_TREE;
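/* Editorial sketch, not part of the original file: worked instances of
   fold_inf_compare for double are

     x <  +Inf  ->  x <= DBL_MAX
     x >= +Inf  ->  x >  DBL_MAX
     x <= +Inf  ->  x == x  (i.e. !isnan(x))

   with comparisons against -Inf first swapping the comparison sense.  */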
6026 /* Subroutine of fold() that optimizes comparisons of a division by
6027 a nonzero integer constant against an integer constant, i.e.
6028 X/C1 op C2.
6030 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6031 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6032 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6034 The function returns the constant folded tree if a simplification
6035 can be made, and NULL_TREE otherwise. */
6037 static tree
6038 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6040 tree prod, tmp, hi, lo;
6041 tree arg00 = TREE_OPERAND (arg0, 0);
6042 tree arg01 = TREE_OPERAND (arg0, 1);
6043 unsigned HOST_WIDE_INT lpart;
6044 HOST_WIDE_INT hpart;
6045 bool neg_overflow;
6046 int overflow;
6048 /* We have to do this the hard way to detect unsigned overflow.
6049 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6050 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6051 TREE_INT_CST_HIGH (arg01),
6052 TREE_INT_CST_LOW (arg1),
6053 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6054 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6055 prod = force_fit_type (prod, -1, overflow, false);
6056 neg_overflow = false;
6058 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6060 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6061 lo = prod;
6063 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6064 overflow = add_double (TREE_INT_CST_LOW (prod),
6065 TREE_INT_CST_HIGH (prod),
6066 TREE_INT_CST_LOW (tmp),
6067 TREE_INT_CST_HIGH (tmp),
6068 &lpart, &hpart);
6069 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6070 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6071 TREE_CONSTANT_OVERFLOW (prod));
6073 else if (tree_int_cst_sgn (arg01) >= 0)
6075 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6076 switch (tree_int_cst_sgn (arg1))
6078 case -1:
6079 neg_overflow = true;
6080 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6081 hi = prod;
6082 break;
6084 case 0:
6085 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6086 hi = tmp;
6087 break;
6089 case 1:
6090 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6091 lo = prod;
6092 break;
6094 default:
6095 gcc_unreachable ();
6098 else
6100 /* A negative divisor reverses the relational operators. */
6101 code = swap_tree_comparison (code);
6103 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6104 switch (tree_int_cst_sgn (arg1))
6106 case -1:
6107 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6108 lo = prod;
6109 break;
6111 case 0:
6112 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6113 lo = tmp;
6114 break;
6116 case 1:
6117 neg_overflow = true;
6118 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6119 hi = prod;
6120 break;
6122 default:
6123 gcc_unreachable ();
6127 switch (code)
6129 case EQ_EXPR:
6130 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6131 return omit_one_operand (type, integer_zero_node, arg00);
6132 if (TREE_OVERFLOW (hi))
6133 return fold_build2 (GE_EXPR, type, arg00, lo);
6134 if (TREE_OVERFLOW (lo))
6135 return fold_build2 (LE_EXPR, type, arg00, hi);
6136 return build_range_check (type, arg00, 1, lo, hi);
6138 case NE_EXPR:
6139 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6140 return omit_one_operand (type, integer_one_node, arg00);
6141 if (TREE_OVERFLOW (hi))
6142 return fold_build2 (LT_EXPR, type, arg00, lo);
6143 if (TREE_OVERFLOW (lo))
6144 return fold_build2 (GT_EXPR, type, arg00, hi);
6145 return build_range_check (type, arg00, 0, lo, hi);
6147 case LT_EXPR:
6148 if (TREE_OVERFLOW (lo))
6150 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6151 return omit_one_operand (type, tmp, arg00);
6153 return fold_build2 (LT_EXPR, type, arg00, lo);
6155 case LE_EXPR:
6156 if (TREE_OVERFLOW (hi))
6158 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6159 return omit_one_operand (type, tmp, arg00);
6161 return fold_build2 (LE_EXPR, type, arg00, hi);
6163 case GT_EXPR:
6164 if (TREE_OVERFLOW (hi))
6166 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6167 return omit_one_operand (type, tmp, arg00);
6169 return fold_build2 (GT_EXPR, type, arg00, hi);
6171 case GE_EXPR:
6172 if (TREE_OVERFLOW (lo))
6174 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6175 return omit_one_operand (type, tmp, arg00);
6177 return fold_build2 (GE_EXPR, type, arg00, lo);
6179 default:
6180 break;
6183 return NULL_TREE;
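/* Editorial sketch, not part of the original file: a worked instance of
   fold_div_compare for unsigned x is x / 3 == 2, where prod = 6 and
   hi = 6 + (3 - 1) = 8, so the test folds to the range check
   6 <= x && x <= 8.  A hypothetical stand-alone check (name invented):

     static int
     div_compare_example (unsigned int x)
     {
       return (x / 3 == 2) == (x >= 6 && x <= 8);
     }

   which returns 1 for all X.  */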
6187 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6188 equality/inequality test, then return a simplified form of the test
6189 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6190 result type. */
6192 static tree
6193 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6194 tree result_type)
6196 /* If this is testing a single bit, we can optimize the test. */
6197 if ((code == NE_EXPR || code == EQ_EXPR)
6198 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6199 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6201 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6202 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6203 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6205 if (arg00 != NULL_TREE
6206 /* This is only a win if casting to a signed type is cheap,
6207 i.e. when arg00's type is not a partial mode. */
6208 && TYPE_PRECISION (TREE_TYPE (arg00))
6209 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6211 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6212 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6213 result_type, fold_convert (stype, arg00),
6214 build_int_cst (stype, 0));
6218 return NULL_TREE;
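/* Editorial sketch, not part of the original file: the sign test above
   is, for 32-bit x, (x & 0x80000000) != 0 -> (int) x < 0.  A
   hypothetical stand-alone check (name invented; assumes GCC's defined
   modulo conversion from unsigned to int):

     static int
     sign_test_example (unsigned int x)
     {
       return ((x & 0x80000000u) != 0) == ((int) x < 0);
     }

   which returns 1 for all X.  */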
6221 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6222 equality/inequality test, then return a simplified form of
6223 the test using shifts and logical operations. Otherwise return
6224 NULL. RESULT_TYPE is the desired result type. */
6226 tree
6227 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6228 tree result_type)
6230 /* If this is testing a single bit, we can optimize the test. */
6231 if ((code == NE_EXPR || code == EQ_EXPR)
6232 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6233 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6235 tree inner = TREE_OPERAND (arg0, 0);
6236 tree type = TREE_TYPE (arg0);
6237 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6238 enum machine_mode operand_mode = TYPE_MODE (type);
6239 int ops_unsigned;
6240 tree signed_type, unsigned_type, intermediate_type;
6241 tree tem;
6243 /* First, see if we can fold the single bit test into a sign-bit
6244 test. */
6245 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6246 result_type);
6247 if (tem)
6248 return tem;
6250 /* Otherwise we have (A & C) != 0 where C is a single bit,
6251 convert that into ((A >> C2) & 1), where C2 = log2(C).
6252 Similarly for (A & C) == 0. */
6254 /* If INNER is a right shift of a constant and it plus BITNUM does
6255 not overflow, adjust BITNUM and INNER. */
6256 if (TREE_CODE (inner) == RSHIFT_EXPR
6257 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6258 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6259 && bitnum < TYPE_PRECISION (type)
6260 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6261 bitnum - TYPE_PRECISION (type)))
6263 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6264 inner = TREE_OPERAND (inner, 0);
6267 /* If we are going to be able to omit the AND below, we must do our
6268 operations as unsigned. If we must use the AND, we have a choice.
6269 Normally unsigned is faster, but for some machines signed is. */
6270 #ifdef LOAD_EXTEND_OP
6271 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6272 && !flag_syntax_only) ? 0 : 1;
6273 #else
6274 ops_unsigned = 1;
6275 #endif
6277 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6278 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6279 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6280 inner = fold_convert (intermediate_type, inner);
6282 if (bitnum != 0)
6283 inner = build2 (RSHIFT_EXPR, intermediate_type,
6284 inner, size_int (bitnum));
6286 if (code == EQ_EXPR)
6287 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6288 inner, integer_one_node);
6290 /* Put the AND last so it can combine with more things. */
6291 inner = build2 (BIT_AND_EXPR, intermediate_type,
6292 inner, integer_one_node);
6294 /* Make sure to return the proper type. */
6295 inner = fold_convert (result_type, inner);
6297 return inner;
6299 return NULL_TREE;
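/* Editorial sketch, not part of the original file: when the sign test
   does not apply, the shift-and-mask form above is, e.g.,
   (x & 8) != 0 -> (x >> 3) & 1.  A hypothetical stand-alone check
   (name invented):

     static int
     single_bit_example (unsigned int x)
     {
       return ((x & 8) != 0) == ((x >> 3) & 1);
     }

   which returns 1 for all X.  */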
6302 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6303 such that the evaluation of ARG1 occurs before ARG0. */
6305 static bool
6306 reorder_operands_p (tree arg0, tree arg1)
6308 if (! flag_evaluation_order)
6309 return true;
6310 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6311 return true;
6312 return ! TREE_SIDE_EFFECTS (arg0)
6313 && ! TREE_SIDE_EFFECTS (arg1);
6316 /* Test whether it is preferable to swap two operands, ARG0 and
6317 ARG1, for example because ARG0 is an integer constant and ARG1
6318 isn't. If REORDER is true, only recommend swapping if we can
6319 evaluate the operands in reverse order. */
6321 bool
6322 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6324 STRIP_SIGN_NOPS (arg0);
6325 STRIP_SIGN_NOPS (arg1);
6327 if (TREE_CODE (arg1) == INTEGER_CST)
6328 return 0;
6329 if (TREE_CODE (arg0) == INTEGER_CST)
6330 return 1;
6332 if (TREE_CODE (arg1) == REAL_CST)
6333 return 0;
6334 if (TREE_CODE (arg0) == REAL_CST)
6335 return 1;
6337 if (TREE_CODE (arg1) == COMPLEX_CST)
6338 return 0;
6339 if (TREE_CODE (arg0) == COMPLEX_CST)
6340 return 1;
6342 if (TREE_CONSTANT (arg1))
6343 return 0;
6344 if (TREE_CONSTANT (arg0))
6345 return 1;
6347 if (optimize_size)
6348 return 0;
6350 if (reorder && flag_evaluation_order
6351 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6352 return 0;
6354 if (DECL_P (arg1))
6355 return 0;
6356 if (DECL_P (arg0))
6357 return 1;
6359 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6360 for commutative and comparison operators. Ensuring a canonical
6361 form allows the optimizers to find additional redundancies without
6362 having to explicitly check for both orderings. */
6363 if (TREE_CODE (arg0) == SSA_NAME
6364 && TREE_CODE (arg1) == SSA_NAME
6365 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6366 return 1;
6368 return 0;
6371 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6372 ARG0 is extended to a wider type. */
6374 static tree
6375 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6377 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6378 tree arg1_unw;
6379 tree shorter_type, outer_type;
6380 tree min, max;
6381 bool above, below;
6383 if (arg0_unw == arg0)
6384 return NULL_TREE;
6385 shorter_type = TREE_TYPE (arg0_unw);
6387 #ifdef HAVE_canonicalize_funcptr_for_compare
6388 /* Disable this optimization if we're casting a function pointer
6389 type on targets that require function pointer canonicalization. */
6390 if (HAVE_canonicalize_funcptr_for_compare
6391 && TREE_CODE (shorter_type) == POINTER_TYPE
6392 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6393 return NULL_TREE;
6394 #endif
6396 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6397 return NULL_TREE;
6399 arg1_unw = get_unwidened (arg1, shorter_type);
6401 /* If possible, express the comparison in the shorter mode. */
6402 if ((code == EQ_EXPR || code == NE_EXPR
6403 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6404 && (TREE_TYPE (arg1_unw) == shorter_type
6405 || (TREE_CODE (arg1_unw) == INTEGER_CST
6406 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6407 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6408 && int_fits_type_p (arg1_unw, shorter_type))))
6409 return fold_build2 (code, type, arg0_unw,
6410 fold_convert (shorter_type, arg1_unw));
6412 if (TREE_CODE (arg1_unw) != INTEGER_CST
6413 || TREE_CODE (shorter_type) != INTEGER_TYPE
6414 || !int_fits_type_p (arg1_unw, shorter_type))
6415 return NULL_TREE;
6417 /* If we are comparing with an integer that does not fit into the range
6418 of the shorter type, the result is known. */
6419 outer_type = TREE_TYPE (arg1_unw);
6420 min = lower_bound_in_type (outer_type, shorter_type);
6421 max = upper_bound_in_type (outer_type, shorter_type);
6423 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6424 max, arg1_unw));
6425 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6426 arg1_unw, min));
6428 switch (code)
6430 case EQ_EXPR:
6431 if (above || below)
6432 return omit_one_operand (type, integer_zero_node, arg0);
6433 break;
6435 case NE_EXPR:
6436 if (above || below)
6437 return omit_one_operand (type, integer_one_node, arg0);
6438 break;
6440 case LT_EXPR:
6441 case LE_EXPR:
6442 if (above)
6443 return omit_one_operand (type, integer_one_node, arg0);
6444 else if (below)
6445 return omit_one_operand (type, integer_zero_node, arg0);
6447 case GT_EXPR:
6448 case GE_EXPR:
6449 if (above)
6450 return omit_one_operand (type, integer_zero_node, arg0);
6451 else if (below)
6452 return omit_one_operand (type, integer_one_node, arg0);
6454 default:
6455 break;
6458 return NULL_TREE;
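/* Editorial sketch, not part of the original file: the bound checks
   above fold, e.g., (int) c == 300 to 0 and (int) c != 300 to 1 for
   unsigned char c, since 300 lies outside [0, 255] (assuming 8-bit
   char).  A hypothetical stand-alone check (name invented):

     static int
     widened_compare_example (unsigned char c)
     {
       return ((int) c == 300) == 0;
     }

   which returns 1 for all C.  */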
6461 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6462 ARG0 is a conversion that changes only the signedness. */
6464 static tree
6465 fold_sign_changed_comparison (enum tree_code code, tree type,
6466 tree arg0, tree arg1)
6468 tree arg0_inner, tmp;
6469 tree inner_type, outer_type;
6471 if (TREE_CODE (arg0) != NOP_EXPR
6472 && TREE_CODE (arg0) != CONVERT_EXPR)
6473 return NULL_TREE;
6475 outer_type = TREE_TYPE (arg0);
6476 arg0_inner = TREE_OPERAND (arg0, 0);
6477 inner_type = TREE_TYPE (arg0_inner);
6479 #ifdef HAVE_canonicalize_funcptr_for_compare
6480 /* Disable this optimization if we're casting a function pointer
6481 type on targets that require function pointer canonicalization. */
6482 if (HAVE_canonicalize_funcptr_for_compare
6483 && TREE_CODE (inner_type) == POINTER_TYPE
6484 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6485 return NULL_TREE;
6486 #endif
6488 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6489 return NULL_TREE;
6491 if (TREE_CODE (arg1) != INTEGER_CST
6492 && !((TREE_CODE (arg1) == NOP_EXPR
6493 || TREE_CODE (arg1) == CONVERT_EXPR)
6494 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6495 return NULL_TREE;
6497 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6498 && code != NE_EXPR
6499 && code != EQ_EXPR)
6500 return NULL_TREE;
6502 if (TREE_CODE (arg1) == INTEGER_CST)
6504 tmp = build_int_cst_wide (inner_type,
6505 TREE_INT_CST_LOW (arg1),
6506 TREE_INT_CST_HIGH (arg1));
6507 arg1 = force_fit_type (tmp, 0,
6508 TREE_OVERFLOW (arg1),
6509 TREE_CONSTANT_OVERFLOW (arg1));
6511 else
6512 arg1 = fold_convert (inner_type, arg1);
6514 return fold_build2 (code, type, arg0_inner, arg1);
6517 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6518 the step of the array. Reconstructs s and delta in the case of s * delta
6519 being an integer constant (and thus already folded).
6520 ADDR is the address. OP1 is the multiplicative expression.
6521 If the function succeeds, the new address expression is returned. Otherwise
6522 NULL_TREE is returned. */
6524 static tree
6525 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6527 tree s, delta, step;
6528 tree ref = TREE_OPERAND (addr, 0), pref;
6529 tree ret, pos;
6530 tree itype;
6532 /* Canonicalize op1 into a possibly non-constant delta
6533 and an INTEGER_CST s. */
6534 if (TREE_CODE (op1) == MULT_EXPR)
6536 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6538 STRIP_NOPS (arg0);
6539 STRIP_NOPS (arg1);
6541 if (TREE_CODE (arg0) == INTEGER_CST)
6543 s = arg0;
6544 delta = arg1;
6546 else if (TREE_CODE (arg1) == INTEGER_CST)
6548 s = arg1;
6549 delta = arg0;
6551 else
6552 return NULL_TREE;
6554 else if (TREE_CODE (op1) == INTEGER_CST)
6556 delta = op1;
6557 s = NULL_TREE;
6559 else
6561 /* Pretend we have delta * 1. */
6562 delta = op1;
6563 s = integer_one_node;
6566 for (;; ref = TREE_OPERAND (ref, 0))
6568 if (TREE_CODE (ref) == ARRAY_REF)
6570 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6571 if (! itype)
6572 continue;
6574 step = array_ref_element_size (ref);
6575 if (TREE_CODE (step) != INTEGER_CST)
6576 continue;
6578 if (s)
6580 if (! tree_int_cst_equal (step, s))
6581 continue;
6583 else
6585 /* Check whether delta is a multiple of step. */
6586 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6587 if (! tmp)
6588 continue;
6589 delta = tmp;
6592 break;
6595 if (!handled_component_p (ref))
6596 return NULL_TREE;
6599 /* We found a suitable array reference. So copy everything up to it,
6600 and replace the index. */
6602 pref = TREE_OPERAND (addr, 0);
6603 ret = copy_node (pref);
6604 pos = ret;
6606 while (pref != ref)
6608 pref = TREE_OPERAND (pref, 0);
6609 TREE_OPERAND (pos, 0) = copy_node (pref);
6610 pos = TREE_OPERAND (pos, 0);
6613 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6614 fold_convert (itype,
6615 TREE_OPERAND (pos, 1)),
6616 fold_convert (itype, delta));
6618 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
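/* Editorial sketch, not part of the original file: for a 4-byte int
   element the rewrite above is &a[i] + 4 * j -> &a[i + j] at the tree
   level.  A hypothetical stand-alone check (name invented; valid while
   both indices stay within the array):

     static int
     move_mult_example (int a[], long i, long j)
     {
       int *by_bytes = (int *) ((char *) &a[i] + j * (long) sizeof (int));
       return by_bytes == &a[i + j];
     }

   which returns 1 for in-range I and J.  */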
6622 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6623 means A >= Y && A != MAX, but in this case we know that
6624 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6626 static tree
6627 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6629 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6631 if (TREE_CODE (bound) == LT_EXPR)
6632 a = TREE_OPERAND (bound, 0);
6633 else if (TREE_CODE (bound) == GT_EXPR)
6634 a = TREE_OPERAND (bound, 1);
6635 else
6636 return NULL_TREE;
6638 typea = TREE_TYPE (a);
6639 if (!INTEGRAL_TYPE_P (typea)
6640 && !POINTER_TYPE_P (typea))
6641 return NULL_TREE;
6643 if (TREE_CODE (ineq) == LT_EXPR)
6645 a1 = TREE_OPERAND (ineq, 1);
6646 y = TREE_OPERAND (ineq, 0);
6648 else if (TREE_CODE (ineq) == GT_EXPR)
6650 a1 = TREE_OPERAND (ineq, 0);
6651 y = TREE_OPERAND (ineq, 1);
6653 else
6654 return NULL_TREE;
6656 if (TREE_TYPE (a1) != typea)
6657 return NULL_TREE;
6659 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6660 if (!integer_onep (diff))
6661 return NULL_TREE;
6663 return fold_build2 (GE_EXPR, type, a, y);
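/* Editorial sketch, not part of the original file: over the integers
   a + 1 > y is the same as a >= y, and the bound a < x guarantees that
   a + 1 cannot overflow.  A hypothetical stand-alone check (name
   invented):

     static int
     nonsharp_ineq_example (int a, int x, int y)
     {
       return ((a < x) && (a + 1 > y)) == ((a < x) && (a >= y));
     }

   which returns 1 for all arguments.  */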
6666 /* Fold a sum or difference of at least one multiplication.
6667 Returns the folded tree or NULL if no simplification could be made. */
6669 static tree
6670 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6672 tree arg00, arg01, arg10, arg11;
6673 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6675 /* (A * C) +- (B * C) -> (A+-B) * C.
6676 (A * C) +- A -> A * (C+-1).
6677 We are most concerned about the case where C is a constant,
6678 but other combinations show up during loop reduction. Since
6679 it is not difficult, try all four possibilities. */
6681 if (TREE_CODE (arg0) == MULT_EXPR)
6683 arg00 = TREE_OPERAND (arg0, 0);
6684 arg01 = TREE_OPERAND (arg0, 1);
6686 else
6688 arg00 = arg0;
6689 if (!FLOAT_TYPE_P (type))
6690 arg01 = build_int_cst (type, 1);
6691 else
6692 arg01 = build_real (type, dconst1);
6694 if (TREE_CODE (arg1) == MULT_EXPR)
6696 arg10 = TREE_OPERAND (arg1, 0);
6697 arg11 = TREE_OPERAND (arg1, 1);
6699 else
6701 arg10 = arg1;
6702 if (!FLOAT_TYPE_P (type))
6703 arg11 = build_int_cst (type, 1);
6704 else
6705 arg11 = build_real (type, dconst1);
6707 same = NULL_TREE;
6709 if (operand_equal_p (arg01, arg11, 0))
6710 same = arg01, alt0 = arg00, alt1 = arg10;
6711 else if (operand_equal_p (arg00, arg10, 0))
6712 same = arg00, alt0 = arg01, alt1 = arg11;
6713 else if (operand_equal_p (arg00, arg11, 0))
6714 same = arg00, alt0 = arg01, alt1 = arg10;
6715 else if (operand_equal_p (arg01, arg10, 0))
6716 same = arg01, alt0 = arg00, alt1 = arg11;
6718 /* No identical multiplicands; see if we can find a common
6719 power-of-two factor in non-power-of-two multiplies. This
6720 can help in multi-dimensional array access. */
6721 else if (host_integerp (arg01, 0)
6722 && host_integerp (arg11, 0))
6724 HOST_WIDE_INT int01, int11, tmp;
6725 bool swap = false;
6726 tree maybe_same;
6727 int01 = TREE_INT_CST_LOW (arg01);
6728 int11 = TREE_INT_CST_LOW (arg11);
6730 /* Move min of absolute values to int11. */
6731 if ((int01 >= 0 ? int01 : -int01)
6732 < (int11 >= 0 ? int11 : -int11))
6734 tmp = int01, int01 = int11, int11 = tmp;
6735 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6736 maybe_same = arg01;
6737 swap = true;
6739 else
6740 maybe_same = arg11;
6742 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6744 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6745 build_int_cst (TREE_TYPE (arg00),
6746 int01 / int11));
6747 alt1 = arg10;
6748 same = maybe_same;
6749 if (swap)
6750 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6754 if (same)
6755 return fold_build2 (MULT_EXPR, type,
6756 fold_build2 (code, type,
6757 fold_convert (type, alt0),
6758 fold_convert (type, alt1)),
6759 fold_convert (type, same));
6761 return NULL_TREE;
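/* Editorial sketch, not part of the original file: the simplest hit for
   fold_plusminus_mult_expr is factoring out a common multiplicand,
   a * c + b * c -> (a + b) * c, and the power-of-two path also turns
   a * 8 + b * 4 into (a * 2 + b) * 4.  A hypothetical stand-alone
   check (name invented):

     static int
     factor_common_example (int a, int b, int c)
     {
       return a * c + b * c == (a + b) * c;
     }

   which returns 1 whenever neither form overflows.  */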
6764 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6765 specified by EXPR into the buffer PTR of length LEN bytes.
6766 Return the number of bytes placed in the buffer, or zero
6767 upon failure. */
6769 static int
6770 native_encode_int (tree expr, unsigned char *ptr, int len)
6772 tree type = TREE_TYPE (expr);
6773 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6774 int byte, offset, word, words;
6775 unsigned char value;
6777 if (total_bytes > len)
6778 return 0;
6779 words = total_bytes / UNITS_PER_WORD;
6781 for (byte = 0; byte < total_bytes; byte++)
6783 int bitpos = byte * BITS_PER_UNIT;
6784 if (bitpos < HOST_BITS_PER_WIDE_INT)
6785 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6786 else
6787 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6788 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6790 if (total_bytes > UNITS_PER_WORD)
6792 word = byte / UNITS_PER_WORD;
6793 if (WORDS_BIG_ENDIAN)
6794 word = (words - 1) - word;
6795 offset = word * UNITS_PER_WORD;
6796 if (BYTES_BIG_ENDIAN)
6797 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6798 else
6799 offset += byte % UNITS_PER_WORD;
6801 else
6802 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6803 ptr[offset] = value;
6805 return total_bytes;
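/* Editorial sketch, not part of the original file: for a 32-bit
   INTEGER_CST with value 0x01020304 the loop above stores the bytes
   04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian one; the word shuffle only comes into play once
   total_bytes exceeds UNITS_PER_WORD.  */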
6809 /* Subroutine of native_encode_expr. Encode the REAL_CST
6810 specified by EXPR into the buffer PTR of length LEN bytes.
6811 Return the number of bytes placed in the buffer, or zero
6812 upon failure. */
6814 static int
6815 native_encode_real (tree expr, unsigned char *ptr, int len)
6817 tree type = TREE_TYPE (expr);
6818 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6819 int byte, offset, word, words;
6820 unsigned char value;
6822 /* There are always 32 bits in each long, no matter the size of
6823 the host's long. We handle floating point representations with
6824 up to 192 bits. */
6825 long tmp[6];
6827 if (total_bytes > len)
6828 return 0;
6829 words = total_bytes / UNITS_PER_WORD;
6831 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6833 for (byte = 0; byte < total_bytes; byte++)
6835 int bitpos = byte * BITS_PER_UNIT;
6836 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6838 if (total_bytes > UNITS_PER_WORD)
6840 word = byte / UNITS_PER_WORD;
6841 if (FLOAT_WORDS_BIG_ENDIAN)
6842 word = (words - 1) - word;
6843 offset = word * UNITS_PER_WORD;
6844 if (BYTES_BIG_ENDIAN)
6845 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6846 else
6847 offset += byte % UNITS_PER_WORD;
6849 else
6850 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6851 ptr[offset] = value;
6853 return total_bytes;
6856 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6857 specified by EXPR into the buffer PTR of length LEN bytes.
6858 Return the number of bytes placed in the buffer, or zero
6859 upon failure. */
6861 static int
6862 native_encode_complex (tree expr, unsigned char *ptr, int len)
6864 int rsize, isize;
6865 tree part;
6867 part = TREE_REALPART (expr);
6868 rsize = native_encode_expr (part, ptr, len);
6869 if (rsize == 0)
6870 return 0;
6871 part = TREE_IMAGPART (expr);
6872 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6873 if (isize != rsize)
6874 return 0;
6875 return rsize + isize;
6879 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6880 specified by EXPR into the buffer PTR of length LEN bytes.
6881 Return the number of bytes placed in the buffer, or zero
6882 upon failure. */
6884 static int
6885 native_encode_vector (tree expr, unsigned char *ptr, int len)
6887 int i, size, offset, count;
6888 tree elem, elements;
6890 size = 0;
6891 offset = 0;
6892 elements = TREE_VECTOR_CST_ELTS (expr);
6893 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6894 for (i = 0; i < count; i++)
6896 if (elements)
6898 elem = TREE_VALUE (elements);
6899 elements = TREE_CHAIN (elements);
6901 else
6902 elem = NULL_TREE;
6904 if (elem)
6906 size = native_encode_expr (elem, ptr+offset, len-offset);
6907 if (size == 0)
6908 return 0;
6910 else if (size != 0)
6912 if (offset + size > len)
6913 return 0;
6914 memset (ptr+offset, 0, size);
6916 else
6917 return 0;
6918 offset += size;
6920 return offset;
6924 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6925 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6926 buffer PTR of length LEN bytes. Return the number of bytes
6927 placed in the buffer, or zero upon failure. */
6929 static int
6930 native_encode_expr (tree expr, unsigned char *ptr, int len)
6932 switch (TREE_CODE (expr))
6934 case INTEGER_CST:
6935 return native_encode_int (expr, ptr, len);
6937 case REAL_CST:
6938 return native_encode_real (expr, ptr, len);
6940 case COMPLEX_CST:
6941 return native_encode_complex (expr, ptr, len);
6943 case VECTOR_CST:
6944 return native_encode_vector (expr, ptr, len);
6946 default:
6947 return 0;
6952 /* Subroutine of native_interpret_expr. Interpret the contents of
6953 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6954 If the buffer cannot be interpreted, return NULL_TREE. */
6956 static tree
6957 native_interpret_int (tree type, unsigned char *ptr, int len)
6959 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6960 int byte, offset, word, words;
6961 unsigned char value;
6962 unsigned HOST_WIDE_INT lo = 0;
6963 HOST_WIDE_INT hi = 0;
6965 if (total_bytes > len)
6966 return NULL_TREE;
6967 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
6968 return NULL_TREE;
6969 words = total_bytes / UNITS_PER_WORD;
6971 for (byte = 0; byte < total_bytes; byte++)
6973 int bitpos = byte * BITS_PER_UNIT;
6974 if (total_bytes > UNITS_PER_WORD)
6976 word = byte / UNITS_PER_WORD;
6977 if (WORDS_BIG_ENDIAN)
6978 word = (words - 1) - word;
6979 offset = word * UNITS_PER_WORD;
6980 if (BYTES_BIG_ENDIAN)
6981 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6982 else
6983 offset += byte % UNITS_PER_WORD;
6985 else
6986 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6987 value = ptr[offset];
6989 if (bitpos < HOST_BITS_PER_WIDE_INT)
6990 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
6991 else
6992 hi |= (unsigned HOST_WIDE_INT) value
6993 << (bitpos - HOST_BITS_PER_WIDE_INT);
6996 return force_fit_type (build_int_cst_wide (type, lo, hi),
6997 0, false, false);
7001 /* Subroutine of native_interpret_expr. Interpret the contents of
7002 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7003 If the buffer cannot be interpreted, return NULL_TREE. */
7005 static tree
7006 native_interpret_real (tree type, unsigned char *ptr, int len)
7008 enum machine_mode mode = TYPE_MODE (type);
7009 int total_bytes = GET_MODE_SIZE (mode);
7010 int byte, offset, word, words;
7011 unsigned char value;
7012 /* There are always 32 bits in each long, no matter the size of
7013 the host's long. We handle floating point representations with
7014 up to 192 bits. */
7015 REAL_VALUE_TYPE r;
7016 long tmp[6];
7019 if (total_bytes > len || total_bytes > 24)
7020 return NULL_TREE;
7021 words = total_bytes / UNITS_PER_WORD;
7023 memset (tmp, 0, sizeof (tmp));
7024 for (byte = 0; byte < total_bytes; byte++)
7026 int bitpos = byte * BITS_PER_UNIT;
7027 if (total_bytes > UNITS_PER_WORD)
7029 word = byte / UNITS_PER_WORD;
7030 if (FLOAT_WORDS_BIG_ENDIAN)
7031 word = (words - 1) - word;
7032 offset = word * UNITS_PER_WORD;
7033 if (BYTES_BIG_ENDIAN)
7034 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7035 else
7036 offset += byte % UNITS_PER_WORD;
7038 else
7039 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7040 value = ptr[offset];
7042 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7045 real_from_target (&r, tmp, mode);
7046 return build_real (type, r);
7050 /* Subroutine of native_interpret_expr. Interpret the contents of
7051 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7052 If the buffer cannot be interpreted, return NULL_TREE. */
7054 static tree
7055 native_interpret_complex (tree type, unsigned char *ptr, int len)
7057 tree etype, rpart, ipart;
7058 int size;
7060 etype = TREE_TYPE (type);
7061 size = GET_MODE_SIZE (TYPE_MODE (etype));
7062 if (size * 2 > len)
7063 return NULL_TREE;
7064 rpart = native_interpret_expr (etype, ptr, size);
7065 if (!rpart)
7066 return NULL_TREE;
7067 ipart = native_interpret_expr (etype, ptr+size, size);
7068 if (!ipart)
7069 return NULL_TREE;
7070 return build_complex (type, rpart, ipart);
7074 /* Subroutine of native_interpret_expr. Interpret the contents of
7075 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7076 If the buffer cannot be interpreted, return NULL_TREE. */
7078 static tree
7079 native_interpret_vector (tree type, unsigned char *ptr, int len)
7081 tree etype, elem, elements;
7082 int i, size, count;
7084 etype = TREE_TYPE (type);
7085 size = GET_MODE_SIZE (TYPE_MODE (etype));
7086 count = TYPE_VECTOR_SUBPARTS (type);
7087 if (size * count > len)
7088 return NULL_TREE;
7090 elements = NULL_TREE;
7091 for (i = count - 1; i >= 0; i--)
7093 elem = native_interpret_expr (etype, ptr+(i*size), size);
7094 if (!elem)
7095 return NULL_TREE;
7096 elements = tree_cons (NULL_TREE, elem, elements);
7098 return build_vector (type, elements);
7102 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7103 the buffer PTR of length LEN as a constant of type TYPE. For
7104 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7105 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7106 return NULL_TREE. */
7108 static tree
7109 native_interpret_expr (tree type, unsigned char *ptr, int len)
7111 switch (TREE_CODE (type))
7113 case INTEGER_TYPE:
7114 case ENUMERAL_TYPE:
7115 case BOOLEAN_TYPE:
7116 return native_interpret_int (type, ptr, len);
7118 case REAL_TYPE:
7119 return native_interpret_real (type, ptr, len);
7121 case COMPLEX_TYPE:
7122 return native_interpret_complex (type, ptr, len);
7124 case VECTOR_TYPE:
7125 return native_interpret_vector (type, ptr, len);
7127 default:
7128 return NULL_TREE;
7133 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7134 TYPE at compile-time. If we're unable to perform the conversion
7135 return NULL_TREE. */
7137 static tree
7138 fold_view_convert_expr (tree type, tree expr)
7140 /* We support up to 512-bit values (for V8DFmode). */
7141 unsigned char buffer[64];
7142 int len;
7144 /* Check that the host and target are sane. */
7145 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7146 return NULL_TREE;
7148 len = native_encode_expr (expr, buffer, sizeof (buffer));
7149 if (len == 0)
7150 return NULL_TREE;
7152 return native_interpret_expr (type, buffer, len);
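/* Editorial sketch, not part of the original file: the encode/interpret
   pair gives fold_view_convert_expr the compile-time meaning of a type
   pun, e.g. VIEW_CONVERT_EXPR<int>(1.0f) folds to 0x3f800000 on an
   IEEE single-precision target, matching what

     union { float f; int i; } u = { 1.0f };

   would make u.i at run time.  */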
7156 /* Fold a unary expression of code CODE and type TYPE with operand
7157 OP0. Return the folded expression if folding is successful.
7158 Otherwise, return NULL_TREE. */
7160 tree
7161 fold_unary (enum tree_code code, tree type, tree op0)
7163 tree tem;
7164 tree arg0;
7165 enum tree_code_class kind = TREE_CODE_CLASS (code);
7167 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7168 && TREE_CODE_LENGTH (code) == 1);
7170 arg0 = op0;
7171 if (arg0)
7173 if (code == NOP_EXPR || code == CONVERT_EXPR
7174 || code == FLOAT_EXPR || code == ABS_EXPR)
7176 /* Don't use STRIP_NOPS, because signedness of argument type
7177 matters. */
7178 STRIP_SIGN_NOPS (arg0);
7180 else
7182 /* Strip any conversions that don't change the mode. This
7183 is safe for every expression, except for a comparison
7184 expression because its signedness is derived from its
7185 operands.
7187 Note that this is done as an internal manipulation within
7188 the constant folder, in order to find the simplest
7189 representation of the arguments so that their form can be
7190 studied. In any case, the appropriate type conversions
7191 should be put back in the tree that will get out of the
7192 constant folder. */
7193 STRIP_NOPS (arg0);
7197 if (TREE_CODE_CLASS (code) == tcc_unary)
7199 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7200 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7201 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7202 else if (TREE_CODE (arg0) == COND_EXPR)
7204 tree arg01 = TREE_OPERAND (arg0, 1);
7205 tree arg02 = TREE_OPERAND (arg0, 2);
7206 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7207 arg01 = fold_build1 (code, type, arg01);
7208 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7209 arg02 = fold_build1 (code, type, arg02);
7210 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7211 arg01, arg02);
7213 /* If this was a conversion, and all we did was to move it
7214 inside the COND_EXPR, bring it back out. But leave it if
7215 it is a conversion from integer to integer and the
7216 result precision is no wider than a word since such a
7217 conversion is cheap and may be optimized away by combine,
7218 while it couldn't if it were outside the COND_EXPR. Then return
7219 so we don't get into an infinite recursion loop taking the
7220 conversion out and then back in. */
7222 if ((code == NOP_EXPR || code == CONVERT_EXPR
7223 || code == NON_LVALUE_EXPR)
7224 && TREE_CODE (tem) == COND_EXPR
7225 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7226 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7227 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7228 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7229 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7230 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7231 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7232 && (INTEGRAL_TYPE_P
7233 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7234 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7235 || flag_syntax_only))
7236 tem = build1 (code, type,
7237 build3 (COND_EXPR,
7238 TREE_TYPE (TREE_OPERAND
7239 (TREE_OPERAND (tem, 1), 0)),
7240 TREE_OPERAND (tem, 0),
7241 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7242 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7243 return tem;
7245 else if (COMPARISON_CLASS_P (arg0))
7247 if (TREE_CODE (type) == BOOLEAN_TYPE)
7249 arg0 = copy_node (arg0);
7250 TREE_TYPE (arg0) = type;
7251 return arg0;
7253 else if (TREE_CODE (type) != INTEGER_TYPE)
7254 return fold_build3 (COND_EXPR, type, arg0,
7255 fold_build1 (code, type,
7256 integer_one_node),
7257 fold_build1 (code, type,
7258 integer_zero_node));
7262 switch (code)
7264 case NOP_EXPR:
7265 case FLOAT_EXPR:
7266 case CONVERT_EXPR:
7267 case FIX_TRUNC_EXPR:
7268 case FIX_CEIL_EXPR:
7269 case FIX_FLOOR_EXPR:
7270 case FIX_ROUND_EXPR:
7271 if (TREE_TYPE (op0) == type)
7272 return op0;
7274 /* If we have (type) (a CMP b) and type is an integral type, return
7275 new expression involving the new type. */
7276 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7277 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7278 TREE_OPERAND (op0, 1));
7280 /* Handle cases of two conversions in a row. */
7281 if (TREE_CODE (op0) == NOP_EXPR
7282 || TREE_CODE (op0) == CONVERT_EXPR)
7284 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7285 tree inter_type = TREE_TYPE (op0);
7286 int inside_int = INTEGRAL_TYPE_P (inside_type);
7287 int inside_ptr = POINTER_TYPE_P (inside_type);
7288 int inside_float = FLOAT_TYPE_P (inside_type);
7289 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7290 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7291 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7292 int inter_int = INTEGRAL_TYPE_P (inter_type);
7293 int inter_ptr = POINTER_TYPE_P (inter_type);
7294 int inter_float = FLOAT_TYPE_P (inter_type);
7295 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7296 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7297 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7298 int final_int = INTEGRAL_TYPE_P (type);
7299 int final_ptr = POINTER_TYPE_P (type);
7300 int final_float = FLOAT_TYPE_P (type);
7301 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7302 unsigned int final_prec = TYPE_PRECISION (type);
7303 int final_unsignedp = TYPE_UNSIGNED (type);
7305 /* In addition to the cases of two conversions in a row
7306 handled below, if we are converting something to its own
7307 type via an object of identical or wider precision, neither
7308 conversion is needed. */
7309 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7310 && ((inter_int && final_int) || (inter_float && final_float))
7311 && inter_prec >= final_prec)
7312 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7314 /* Likewise, if the intermediate and final types are either both
7315 float or both integer, we don't need the middle conversion if
7316 it is wider than the final type and doesn't change the signedness
7317 (for integers). Avoid this if the final type is a pointer
7318 since then we sometimes need the inner conversion. Likewise if
7319 the outer has a precision not equal to the size of its mode. */
7320 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7321 || (inter_float && inside_float)
7322 || (inter_vec && inside_vec))
7323 && inter_prec >= inside_prec
7324 && (inter_float || inter_vec
7325 || inter_unsignedp == inside_unsignedp)
7326 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7327 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7328 && ! final_ptr
7329 && (! final_vec || inter_prec == inside_prec))
7330 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7332 /* If we have a sign-extension of a zero-extended value, we can
7333 replace that by a single zero-extension. */
7334 if (inside_int && inter_int && final_int
7335 && inside_prec < inter_prec && inter_prec < final_prec
7336 && inside_unsignedp && !inter_unsignedp)
7337 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7339 /* Two conversions in a row are not needed unless:
7340 - some conversion is floating-point (overstrict for now), or
7341 - some conversion is a vector (overstrict for now), or
7342 - the intermediate type is narrower than both initial and
7343 final, or
7344 - the intermediate type and innermost type differ in signedness,
7345 and the outermost type is wider than the intermediate, or
7346 - the initial type is a pointer type and the precisions of the
7347 intermediate and final types differ, or
7348 - the final type is a pointer type and the precisions of the
7349 initial and intermediate types differ. */
7350 if (! inside_float && ! inter_float && ! final_float
7351 && ! inside_vec && ! inter_vec && ! final_vec
7352 && (inter_prec > inside_prec || inter_prec > final_prec)
7353 && ! (inside_int && inter_int
7354 && inter_unsignedp != inside_unsignedp
7355 && inter_prec < final_prec)
7356 && ((inter_unsignedp && inter_prec > inside_prec)
7357 == (final_unsignedp && final_prec > inter_prec))
7358 && ! (inside_ptr && inter_prec != final_prec)
7359 && ! (final_ptr && inside_prec != inter_prec)
7360 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7361 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7362 && ! final_ptr)
7363 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7366 /* Handle (T *)&A.B.C for A being of type T and B and C
7367 living at offset zero. This occurs frequently in
7368 C++ upcasting and then accessing the base. */
7369 if (TREE_CODE (op0) == ADDR_EXPR
7370 && POINTER_TYPE_P (type)
7371 && handled_component_p (TREE_OPERAND (op0, 0)))
7373 HOST_WIDE_INT bitsize, bitpos;
7374 tree offset;
7375 enum machine_mode mode;
7376 int unsignedp, volatilep;
7377 tree base = TREE_OPERAND (op0, 0);
7378 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7379 &mode, &unsignedp, &volatilep, false);
7380 /* If the reference was to a (constant) zero offset, we can use
7381 the address of the base if it has the same base type
7382 as the result type. */
7383 if (! offset && bitpos == 0
7384 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7385 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7386 return fold_convert (type, build_fold_addr_expr (base));
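 /* Illustrative sketch (editor's example): given
      struct S { struct { int i; } a; } s;
    the cast (struct S *) &s.a.i refers to offset zero inside s, so it
    folds to &s converted to the result type.  */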
7389 if (TREE_CODE (op0) == MODIFY_EXPR
7390 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7391 /* Detect assigning a bitfield. */
7392 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7393 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7395 /* Don't leave an assignment inside a conversion
7396 unless assigning a bitfield. */
7397 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7398 /* First do the assignment, then return converted constant. */
7399 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7400 TREE_NO_WARNING (tem) = 1;
7401 TREE_USED (tem) = 1;
7402 return tem;
7405 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7406 constant (if x has signed type, the sign bit cannot be set
7407 in c). This folds the extension into the BIT_AND_EXPR. */
7408 if (INTEGRAL_TYPE_P (type)
7409 && TREE_CODE (type) != BOOLEAN_TYPE
7410 && TREE_CODE (op0) == BIT_AND_EXPR
7411 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7413 tree and = op0;
7414 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7415 int change = 0;
7417 if (TYPE_UNSIGNED (TREE_TYPE (and))
7418 || (TYPE_PRECISION (type)
7419 <= TYPE_PRECISION (TREE_TYPE (and))))
7420 change = 1;
7421 else if (TYPE_PRECISION (TREE_TYPE (and1))
7422 <= HOST_BITS_PER_WIDE_INT
7423 && host_integerp (and1, 1))
7425 unsigned HOST_WIDE_INT cst;
7427 cst = tree_low_cst (and1, 1);
7428 cst &= (HOST_WIDE_INT) -1
7429 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7430 change = (cst == 0);
7431 #ifdef LOAD_EXTEND_OP
7432 if (change
7433 && !flag_syntax_only
7434 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7435 == ZERO_EXTEND))
7437 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7438 and0 = fold_convert (uns, and0);
7439 and1 = fold_convert (uns, and1);
7441 #endif
7443 if (change)
7445 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7446 TREE_INT_CST_HIGH (and1));
7447 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7448 TREE_CONSTANT_OVERFLOW (and1));
7449 return fold_build2 (BIT_AND_EXPR, type,
7450 fold_convert (type, and0), tem);
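 /* Illustrative sketch (editor's example): for signed char x, the
    widening (int) (x & 0x7f) becomes (int) x & 0x7f; the constant's
    sign bit is clear, so masking after the extension is equivalent.  */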
7454 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7455 T2 being pointers to types of the same size. */
7456 if (POINTER_TYPE_P (type)
7457 && BINARY_CLASS_P (arg0)
7458 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7459 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7461 tree arg00 = TREE_OPERAND (arg0, 0);
7462 tree t0 = type;
7463 tree t1 = TREE_TYPE (arg00);
7464 tree tt0 = TREE_TYPE (t0);
7465 tree tt1 = TREE_TYPE (t1);
7466 tree s0 = TYPE_SIZE (tt0);
7467 tree s1 = TYPE_SIZE (tt1);
7469 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7470 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7471 TREE_OPERAND (arg0, 1));
7474 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7475 of the same precision, and X is an integer type not narrower than
7476 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7477 if (INTEGRAL_TYPE_P (type)
7478 && TREE_CODE (op0) == BIT_NOT_EXPR
7479 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7480 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7481 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7482 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7484 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7485 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7486 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7487 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7490 tem = fold_convert_const (code, type, arg0);
7491 return tem ? tem : NULL_TREE;
7493 case VIEW_CONVERT_EXPR:
7494 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7495 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7496 return fold_view_convert_expr (type, op0);
7498 case NEGATE_EXPR:
7499 if (negate_expr_p (arg0))
7500 return fold_convert (type, negate_expr (arg0));
7501 return NULL_TREE;
7503 case ABS_EXPR:
7504 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7505 return fold_abs_const (arg0, type);
7506 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7507 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7508 /* Convert fabs((double)float) into (double)fabsf(float). */
7509 else if (TREE_CODE (arg0) == NOP_EXPR
7510 && TREE_CODE (type) == REAL_TYPE)
7512 tree targ0 = strip_float_extensions (arg0);
7513 if (targ0 != arg0)
7514 return fold_convert (type, fold_build1 (ABS_EXPR,
7515 TREE_TYPE (targ0),
7516 targ0));
7518 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7519 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7520 return arg0;
7522 /* Strip sign ops from argument. */
7523 if (TREE_CODE (type) == REAL_TYPE)
7525 tem = fold_strip_sign_ops (arg0);
7526 if (tem)
7527 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7529 return NULL_TREE;
7531 case CONJ_EXPR:
7532 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7533 return fold_convert (type, arg0);
7534 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7535 return build2 (COMPLEX_EXPR, type,
7536 TREE_OPERAND (arg0, 0),
7537 negate_expr (TREE_OPERAND (arg0, 1)));
7538 else if (TREE_CODE (arg0) == COMPLEX_CST)
7539 return build_complex (type, TREE_REALPART (arg0),
7540 negate_expr (TREE_IMAGPART (arg0)));
7541 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7542 return fold_build2 (TREE_CODE (arg0), type,
7543 fold_build1 (CONJ_EXPR, type,
7544 TREE_OPERAND (arg0, 0)),
7545 fold_build1 (CONJ_EXPR, type,
7546 TREE_OPERAND (arg0, 1)));
7547 else if (TREE_CODE (arg0) == CONJ_EXPR)
7548 return TREE_OPERAND (arg0, 0);
7549 return NULL_TREE;
7551 case BIT_NOT_EXPR:
7552 if (TREE_CODE (arg0) == INTEGER_CST)
7553 return fold_not_const (arg0, type);
7554 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7555 return TREE_OPERAND (arg0, 0);
7556 /* Convert ~ (-A) to A - 1. */
7557 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7558 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7559 build_int_cst (type, 1));
7560 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7561 else if (INTEGRAL_TYPE_P (type)
7562 && ((TREE_CODE (arg0) == MINUS_EXPR
7563 && integer_onep (TREE_OPERAND (arg0, 1)))
7564 || (TREE_CODE (arg0) == PLUS_EXPR
7565 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7566 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
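 /* Editor's note: the two rewrites above are the two's-complement
    identity -A == ~A + 1 read in both directions, e.g. ~(-x)
    becomes x - 1 and ~(x - 1) becomes -x.  */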
7567 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7568 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7569 && (tem = fold_unary (BIT_NOT_EXPR, type,
7570 fold_convert (type,
7571 TREE_OPERAND (arg0, 0)))))
7572 return fold_build2 (BIT_XOR_EXPR, type, tem,
7573 fold_convert (type, TREE_OPERAND (arg0, 1)));
7574 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7575 && (tem = fold_unary (BIT_NOT_EXPR, type,
7576 fold_convert (type,
7577 TREE_OPERAND (arg0, 1)))))
7578 return fold_build2 (BIT_XOR_EXPR, type,
7579 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7581 return NULL_TREE;
7583 case TRUTH_NOT_EXPR:
7584 /* The argument to invert_truthvalue must have Boolean type. */
7585 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7586 arg0 = fold_convert (boolean_type_node, arg0);
7588 /* Note that the operand of this must be an int
7589 and its values must be 0 or 1.
7590 ("true" is a fixed value perhaps depending on the language,
7591 but we don't handle values other than 1 correctly yet.) */
7592 tem = invert_truthvalue (arg0);
7593 /* Avoid infinite recursion. */
7594 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7595 return NULL_TREE;
7596 return fold_convert (type, tem);
7598 case REALPART_EXPR:
7599 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7600 return NULL_TREE;
7601 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7602 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7603 TREE_OPERAND (arg0, 1));
7604 else if (TREE_CODE (arg0) == COMPLEX_CST)
7605 return TREE_REALPART (arg0);
7606 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7607 return fold_build2 (TREE_CODE (arg0), type,
7608 fold_build1 (REALPART_EXPR, type,
7609 TREE_OPERAND (arg0, 0)),
7610 fold_build1 (REALPART_EXPR, type,
7611 TREE_OPERAND (arg0, 1)));
7612 return NULL_TREE;
7614 case IMAGPART_EXPR:
7615 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7616 return fold_convert (type, integer_zero_node);
7617 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7618 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7619 TREE_OPERAND (arg0, 0));
7620 else if (TREE_CODE (arg0) == COMPLEX_CST)
7621 return TREE_IMAGPART (arg0);
7622 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7623 return fold_build2 (TREE_CODE (arg0), type,
7624 fold_build1 (IMAGPART_EXPR, type,
7625 TREE_OPERAND (arg0, 0)),
7626 fold_build1 (IMAGPART_EXPR, type,
7627 TREE_OPERAND (arg0, 1)));
7628 return NULL_TREE;
7630 default:
7631 return NULL_TREE;
7632 } /* switch (code) */
7635 /* Fold a binary expression of code CODE and type TYPE with operands
7636 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7637 Return the folded expression if folding is successful. Otherwise,
7638 return NULL_TREE. */
7640 static tree
7641 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7643 enum tree_code compl_code;
7645 if (code == MIN_EXPR)
7646 compl_code = MAX_EXPR;
7647 else if (code == MAX_EXPR)
7648 compl_code = MIN_EXPR;
7649 else
7650 gcc_unreachable ();
7652 /* MIN (MAX (a, b), b) == b. */
7653 if (TREE_CODE (op0) == compl_code
7654 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7655 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7657 /* MIN (MAX (b, a), b) == b. */
7658 if (TREE_CODE (op0) == compl_code
7659 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7660 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7661 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7663 /* MIN (a, MAX (a, b)) == a. */
7664 if (TREE_CODE (op1) == compl_code
7665 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7666 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7667 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7669 /* MIN (a, MAX (b, a)) == a. */
7670 if (TREE_CODE (op1) == compl_code
7671 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7672 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7673 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7675 return NULL_TREE;
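 /* Illustrative sketch (editor's example): for code == MIN_EXPR the
    complementary code is MAX_EXPR, so MIN (MAX (a, b), b) matches the
    first test above and folds to b, keeping a only for any side
    effects it may have.  */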
7678 /* Subroutine of fold_binary. This routine performs all of the
7679 transformations that are common to the equality/inequality
7680 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7681 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR); callers other than
7682 fold_binary should invoke this only via fold_binary. Fold a comparison with
7683 tree code CODE and type TYPE with operands OP0 and OP1. Return
7684 the folded comparison or NULL_TREE. */
7686 static tree
7687 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7689 tree arg0, arg1, tem;
7691 arg0 = op0;
7692 arg1 = op1;
7694 STRIP_SIGN_NOPS (arg0);
7695 STRIP_SIGN_NOPS (arg1);
7697 tem = fold_relational_const (code, type, arg0, arg1);
7698 if (tem != NULL_TREE)
7699 return tem;
7701 /* If one arg is a real or integer constant, put it last. */
7702 if (tree_swap_operands_p (arg0, arg1, true))
7703 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7705 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7706 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7707 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7708 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7709 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7710 && !(flag_wrapv || flag_trapv))
7711 && (TREE_CODE (arg1) == INTEGER_CST
7712 && !TREE_OVERFLOW (arg1)))
7714 tree const1 = TREE_OPERAND (arg0, 1);
7715 tree const2 = arg1;
7716 tree variable = TREE_OPERAND (arg0, 0);
7717 tree lhs;
7718 int lhs_add;
7719 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7721 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7722 TREE_TYPE (arg1), const2, const1);
7723 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7724 && (TREE_CODE (lhs) != INTEGER_CST
7725 || !TREE_OVERFLOW (lhs)))
7726 return fold_build2 (code, type, variable, lhs);
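 /* Illustrative sketch (editor's example): for signed int x without
    -fwrapv/-ftrapv, x + 3 < 10 is rewritten as x < 10 - 3, i.e.
    x < 7, provided the folded constant does not overflow.  */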
7729 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7731 tree targ0 = strip_float_extensions (arg0);
7732 tree targ1 = strip_float_extensions (arg1);
7733 tree newtype = TREE_TYPE (targ0);
7735 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7736 newtype = TREE_TYPE (targ1);
7738 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7739 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7740 return fold_build2 (code, type, fold_convert (newtype, targ0),
7741 fold_convert (newtype, targ1));
7743 /* (-a) CMP (-b) -> b CMP a */
7744 if (TREE_CODE (arg0) == NEGATE_EXPR
7745 && TREE_CODE (arg1) == NEGATE_EXPR)
7746 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7747 TREE_OPERAND (arg0, 0));
7749 if (TREE_CODE (arg1) == REAL_CST)
7751 REAL_VALUE_TYPE cst;
7752 cst = TREE_REAL_CST (arg1);
7754 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7755 if (TREE_CODE (arg0) == NEGATE_EXPR)
7756 return fold_build2 (swap_tree_comparison (code), type,
7757 TREE_OPERAND (arg0, 0),
7758 build_real (TREE_TYPE (arg1),
7759 REAL_VALUE_NEGATE (cst)));
7761 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7762 /* a CMP (-0) -> a CMP 0 */
7763 if (REAL_VALUE_MINUS_ZERO (cst))
7764 return fold_build2 (code, type, arg0,
7765 build_real (TREE_TYPE (arg1), dconst0));
7767 /* x != NaN is always true, other ops are always false. */
7768 if (REAL_VALUE_ISNAN (cst)
7769 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7771 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7772 return omit_one_operand (type, tem, arg0);
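 /* Editor's note: e.g. x != NaN folds to 1 and x < NaN folds to 0
    here, with x kept only for its side effects; the fold is skipped
    when signaling NaNs must be honored.  */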
7775 /* Fold comparisons against infinity. */
7776 if (REAL_VALUE_ISINF (cst))
7778 tem = fold_inf_compare (code, type, arg0, arg1);
7779 if (tem != NULL_TREE)
7780 return tem;
7784 /* If this is a comparison of a real constant with a PLUS_EXPR
7785 or a MINUS_EXPR of a real constant, and unsafe math
7786 optimizations are enabled, we can convert it into a comparison
7787 with a revised real constant, provided no overflow occurs. */
7788 if (flag_unsafe_math_optimizations
7789 && TREE_CODE (arg1) == REAL_CST
7790 && (TREE_CODE (arg0) == PLUS_EXPR
7791 || TREE_CODE (arg0) == MINUS_EXPR)
7792 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7793 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7794 ? MINUS_EXPR : PLUS_EXPR,
7795 arg1, TREE_OPERAND (arg0, 1), 0))
7796 && ! TREE_CONSTANT_OVERFLOW (tem))
7797 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7799 /* Likewise, we can simplify a comparison of a real constant with
7800 a MINUS_EXPR whose first operand is also a real constant, i.e.
7801 (c1 - x) < c2 becomes x > c1-c2. */
7802 if (flag_unsafe_math_optimizations
7803 && TREE_CODE (arg1) == REAL_CST
7804 && TREE_CODE (arg0) == MINUS_EXPR
7805 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7806 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7807 arg1, 0))
7808 && ! TREE_CONSTANT_OVERFLOW (tem))
7809 return fold_build2 (swap_tree_comparison (code), type,
7810 TREE_OPERAND (arg0, 1), tem);
7812 /* Fold comparisons against built-in math functions. */
7813 if (TREE_CODE (arg1) == REAL_CST
7814 && flag_unsafe_math_optimizations
7815 && ! flag_errno_math)
7817 enum built_in_function fcode = builtin_mathfn_code (arg0);
7819 if (fcode != END_BUILTINS)
7821 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7822 if (tem != NULL_TREE)
7823 return tem;
7828 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7829 if (TREE_CONSTANT (arg1)
7830 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7831 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7832 /* This optimization is invalid for ordered comparisons
7833 if CONST+INCR overflows or if foo+incr might overflow.
7834 This optimization is invalid for floating point due to rounding.
7835 For pointer types we assume overflow doesn't happen. */
7836 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7837 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7838 && (code == EQ_EXPR || code == NE_EXPR))))
7840 tree varop, newconst;
7842 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7844 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7845 arg1, TREE_OPERAND (arg0, 1));
7846 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7847 TREE_OPERAND (arg0, 0),
7848 TREE_OPERAND (arg0, 1));
7850 else
7852 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7853 arg1, TREE_OPERAND (arg0, 1));
7854 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7855 TREE_OPERAND (arg0, 0),
7856 TREE_OPERAND (arg0, 1));
7860 /* If VAROP is a reference to a bitfield, we must mask
7861 the constant by the width of the field. */
7862 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7863 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7864 && host_integerp (DECL_SIZE (TREE_OPERAND
7865 (TREE_OPERAND (varop, 0), 1)), 1))
7867 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7868 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7869 tree folded_compare, shift;
7871 /* First check whether the comparison would come out
7872 always the same. If we don't do that we would
7873 change the meaning with the masking. */
7874 folded_compare = fold_build2 (code, type,
7875 TREE_OPERAND (varop, 0), arg1);
7876 if (TREE_CODE (folded_compare) == INTEGER_CST)
7877 return omit_one_operand (type, folded_compare, varop);
7879 shift = build_int_cst (NULL_TREE,
7880 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7881 shift = fold_convert (TREE_TYPE (varop), shift);
7882 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7883 newconst, shift);
7884 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7885 newconst, shift);
7888 return fold_build2 (code, type, varop, newconst);
7891 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7892 && (TREE_CODE (arg0) == NOP_EXPR
7893 || TREE_CODE (arg0) == CONVERT_EXPR))
7895 /* If we are widening one operand of an integer comparison,
7896 see if the other operand is similarly being widened. Perhaps we
7897 can do the comparison in the narrower type. */
7898 tem = fold_widened_comparison (code, type, arg0, arg1);
7899 if (tem)
7900 return tem;
7902 /* Or if we are changing signedness. */
7903 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
7904 if (tem)
7905 return tem;
7908 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7909 constant, we can simplify it. */
7910 if (TREE_CODE (arg1) == INTEGER_CST
7911 && (TREE_CODE (arg0) == MIN_EXPR
7912 || TREE_CODE (arg0) == MAX_EXPR)
7913 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7915 tem = optimize_minmax_comparison (code, type, op0, op1);
7916 if (tem)
7917 return tem;
7920 /* Simplify comparison of something with itself. (For IEEE
7921 floating-point, we can only do some of these simplifications.) */
7922 if (operand_equal_p (arg0, arg1, 0))
7924 switch (code)
7926 case EQ_EXPR:
7927 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7928 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7929 return constant_boolean_node (1, type);
7930 break;
7932 case GE_EXPR:
7933 case LE_EXPR:
7934 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7935 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7936 return constant_boolean_node (1, type);
7937 return fold_build2 (EQ_EXPR, type, arg0, arg1);
7939 case NE_EXPR:
7940 /* For NE, we can only do this simplification if integer
7941 or we don't honor IEEE floating point NaNs. */
7942 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7943 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7944 break;
7945 /* ... fall through ... */
7946 case GT_EXPR:
7947 case LT_EXPR:
7948 return constant_boolean_node (0, type);
7949 default:
7950 gcc_unreachable ();
7954 /* If we are comparing an expression that just has comparisons
7955 of two integer values, arithmetic expressions of those comparisons,
7956 and constants, we can simplify it. There are only three cases
7957 to check: the two values can either be equal, the first can be
7958 greater, or the second can be greater. Fold the expression for
7959 those three values. Since each value must be 0 or 1, we have
7960 eight possibilities, each of which corresponds to the constant 0
7961 or 1 or one of the six possible comparisons.
7963 This handles common cases like (a > b) == 0 but also handles
7964 expressions like ((x > y) - (y > x)) > 0, which supposedly
7965 occur in macroized code. */
7967 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7969 tree cval1 = 0, cval2 = 0;
7970 int save_p = 0;
7972 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7973 /* Don't handle degenerate cases here; they should already
7974 have been handled anyway. */
7975 && cval1 != 0 && cval2 != 0
7976 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7977 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7978 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7979 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7980 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7981 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7982 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7984 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7985 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7987 /* We can't just pass T to eval_subst in case cval1 or cval2
7988 was the same as ARG1. */
7990 tree high_result
7991 = fold_build2 (code, type,
7992 eval_subst (arg0, cval1, maxval,
7993 cval2, minval),
7994 arg1);
7995 tree equal_result
7996 = fold_build2 (code, type,
7997 eval_subst (arg0, cval1, maxval,
7998 cval2, maxval),
7999 arg1);
8000 tree low_result
8001 = fold_build2 (code, type,
8002 eval_subst (arg0, cval1, minval,
8003 cval2, maxval),
8004 arg1);
8006 /* All three of these results should be 0 or 1. Confirm they are.
8007 Then use those values to select the proper code to use. */
8009 if (TREE_CODE (high_result) == INTEGER_CST
8010 && TREE_CODE (equal_result) == INTEGER_CST
8011 && TREE_CODE (low_result) == INTEGER_CST)
8013 /* Make a 3-bit mask with the high-order bit being the
8014 value for `>', the next for '=', and the low for '<'. */
8015 switch ((integer_onep (high_result) * 4)
8016 + (integer_onep (equal_result) * 2)
8017 + integer_onep (low_result))
8019 case 0:
8020 /* Always false. */
8021 return omit_one_operand (type, integer_zero_node, arg0);
8022 case 1:
8023 code = LT_EXPR;
8024 break;
8025 case 2:
8026 code = EQ_EXPR;
8027 break;
8028 case 3:
8029 code = LE_EXPR;
8030 break;
8031 case 4:
8032 code = GT_EXPR;
8033 break;
8034 case 5:
8035 code = NE_EXPR;
8036 break;
8037 case 6:
8038 code = GE_EXPR;
8039 break;
8040 case 7:
8041 /* Always true. */
8042 return omit_one_operand (type, integer_one_node, arg0);
8045 if (save_p)
8046 return save_expr (build2 (code, type, cval1, cval2));
8047 return fold_build2 (code, type, cval1, cval2);
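 /* Illustrative sketch (editor's example): for
    ((x > y) - (y > x)) > 0, substituting the three orderings of x and
    y gives high_result 1, equal_result 0 and low_result 0, i.e. mask
    4 above, so the whole expression folds to x > y.  */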
8052 /* Fold a comparison of the address of COMPONENT_REFs with the same
8053 type and component to a comparison of the address of the base
8054 object. In short, &x->a OP &y->a to x OP y and
8055 &x->a OP &y.a to x OP &y */
8056 if (TREE_CODE (arg0) == ADDR_EXPR
8057 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8058 && TREE_CODE (arg1) == ADDR_EXPR
8059 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8061 tree cref0 = TREE_OPERAND (arg0, 0);
8062 tree cref1 = TREE_OPERAND (arg1, 0);
8063 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8065 tree op0 = TREE_OPERAND (cref0, 0);
8066 tree op1 = TREE_OPERAND (cref1, 0);
8067 return fold_build2 (code, type,
8068 build_fold_addr_expr (op0),
8069 build_fold_addr_expr (op1));
8073 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8074 into a single range test. */
8075 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8076 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8077 && TREE_CODE (arg1) == INTEGER_CST
8078 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8079 && !integer_zerop (TREE_OPERAND (arg0, 1))
8080 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8081 && !TREE_OVERFLOW (arg1))
8083 tem = fold_div_compare (code, type, arg0, arg1);
8084 if (tem != NULL_TREE)
8085 return tem;
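 /* Illustrative sketch (editor's example): for unsigned int x, the
    test x / 4 == 3 holds exactly when 12 <= x && x <= 15, so the
    division is replaced by a single range test.  */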
8088 return NULL_TREE;
8091 /* Fold a binary expression of code CODE and type TYPE with operands
8092 OP0 and OP1. Return the folded expression if folding is
8093 successful. Otherwise, return NULL_TREE. */
8095 tree
8096 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8098 enum tree_code_class kind = TREE_CODE_CLASS (code);
8099 tree arg0, arg1, tem;
8100 tree t1 = NULL_TREE;
8102 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8103 && TREE_CODE_LENGTH (code) == 2
8104 && op0 != NULL_TREE
8105 && op1 != NULL_TREE);
8107 arg0 = op0;
8108 arg1 = op1;
8110 /* Strip any conversions that don't change the mode. This is
8111 safe for every expression, except for a comparison expression
8112 because its signedness is derived from its operands. So, in
8113 the latter case, only strip conversions that don't change the
8114 signedness.
8116 Note that this is done as an internal manipulation within the
8117 constant folder, in order to find the simplest representation
8118 of the arguments so that their form can be studied. In any
8119 case, the appropriate type conversions should be put back in
8120 the tree that will get out of the constant folder. */
8122 if (kind == tcc_comparison)
8124 STRIP_SIGN_NOPS (arg0);
8125 STRIP_SIGN_NOPS (arg1);
8127 else
8129 STRIP_NOPS (arg0);
8130 STRIP_NOPS (arg1);
8133 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8134 constant but we can't do arithmetic on them. */
8135 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8136 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8137 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8138 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8140 if (kind == tcc_binary)
8141 tem = const_binop (code, arg0, arg1, 0);
8142 else if (kind == tcc_comparison)
8143 tem = fold_relational_const (code, type, arg0, arg1);
8144 else
8145 tem = NULL_TREE;
8147 if (tem != NULL_TREE)
8149 if (TREE_TYPE (tem) != type)
8150 tem = fold_convert (type, tem);
8151 return tem;
8155 /* If this is a commutative operation, and ARG0 is a constant, move it
8156 to ARG1 to reduce the number of tests below. */
8157 if (commutative_tree_code (code)
8158 && tree_swap_operands_p (arg0, arg1, true))
8159 return fold_build2 (code, type, op1, op0);
8161 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8163 First check for cases where an arithmetic operation is applied to a
8164 compound, conditional, or comparison operation. Push the arithmetic
8165 operation inside the compound or conditional to see if any folding
8166 can then be done. Convert comparison to conditional for this purpose.
8167 This also optimizes non-constant cases that used to be done in
8168 expand_expr.
8170 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8171 one of the operands is a comparison and the other is a comparison, a
8172 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8173 code below would make the expression more complex. Change it to a
8174 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8175 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8177 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8178 || code == EQ_EXPR || code == NE_EXPR)
8179 && ((truth_value_p (TREE_CODE (arg0))
8180 && (truth_value_p (TREE_CODE (arg1))
8181 || (TREE_CODE (arg1) == BIT_AND_EXPR
8182 && integer_onep (TREE_OPERAND (arg1, 1)))))
8183 || (truth_value_p (TREE_CODE (arg1))
8184 && (truth_value_p (TREE_CODE (arg0))
8185 || (TREE_CODE (arg0) == BIT_AND_EXPR
8186 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8188 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8189 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8190 : TRUTH_XOR_EXPR,
8191 boolean_type_node,
8192 fold_convert (boolean_type_node, arg0),
8193 fold_convert (boolean_type_node, arg1));
8195 if (code == EQ_EXPR)
8196 tem = invert_truthvalue (tem);
8198 return fold_convert (type, tem);
8201 if (TREE_CODE_CLASS (code) == tcc_binary
8202 || TREE_CODE_CLASS (code) == tcc_comparison)
8204 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8205 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8206 fold_build2 (code, type,
8207 TREE_OPERAND (arg0, 1), op1));
8208 if (TREE_CODE (arg1) == COMPOUND_EXPR
8209 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8210 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8211 fold_build2 (code, type,
8212 op0, TREE_OPERAND (arg1, 1)));
8214 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8216 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8217 arg0, arg1,
8218 /*cond_first_p=*/1);
8219 if (tem != NULL_TREE)
8220 return tem;
8223 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8225 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8226 arg1, arg0,
8227 /*cond_first_p=*/0);
8228 if (tem != NULL_TREE)
8229 return tem;
8233 switch (code)
8235 case PLUS_EXPR:
8236 /* A + (-B) -> A - B */
8237 if (TREE_CODE (arg1) == NEGATE_EXPR)
8238 return fold_build2 (MINUS_EXPR, type,
8239 fold_convert (type, arg0),
8240 fold_convert (type, TREE_OPERAND (arg1, 0)));
8241 /* (-A) + B -> B - A */
8242 if (TREE_CODE (arg0) == NEGATE_EXPR
8243 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8244 return fold_build2 (MINUS_EXPR, type,
8245 fold_convert (type, arg1),
8246 fold_convert (type, TREE_OPERAND (arg0, 0)));
8247 /* Convert ~A + 1 to -A. */
8248 if (INTEGRAL_TYPE_P (type)
8249 && TREE_CODE (arg0) == BIT_NOT_EXPR
8250 && integer_onep (arg1))
8251 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
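 /* Editor's note: this is again the two's-complement identity
    -A == ~A + 1, so e.g. ~x + 1 folds to -x.  */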
8253 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8254 same or one. */
8255 if ((TREE_CODE (arg0) == MULT_EXPR
8256 || TREE_CODE (arg1) == MULT_EXPR)
8257 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8259 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8260 if (tem)
8261 return tem;
8264 if (! FLOAT_TYPE_P (type))
8266 if (integer_zerop (arg1))
8267 return non_lvalue (fold_convert (type, arg0));
8269 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8270 with a constant, and the two constants have no bits in common,
8271 we should treat this as a BIT_IOR_EXPR since this may produce more
8272 simplifications. */
8273 if (TREE_CODE (arg0) == BIT_AND_EXPR
8274 && TREE_CODE (arg1) == BIT_AND_EXPR
8275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8276 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8277 && integer_zerop (const_binop (BIT_AND_EXPR,
8278 TREE_OPERAND (arg0, 1),
8279 TREE_OPERAND (arg1, 1), 0)))
8281 code = BIT_IOR_EXPR;
8282 goto bit_ior;
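 /* Illustrative sketch (editor's example): in
    (x & 0xF0) + (y & 0x0F) the two masks share no bits, so no carry
    can occur and the sum behaves exactly like
    (x & 0xF0) | (y & 0x0F).  */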
8285 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8286 (plus (plus (mult) (mult)) (foo)) so that we can
8287 take advantage of the factoring cases below. */
8288 if (((TREE_CODE (arg0) == PLUS_EXPR
8289 || TREE_CODE (arg0) == MINUS_EXPR)
8290 && TREE_CODE (arg1) == MULT_EXPR)
8291 || ((TREE_CODE (arg1) == PLUS_EXPR
8292 || TREE_CODE (arg1) == MINUS_EXPR)
8293 && TREE_CODE (arg0) == MULT_EXPR))
8295 tree parg0, parg1, parg, marg;
8296 enum tree_code pcode;
8298 if (TREE_CODE (arg1) == MULT_EXPR)
8299 parg = arg0, marg = arg1;
8300 else
8301 parg = arg1, marg = arg0;
8302 pcode = TREE_CODE (parg);
8303 parg0 = TREE_OPERAND (parg, 0);
8304 parg1 = TREE_OPERAND (parg, 1);
8305 STRIP_NOPS (parg0);
8306 STRIP_NOPS (parg1);
8308 if (TREE_CODE (parg0) == MULT_EXPR
8309 && TREE_CODE (parg1) != MULT_EXPR)
8310 return fold_build2 (pcode, type,
8311 fold_build2 (PLUS_EXPR, type,
8312 fold_convert (type, parg0),
8313 fold_convert (type, marg)),
8314 fold_convert (type, parg1));
8315 if (TREE_CODE (parg0) != MULT_EXPR
8316 && TREE_CODE (parg1) == MULT_EXPR)
8317 return fold_build2 (PLUS_EXPR, type,
8318 fold_convert (type, parg0),
8319 fold_build2 (pcode, type,
8320 fold_convert (type, marg),
8321 fold_convert (type,
8322 parg1)));
8325 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8326 of the array. The loop optimizer sometimes produces this type of
8327 expression. */
8328 if (TREE_CODE (arg0) == ADDR_EXPR)
8330 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8331 if (tem)
8332 return fold_convert (type, tem);
8334 else if (TREE_CODE (arg1) == ADDR_EXPR)
8336 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8337 if (tem)
8338 return fold_convert (type, tem);
8341 else
8343 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8344 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8345 return non_lvalue (fold_convert (type, arg0));
8347 /* Likewise if the operands are reversed. */
8348 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8349 return non_lvalue (fold_convert (type, arg1));
8351 /* Convert X + -C into X - C. */
8352 if (TREE_CODE (arg1) == REAL_CST
8353 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8355 tem = fold_negate_const (arg1, type);
8356 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8357 return fold_build2 (MINUS_EXPR, type,
8358 fold_convert (type, arg0),
8359 fold_convert (type, tem));
8362 if (flag_unsafe_math_optimizations
8363 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8364 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8365 && (tem = distribute_real_division (code, type, arg0, arg1)))
8366 return tem;
8368 /* Convert x+x into x*2.0. */
8369 if (operand_equal_p (arg0, arg1, 0)
8370 && SCALAR_FLOAT_TYPE_P (type))
8371 return fold_build2 (MULT_EXPR, type, arg0,
8372 build_real (type, dconst2));
8374 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8375 if (flag_unsafe_math_optimizations
8376 && TREE_CODE (arg1) == PLUS_EXPR
8377 && TREE_CODE (arg0) != MULT_EXPR)
8379 tree tree10 = TREE_OPERAND (arg1, 0);
8380 tree tree11 = TREE_OPERAND (arg1, 1);
8381 if (TREE_CODE (tree11) == MULT_EXPR
8382 && TREE_CODE (tree10) == MULT_EXPR)
8384 tree tree0;
8385 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8386 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8389 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
8390 if (flag_unsafe_math_optimizations
8391 && TREE_CODE (arg0) == PLUS_EXPR
8392 && TREE_CODE (arg1) != MULT_EXPR)
8394 tree tree00 = TREE_OPERAND (arg0, 0);
8395 tree tree01 = TREE_OPERAND (arg0, 1);
8396 if (TREE_CODE (tree01) == MULT_EXPR
8397 && TREE_CODE (tree00) == MULT_EXPR)
8399 tree tree0;
8400 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8401 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8406 bit_rotate:
8407 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8408 is a rotate of A by C1 bits. */
8409 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8410 is a rotate of A by B bits. */
8412 enum tree_code code0, code1;
8413 code0 = TREE_CODE (arg0);
8414 code1 = TREE_CODE (arg1);
8415 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8416 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8417 && operand_equal_p (TREE_OPERAND (arg0, 0),
8418 TREE_OPERAND (arg1, 0), 0)
8419 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8421 tree tree01, tree11;
8422 enum tree_code code01, code11;
8424 tree01 = TREE_OPERAND (arg0, 1);
8425 tree11 = TREE_OPERAND (arg1, 1);
8426 STRIP_NOPS (tree01);
8427 STRIP_NOPS (tree11);
8428 code01 = TREE_CODE (tree01);
8429 code11 = TREE_CODE (tree11);
8430 if (code01 == INTEGER_CST
8431 && code11 == INTEGER_CST
8432 && TREE_INT_CST_HIGH (tree01) == 0
8433 && TREE_INT_CST_HIGH (tree11) == 0
8434 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8435 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8436 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8437 code0 == LSHIFT_EXPR ? tree01 : tree11);
8438 else if (code11 == MINUS_EXPR)
8440 tree tree110, tree111;
8441 tree110 = TREE_OPERAND (tree11, 0);
8442 tree111 = TREE_OPERAND (tree11, 1);
8443 STRIP_NOPS (tree110);
8444 STRIP_NOPS (tree111);
8445 if (TREE_CODE (tree110) == INTEGER_CST
8446 && 0 == compare_tree_int (tree110,
8447 TYPE_PRECISION
8448 (TREE_TYPE (TREE_OPERAND
8449 (arg0, 0))))
8450 && operand_equal_p (tree01, tree111, 0))
8451 return build2 ((code0 == LSHIFT_EXPR
8452 ? LROTATE_EXPR
8453 : RROTATE_EXPR),
8454 type, TREE_OPERAND (arg0, 0), tree01);
8456 else if (code01 == MINUS_EXPR)
8458 tree tree010, tree011;
8459 tree010 = TREE_OPERAND (tree01, 0);
8460 tree011 = TREE_OPERAND (tree01, 1);
8461 STRIP_NOPS (tree010);
8462 STRIP_NOPS (tree011);
8463 if (TREE_CODE (tree010) == INTEGER_CST
8464 && 0 == compare_tree_int (tree010,
8465 TYPE_PRECISION
8466 (TREE_TYPE (TREE_OPERAND
8467 (arg0, 0))))
8468 && operand_equal_p (tree11, tree011, 0))
8469 return build2 ((code0 != LSHIFT_EXPR
8470 ? LROTATE_EXPR
8471 : RROTATE_EXPR),
8472 type, TREE_OPERAND (arg0, 0), tree11);
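 /* Illustrative sketch (editor's example, assuming a 32-bit unsigned
    int x): (x << 3) + (x >> 29) and (x << n) | (x >> (32 - n)),
    reached from the PLUS and IOR cases alike, are both rebuilt here
    as a single rotate-left of x.  */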
8477 associate:
8478 /* In most languages, we cannot reassociate operations on floats
8479 across parentheses. Rather than remembering where the parentheses
8480 were, we do not associate floats at all, unless the user has
8481 specified -funsafe-math-optimizations. */
8483 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8485 tree var0, con0, lit0, minus_lit0;
8486 tree var1, con1, lit1, minus_lit1;
8488 /* Split both trees into variables, constants, and literals. Then
8489 associate each group together, the constants with literals,
8490 then the result with variables. This increases the chances of
8491 literals being recombined later and of generating relocatable
8492 expressions for the sum of a constant and literal. */
8493 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8494 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8495 code == MINUS_EXPR);
8497 /* Only do something if we found more than two objects. Otherwise,
8498 nothing has changed and we risk infinite recursion. */
8499 if (2 < ((var0 != 0) + (var1 != 0)
8500 + (con0 != 0) + (con1 != 0)
8501 + (lit0 != 0) + (lit1 != 0)
8502 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8504 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8505 if (code == MINUS_EXPR)
8506 code = PLUS_EXPR;
8508 var0 = associate_trees (var0, var1, code, type);
8509 con0 = associate_trees (con0, con1, code, type);
8510 lit0 = associate_trees (lit0, lit1, code, type);
8511 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8513 /* Preserve the MINUS_EXPR if the negative part of the literal is
8514 greater than the positive part. Otherwise, the multiplicative
8515 folding code (i.e. extract_muldiv) may be fooled when
8516 unsigned constants are subtracted, like in the following
8517 example: ((X*2 + 4) - 8U)/2. */
8518 if (minus_lit0 && lit0)
8520 if (TREE_CODE (lit0) == INTEGER_CST
8521 && TREE_CODE (minus_lit0) == INTEGER_CST
8522 && tree_int_cst_lt (lit0, minus_lit0))
8524 minus_lit0 = associate_trees (minus_lit0, lit0,
8525 MINUS_EXPR, type);
8526 lit0 = 0;
8528 else
8530 lit0 = associate_trees (lit0, minus_lit0,
8531 MINUS_EXPR, type);
8532 minus_lit0 = 0;
8535 if (minus_lit0)
8537 if (con0 == 0)
8538 return fold_convert (type,
8539 associate_trees (var0, minus_lit0,
8540 MINUS_EXPR, type));
8541 else
8543 con0 = associate_trees (con0, minus_lit0,
8544 MINUS_EXPR, type);
8545 return fold_convert (type,
8546 associate_trees (var0, con0,
8547 PLUS_EXPR, type));
8551 con0 = associate_trees (con0, lit0, code, type);
8552 return fold_convert (type, associate_trees (var0, con0,
8553 code, type));
8557 return NULL_TREE;
8559 case MINUS_EXPR:
8560 /* A - (-B) -> A + B */
8561 if (TREE_CODE (arg1) == NEGATE_EXPR)
8562 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8563 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8564 if (TREE_CODE (arg0) == NEGATE_EXPR
8565 && (FLOAT_TYPE_P (type)
8566 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8567 && negate_expr_p (arg1)
8568 && reorder_operands_p (arg0, arg1))
8569 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8570 TREE_OPERAND (arg0, 0));
8571 /* Convert -A - 1 to ~A. */
8572 if (INTEGRAL_TYPE_P (type)
8573 && TREE_CODE (arg0) == NEGATE_EXPR
8574 && integer_onep (arg1))
8575 return fold_build1 (BIT_NOT_EXPR, type,
8576 fold_convert (type, TREE_OPERAND (arg0, 0)));
8578 /* Convert -1 - A to ~A. */
8579 if (INTEGRAL_TYPE_P (type)
8580 && integer_all_onesp (arg0))
8581 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8583 if (! FLOAT_TYPE_P (type))
8585 if (integer_zerop (arg0))
8586 return negate_expr (fold_convert (type, arg1));
8587 if (integer_zerop (arg1))
8588 return non_lvalue (fold_convert (type, arg0));
8590 /* Fold A - (A & B) into ~B & A. */
8591 if (!TREE_SIDE_EFFECTS (arg0)
8592 && TREE_CODE (arg1) == BIT_AND_EXPR)
8594 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8595 return fold_build2 (BIT_AND_EXPR, type,
8596 fold_build1 (BIT_NOT_EXPR, type,
8597 TREE_OPERAND (arg1, 0)),
8598 arg0);
8599 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8600 return fold_build2 (BIT_AND_EXPR, type,
8601 fold_build1 (BIT_NOT_EXPR, type,
8602 TREE_OPERAND (arg1, 1)),
8603 arg0);
8606 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8607 any power of 2 minus 1. */
8608 if (TREE_CODE (arg0) == BIT_AND_EXPR
8609 && TREE_CODE (arg1) == BIT_AND_EXPR
8610 && operand_equal_p (TREE_OPERAND (arg0, 0),
8611 TREE_OPERAND (arg1, 0), 0))
8613 tree mask0 = TREE_OPERAND (arg0, 1);
8614 tree mask1 = TREE_OPERAND (arg1, 1);
8615 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8617 if (operand_equal_p (tem, mask1, 0))
8619 tem = fold_build2 (BIT_XOR_EXPR, type,
8620 TREE_OPERAND (arg0, 0), mask1);
8621 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8626 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8627 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8628 return non_lvalue (fold_convert (type, arg0));
8630 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8631 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8632 (-ARG1 + ARG0) reduces to -ARG1. */
8633 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8634 return negate_expr (fold_convert (type, arg1));
8636 /* Fold &x - &x. This can happen from &x.foo - &x.
8637 This is unsafe for certain floats even in non-IEEE formats.
8638 In IEEE, it is unsafe because it does wrong for NaNs.
8639 Also note that operand_equal_p is always false if an operand
8640 is volatile. */
8642 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8643 && operand_equal_p (arg0, arg1, 0))
8644 return fold_convert (type, integer_zero_node);
8646 /* A - B -> A + (-B) if B is easily negatable. */
8647 if (negate_expr_p (arg1)
8648 && ((FLOAT_TYPE_P (type)
8649 /* Avoid this transformation if B is a positive REAL_CST. */
8650 && (TREE_CODE (arg1) != REAL_CST
8651 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8652 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8653 return fold_build2 (PLUS_EXPR, type,
8654 fold_convert (type, arg0),
8655 fold_convert (type, negate_expr (arg1)));
8657 /* Try folding difference of addresses. */
8659 HOST_WIDE_INT diff;
8661 if ((TREE_CODE (arg0) == ADDR_EXPR
8662 || TREE_CODE (arg1) == ADDR_EXPR)
8663 && ptr_difference_const (arg0, arg1, &diff))
8664 return build_int_cst_type (type, diff);
8667 /* Fold &a[i] - &a[j] to i-j. */
8668 if (TREE_CODE (arg0) == ADDR_EXPR
8669 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8670 && TREE_CODE (arg1) == ADDR_EXPR
8671 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8673 tree aref0 = TREE_OPERAND (arg0, 0);
8674 tree aref1 = TREE_OPERAND (arg1, 0);
8675 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8676 TREE_OPERAND (aref1, 0), 0))
8678 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8679 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8680 tree esz = array_ref_element_size (aref0);
8681 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8682 return fold_build2 (MULT_EXPR, type, diff,
8683 fold_convert (type, esz));
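 /* Editor's note: the difference is rebuilt directly as
    (i - j) * element_size, i.e. the byte offset between the two
    element addresses, without any pointer arithmetic.  */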
8688 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8689 of the array. The loop optimizer sometimes produces this type of
8690 expression. */
8691 if (TREE_CODE (arg0) == ADDR_EXPR)
8693 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8694 if (tem)
8695 return fold_convert (type, tem);
8698 if (flag_unsafe_math_optimizations
8699 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8700 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8701 && (tem = distribute_real_division (code, type, arg0, arg1)))
8702 return tem;
8704 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8705 same or one. */
8706 if ((TREE_CODE (arg0) == MULT_EXPR
8707 || TREE_CODE (arg1) == MULT_EXPR)
8708 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8710 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8711 if (tem)
8712 return tem;
8715 goto associate;
8717 case MULT_EXPR:
8718 /* (-A) * (-B) -> A * B */
8719 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8720 return fold_build2 (MULT_EXPR, type,
8721 TREE_OPERAND (arg0, 0),
8722 negate_expr (arg1));
8723 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8724 return fold_build2 (MULT_EXPR, type,
8725 negate_expr (arg0),
8726 TREE_OPERAND (arg1, 0));
8728 if (! FLOAT_TYPE_P (type))
8730 if (integer_zerop (arg1))
8731 return omit_one_operand (type, arg1, arg0);
8732 if (integer_onep (arg1))
8733 return non_lvalue (fold_convert (type, arg0));
8734 /* Transform x * -1 into -x. */
8735 if (integer_all_onesp (arg1))
8736 return fold_convert (type, negate_expr (arg0));
8738 /* (a * (1 << b)) is (a << b) */
8739 if (TREE_CODE (arg1) == LSHIFT_EXPR
8740 && integer_onep (TREE_OPERAND (arg1, 0)))
8741 return fold_build2 (LSHIFT_EXPR, type, arg0,
8742 TREE_OPERAND (arg1, 1));
8743 if (TREE_CODE (arg0) == LSHIFT_EXPR
8744 && integer_onep (TREE_OPERAND (arg0, 0)))
8745 return fold_build2 (LSHIFT_EXPR, type, arg1,
8746 TREE_OPERAND (arg0, 1));
8748 if (TREE_CODE (arg1) == INTEGER_CST
8749 && 0 != (tem = extract_muldiv (op0,
8750 fold_convert (type, arg1),
8751 code, NULL_TREE)))
8752 return fold_convert (type, tem);
8755 else
8757 /* Maybe fold x * 0 to 0. The expressions aren't the same
8758 when x is NaN, since x * 0 is also NaN. Nor are they the
8759 same in modes with signed zeros, since multiplying a
8760 negative value by 0 gives -0, not +0. */
8761 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8762 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8763 && real_zerop (arg1))
8764 return omit_one_operand (type, arg1, arg0);
8765 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8766 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8767 && real_onep (arg1))
8768 return non_lvalue (fold_convert (type, arg0));
8770 /* Transform x * -1.0 into -x. */
8771 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8772 && real_minus_onep (arg1))
8773 return fold_convert (type, negate_expr (arg0));
8775 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8776 if (flag_unsafe_math_optimizations
8777 && TREE_CODE (arg0) == RDIV_EXPR
8778 && TREE_CODE (arg1) == REAL_CST
8779 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8781 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8782 arg1, 0);
8783 if (tem)
8784 return fold_build2 (RDIV_EXPR, type, tem,
8785 TREE_OPERAND (arg0, 1));
8788 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8789 if (operand_equal_p (arg0, arg1, 0))
8791 tree tem = fold_strip_sign_ops (arg0);
8792 if (tem != NULL_TREE)
8794 tem = fold_convert (type, tem);
8795 return fold_build2 (MULT_EXPR, type, tem, tem);
8799 if (flag_unsafe_math_optimizations)
8801 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8802 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8804 /* Optimizations of root(...)*root(...). */
8805 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8807 tree rootfn, arg, arglist;
8808 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8809 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8811 /* Optimize sqrt(x)*sqrt(x) as x. */
8812 if (BUILTIN_SQRT_P (fcode0)
8813 && operand_equal_p (arg00, arg10, 0)
8814 && ! HONOR_SNANS (TYPE_MODE (type)))
8815 return arg00;
8817 /* Optimize root(x)*root(y) as root(x*y). */
8818 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8819 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8820 arglist = build_tree_list (NULL_TREE, arg);
8821 return build_function_call_expr (rootfn, arglist);
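 /* Editor's note: e.g. sqrt(x)*sqrt(x) becomes plain x (when
    signaling NaNs need not be honored) and sqrt(x)*sqrt(y) becomes
    sqrt(x*y); cbrt combines the same way.  */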
8824 /* Optimize expN(x)*expN(y) as expN(x+y). */
8825 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8827 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8828 tree arg = fold_build2 (PLUS_EXPR, type,
8829 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8830 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8831 tree arglist = build_tree_list (NULL_TREE, arg);
8832 return build_function_call_expr (expfn, arglist);
8835 /* Optimizations of pow(...)*pow(...). */
8836 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8837 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8838 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8840 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8841 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8842 1)));
8843 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8844 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8845 1)));
8847 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8848 if (operand_equal_p (arg01, arg11, 0))
8850 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8851 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8852 tree arglist = tree_cons (NULL_TREE, arg,
8853 build_tree_list (NULL_TREE,
8854 arg01));
8855 return build_function_call_expr (powfn, arglist);
8858 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8859 if (operand_equal_p (arg00, arg10, 0))
8861 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8862 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8863 tree arglist = tree_cons (NULL_TREE, arg00,
8864 build_tree_list (NULL_TREE,
8865 arg));
8866 return build_function_call_expr (powfn, arglist);
8870 /* Optimize tan(x)*cos(x) as sin(x). */
8871 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8872 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8873 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8874 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8875 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8876 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8877 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8878 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8880 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8882 if (sinfn != NULL_TREE)
8883 return build_function_call_expr (sinfn,
8884 TREE_OPERAND (arg0, 1));
8887 /* Optimize x*pow(x,c) as pow(x,c+1). */
8888 if (fcode1 == BUILT_IN_POW
8889 || fcode1 == BUILT_IN_POWF
8890 || fcode1 == BUILT_IN_POWL)
8892 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8893 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8894 1)));
8895 if (TREE_CODE (arg11) == REAL_CST
8896 && ! TREE_CONSTANT_OVERFLOW (arg11)
8897 && operand_equal_p (arg0, arg10, 0))
8899 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8900 REAL_VALUE_TYPE c;
8901 tree arg, arglist;
8903 c = TREE_REAL_CST (arg11);
8904 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8905 arg = build_real (type, c);
8906 arglist = build_tree_list (NULL_TREE, arg);
8907 arglist = tree_cons (NULL_TREE, arg0, arglist);
8908 return build_function_call_expr (powfn, arglist);
8912 /* Optimize pow(x,c)*x as pow(x,c+1). */
8913 if (fcode0 == BUILT_IN_POW
8914 || fcode0 == BUILT_IN_POWF
8915 || fcode0 == BUILT_IN_POWL)
8917 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8918 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8919 1)));
8920 if (TREE_CODE (arg01) == REAL_CST
8921 && ! TREE_CONSTANT_OVERFLOW (arg01)
8922 && operand_equal_p (arg1, arg00, 0))
8924 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8925 REAL_VALUE_TYPE c;
8926 tree arg, arglist;
8928 c = TREE_REAL_CST (arg01);
8929 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8930 arg = build_real (type, c);
8931 arglist = build_tree_list (NULL_TREE, arg);
8932 arglist = tree_cons (NULL_TREE, arg1, arglist);
8933 return build_function_call_expr (powfn, arglist);
8937 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8938 if (! optimize_size
8939 && operand_equal_p (arg0, arg1, 0))
8941 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8943 if (powfn)
8945 tree arg = build_real (type, dconst2);
8946 tree arglist = build_tree_list (NULL_TREE, arg);
8947 arglist = tree_cons (NULL_TREE, arg0, arglist);
8948 return build_function_call_expr (powfn, arglist);
8953 goto associate;
8955 case BIT_IOR_EXPR:
8956 bit_ior:
8957 if (integer_all_onesp (arg1))
8958 return omit_one_operand (type, arg1, arg0);
8959 if (integer_zerop (arg1))
8960 return non_lvalue (fold_convert (type, arg0));
8961 if (operand_equal_p (arg0, arg1, 0))
8962 return non_lvalue (fold_convert (type, arg0));
8964 /* ~X | X is -1. */
8965 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8966 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8968 t1 = build_int_cst (type, -1);
8969 t1 = force_fit_type (t1, 0, false, false);
8970 return omit_one_operand (type, t1, arg1);
8973 /* X | ~X is -1. */
8974 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8975 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8977 t1 = build_int_cst (type, -1);
8978 t1 = force_fit_type (t1, 0, false, false);
8979 return omit_one_operand (type, t1, arg0);
8982 /* Canonicalize (X & C1) | C2. */
8983 if (TREE_CODE (arg0) == BIT_AND_EXPR
8984 && TREE_CODE (arg1) == INTEGER_CST
8985 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8987 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
8988 int width = TYPE_PRECISION (type);
8989 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
8990 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8991 hi2 = TREE_INT_CST_HIGH (arg1);
8992 lo2 = TREE_INT_CST_LOW (arg1);
8994 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
8995 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
8996 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8998 if (width > HOST_BITS_PER_WIDE_INT)
9000 mhi = (unsigned HOST_WIDE_INT) -1
9001 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9002 mlo = -1;
9004 else
9006 mhi = 0;
9007 mlo = (unsigned HOST_WIDE_INT) -1
9008 >> (HOST_BITS_PER_WIDE_INT - width);
9011 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9012 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9013 return fold_build2 (BIT_IOR_EXPR, type,
9014 TREE_OPERAND (arg0, 0), arg1);
9016 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9017 hi1 &= mhi;
9018 lo1 &= mlo;
9019 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9020 return fold_build2 (BIT_IOR_EXPR, type,
9021 fold_build2 (BIT_AND_EXPR, type,
9022 TREE_OPERAND (arg0, 0),
9023 build_int_cst_wide (type,
9024 lo1 & ~lo2,
9025 hi1 & ~hi2)),
9026 arg1);
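 /* Illustrative sketch (editor's example): (x & 0x0F) | 0x0C keeps
    only the mask bits not already forced on by the constant, i.e. it
    canonicalizes to (x & 0x03) | 0x0C.  */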
9029 /* (X & Y) | Y is (X, Y). */
9030 if (TREE_CODE (arg0) == BIT_AND_EXPR
9031 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9032 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9033 /* (X & Y) | X is (Y, X). */
9034 if (TREE_CODE (arg0) == BIT_AND_EXPR
9035 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9036 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9037 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9038 /* X | (X & Y) is (Y, X). */
9039 if (TREE_CODE (arg1) == BIT_AND_EXPR
9040 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9041 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9042 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9043 /* X | (Y & X) is (Y, X). */
9044 if (TREE_CODE (arg1) == BIT_AND_EXPR
9045 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9046 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9047 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9049 t1 = distribute_bit_expr (code, type, arg0, arg1);
9050 if (t1 != NULL_TREE)
9051 return t1;
9053 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9055 This results in more efficient code for machines without a NAND
9056 instruction. Combine will canonicalize to the first form
9057 which will allow use of NAND instructions provided by the
9058 backend if they exist. */
9059 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9060 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9062 return fold_build1 (BIT_NOT_EXPR, type,
9063 build2 (BIT_AND_EXPR, type,
9064 TREE_OPERAND (arg0, 0),
9065 TREE_OPERAND (arg1, 0)));
9068 /* See if this can be simplified into a rotate first. If that
9069 is unsuccessful continue in the association code. */
9070 goto bit_rotate;
9072 case BIT_XOR_EXPR:
9073 if (integer_zerop (arg1))
9074 return non_lvalue (fold_convert (type, arg0));
9075 if (integer_all_onesp (arg1))
9076 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9077 if (operand_equal_p (arg0, arg1, 0))
9078 return omit_one_operand (type, integer_zero_node, arg0);
9080 /* ~X ^ X is -1. */
9081 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9082 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9084 t1 = build_int_cst (type, -1);
9085 t1 = force_fit_type (t1, 0, false, false);
9086 return omit_one_operand (type, t1, arg1);
9089 /* X ^ ~X is -1. */
9090 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9091 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9093 t1 = build_int_cst (type, -1);
9094 t1 = force_fit_type (t1, 0, false, false);
9095 return omit_one_operand (type, t1, arg0);
9098 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9099 with a constant, and the two constants have no bits in common,
9100 we should treat this as a BIT_IOR_EXPR since this may produce more
9101 simplifications. */
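/* For example, (X & 0x0c) ^ (Y & 0x03): the two masks share no bits,
   so no bit position can be set in both operands and the XOR is
   equivalent to (X & 0x0c) | (Y & 0x03). */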
9102 if (TREE_CODE (arg0) == BIT_AND_EXPR
9103 && TREE_CODE (arg1) == BIT_AND_EXPR
9104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9105 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9106 && integer_zerop (const_binop (BIT_AND_EXPR,
9107 TREE_OPERAND (arg0, 1),
9108 TREE_OPERAND (arg1, 1), 0)))
9110 code = BIT_IOR_EXPR;
9111 goto bit_ior;
9114 /* (X | Y) ^ X -> Y & ~X. */
9115 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9116 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9118 tree t2 = TREE_OPERAND (arg0, 1);
9119 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9120 arg1);
9121 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9122 fold_convert (type, t1));
9123 return t1;
9126 /* (Y | X) ^ X -> Y & ~X. */
9127 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9128 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9130 tree t2 = TREE_OPERAND (arg0, 0);
9131 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9132 arg1);
9133 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9134 fold_convert (type, t1));
9135 return t1;
9138 /* X ^ (X | Y) -> Y & ~X. */
9139 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9140 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9142 tree t2 = TREE_OPERAND (arg1, 1);
9143 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9144 arg0);
9145 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9146 fold_convert (type, t1));
9147 return t1;
9150 /* X ^ (Y | X) -> Y & ~X. */
9151 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9152 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9154 tree t2 = TREE_OPERAND (arg1, 0);
9155 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9156 arg0);
9157 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9158 fold_convert (type, t1));
9159 return t1;
9162 /* Convert ~X ^ ~Y to X ^ Y. */
9163 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9164 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9165 return fold_build2 (code, type,
9166 fold_convert (type, TREE_OPERAND (arg0, 0)),
9167 fold_convert (type, TREE_OPERAND (arg1, 0)));
9169 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9170 if (TREE_CODE (arg0) == BIT_AND_EXPR
9171 && integer_onep (TREE_OPERAND (arg0, 1))
9172 && integer_onep (arg1))
9173 return fold_build2 (EQ_EXPR, type, arg0,
9174 build_int_cst (TREE_TYPE (arg0), 0));
9176 /* Fold (X & Y) ^ Y as ~X & Y. */
9177 if (TREE_CODE (arg0) == BIT_AND_EXPR
9178 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9180 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9181 return fold_build2 (BIT_AND_EXPR, type,
9182 fold_build1 (BIT_NOT_EXPR, type, tem),
9183 fold_convert (type, arg1));
9185 /* Fold (X & Y) ^ X as ~Y & X. */
9186 if (TREE_CODE (arg0) == BIT_AND_EXPR
9187 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9188 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9190 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9191 return fold_build2 (BIT_AND_EXPR, type,
9192 fold_build1 (BIT_NOT_EXPR, type, tem),
9193 fold_convert (type, arg1));
9195 /* Fold X ^ (X & Y) as X & ~Y. */
9196 if (TREE_CODE (arg1) == BIT_AND_EXPR
9197 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9199 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9200 return fold_build2 (BIT_AND_EXPR, type,
9201 fold_convert (type, arg0),
9202 fold_build1 (BIT_NOT_EXPR, type, tem));
9204 /* Fold X ^ (Y & X) as ~Y & X. */
9205 if (TREE_CODE (arg1) == BIT_AND_EXPR
9206 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9207 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9209 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9210 return fold_build2 (BIT_AND_EXPR, type,
9211 fold_build1 (BIT_NOT_EXPR, type, tem),
9212 fold_convert (type, arg0));
9215 /* See if this can be simplified into a rotate first. If that
9216 is unsuccessful continue in the association code. */
9217 goto bit_rotate;
9219 case BIT_AND_EXPR:
9220 if (integer_all_onesp (arg1))
9221 return non_lvalue (fold_convert (type, arg0));
9222 if (integer_zerop (arg1))
9223 return omit_one_operand (type, arg1, arg0);
9224 if (operand_equal_p (arg0, arg1, 0))
9225 return non_lvalue (fold_convert (type, arg0));
9227 /* ~X & X is always zero. */
9228 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9229 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9230 return omit_one_operand (type, integer_zero_node, arg1);
9232 /* X & ~X is always zero. */
9233 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9234 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9235 return omit_one_operand (type, integer_zero_node, arg0);
9237 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9238 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9239 && TREE_CODE (arg1) == INTEGER_CST
9240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9241 return fold_build2 (BIT_IOR_EXPR, type,
9242 fold_build2 (BIT_AND_EXPR, type,
9243 TREE_OPERAND (arg0, 0), arg1),
9244 fold_build2 (BIT_AND_EXPR, type,
9245 TREE_OPERAND (arg0, 1), arg1));
9247 /* (X | Y) & Y is (X, Y). */
9248 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9249 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9250 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9251 /* (X | Y) & X is (Y, X). */
9252 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9253 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9254 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9255 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9256 /* X & (X | Y) is (Y, X). */
9257 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9258 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9259 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9260 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9261 /* X & (Y | X) is (Y, X). */
9262 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9263 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9264 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9265 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9267 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9268 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9269 && integer_onep (TREE_OPERAND (arg0, 1))
9270 && integer_onep (arg1))
9272 tem = TREE_OPERAND (arg0, 0);
9273 return fold_build2 (EQ_EXPR, type,
9274 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9275 build_int_cst (TREE_TYPE (tem), 1)),
9276 build_int_cst (TREE_TYPE (tem), 0));
9278 /* Fold ~X & 1 as (X & 1) == 0. */
9279 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9280 && integer_onep (arg1))
9282 tem = TREE_OPERAND (arg0, 0);
9283 return fold_build2 (EQ_EXPR, type,
9284 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9285 build_int_cst (TREE_TYPE (tem), 1)),
9286 build_int_cst (TREE_TYPE (tem), 0));
9289 /* Fold (X ^ Y) & Y as ~X & Y. */
9290 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9291 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9293 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9294 return fold_build2 (BIT_AND_EXPR, type,
9295 fold_build1 (BIT_NOT_EXPR, type, tem),
9296 fold_convert (type, arg1));
9298 /* Fold (X ^ Y) & X as ~Y & X. */
9299 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9300 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9301 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9303 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9304 return fold_build2 (BIT_AND_EXPR, type,
9305 fold_build1 (BIT_NOT_EXPR, type, tem),
9306 fold_convert (type, arg1));
9308 /* Fold X & (X ^ Y) as X & ~Y. */
9309 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9310 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9312 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9313 return fold_build2 (BIT_AND_EXPR, type,
9314 fold_convert (type, arg0),
9315 fold_build1 (BIT_NOT_EXPR, type, tem));
9317 /* Fold X & (Y ^ X) as ~Y & X. */
9318 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9319 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9320 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9322 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9323 return fold_build2 (BIT_AND_EXPR, type,
9324 fold_build1 (BIT_NOT_EXPR, type, tem),
9325 fold_convert (type, arg0));
9328 t1 = distribute_bit_expr (code, type, arg0, arg1);
9329 if (t1 != NULL_TREE)
9330 return t1;
9331 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9332 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9333 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9335 unsigned int prec
9336 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9338 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9339 && (~TREE_INT_CST_LOW (arg1)
9340 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9341 return fold_convert (type, TREE_OPERAND (arg0, 0));
9344 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9346 This results in more efficient code for machines without a NOR
9347 instruction. Combine will canonicalize to the first form
9348 which will allow use of NOR instructions provided by the
9349 backend if they exist. */
9350 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9351 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9353 return fold_build1 (BIT_NOT_EXPR, type,
9354 build2 (BIT_IOR_EXPR, type,
9355 TREE_OPERAND (arg0, 0),
9356 TREE_OPERAND (arg1, 0)));
9359 goto associate;
9361 case RDIV_EXPR:
9362 /* Don't touch a floating-point divide by zero unless the mode
9363 of the constant can represent infinity. */
9364 if (TREE_CODE (arg1) == REAL_CST
9365 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9366 && real_zerop (arg1))
9367 return NULL_TREE;
9369 /* Optimize A / A to 1.0 if we don't care about
9370 NaNs or Infinities. Skip the transformation
9371 for non-real operands. */
9372 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9373 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9374 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9375 && operand_equal_p (arg0, arg1, 0))
9377 tree r = build_real (TREE_TYPE (arg0), dconst1);
9379 return omit_two_operands (type, r, arg0, arg1);
9382 /* The complex version of the above A / A optimization. */
9383 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9384 && operand_equal_p (arg0, arg1, 0))
9386 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9387 if (! HONOR_NANS (TYPE_MODE (elem_type))
9388 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9390 tree r = build_real (elem_type, dconst1);
9391 /* omit_two_operands will call fold_convert for us. */
9392 return omit_two_operands (type, r, arg0, arg1);
9396 /* (-A) / (-B) -> A / B */
9397 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9398 return fold_build2 (RDIV_EXPR, type,
9399 TREE_OPERAND (arg0, 0),
9400 negate_expr (arg1));
9401 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9402 return fold_build2 (RDIV_EXPR, type,
9403 negate_expr (arg0),
9404 TREE_OPERAND (arg1, 0));
9406 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9407 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9408 && real_onep (arg1))
9409 return non_lvalue (fold_convert (type, arg0));
9411 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9412 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9413 && real_minus_onep (arg1))
9414 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9416 /* If ARG1 is a constant, we can convert this to a multiply by the
9417 reciprocal. This does not have the same rounding properties,
9418 so only do this if -funsafe-math-optimizations. We can actually
9419 always safely do it if ARG1 is a power of two, but it's hard to
9420 tell if it is or not in a portable manner. */
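/* For example, x / 4.0 becomes x * 0.25 under the exact-inverse test
   below, since 0.25 is representable exactly; x / 3.0 has no exact
   reciprocal and is only rewritten with -funsafe-math-optimizations. */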
9421 if (TREE_CODE (arg1) == REAL_CST)
9423 if (flag_unsafe_math_optimizations
9424 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9425 arg1, 0)))
9426 return fold_build2 (MULT_EXPR, type, arg0, tem);
9427 /* Find the reciprocal if optimizing and the result is exact. */
9428 if (optimize)
9430 REAL_VALUE_TYPE r;
9431 r = TREE_REAL_CST (arg1);
9432 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9434 tem = build_real (type, r);
9435 return fold_build2 (MULT_EXPR, type,
9436 fold_convert (type, arg0), tem);
9440 /* Convert A/B/C to A/(B*C). */
9441 if (flag_unsafe_math_optimizations
9442 && TREE_CODE (arg0) == RDIV_EXPR)
9443 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9444 fold_build2 (MULT_EXPR, type,
9445 TREE_OPERAND (arg0, 1), arg1));
9447 /* Convert A/(B/C) to (A/B)*C. */
9448 if (flag_unsafe_math_optimizations
9449 && TREE_CODE (arg1) == RDIV_EXPR)
9450 return fold_build2 (MULT_EXPR, type,
9451 fold_build2 (RDIV_EXPR, type, arg0,
9452 TREE_OPERAND (arg1, 0)),
9453 TREE_OPERAND (arg1, 1));
9455 /* Convert C1/(X*C2) into (C1/C2)/X. */
9456 if (flag_unsafe_math_optimizations
9457 && TREE_CODE (arg1) == MULT_EXPR
9458 && TREE_CODE (arg0) == REAL_CST
9459 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9461 tree tem = const_binop (RDIV_EXPR, arg0,
9462 TREE_OPERAND (arg1, 1), 0);
9463 if (tem)
9464 return fold_build2 (RDIV_EXPR, type, tem,
9465 TREE_OPERAND (arg1, 0));
9468 if (flag_unsafe_math_optimizations)
9470 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9471 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9473 /* Optimize sin(x)/cos(x) as tan(x). */
9474 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9475 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9476 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9477 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9478 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9480 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9482 if (tanfn != NULL_TREE)
9483 return build_function_call_expr (tanfn,
9484 TREE_OPERAND (arg0, 1));
9487 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9488 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9489 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9490 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9491 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9492 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9494 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9496 if (tanfn != NULL_TREE)
9498 tree tmp = TREE_OPERAND (arg0, 1);
9499 tmp = build_function_call_expr (tanfn, tmp);
9500 return fold_build2 (RDIV_EXPR, type,
9501 build_real (type, dconst1), tmp);
9505 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9506 NaNs or Infinities. */
9507 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9508 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9509 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9511 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9512 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9514 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9515 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9516 && operand_equal_p (arg00, arg01, 0))
9518 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9520 if (cosfn != NULL_TREE)
9521 return build_function_call_expr (cosfn,
9522 TREE_OPERAND (arg0, 1));
9526 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9527 NaNs or Infinities. */
9528 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9529 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9530 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9532 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9533 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9535 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9536 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9537 && operand_equal_p (arg00, arg01, 0))
9539 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9541 if (cosfn != NULL_TREE)
9543 tree tmp = TREE_OPERAND (arg0, 1);
9544 tmp = build_function_call_expr (cosfn, tmp);
9545 return fold_build2 (RDIV_EXPR, type,
9546 build_real (type, dconst1),
9547 tmp);
9552 /* Optimize pow(x,c)/x as pow(x,c-1). */
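/* For example, pow (x, 3.0) / x becomes pow (x, 2.0); this is only
   done under flag_unsafe_math_optimizations, so x == 0 need not be
   treated specially. */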
9553 if (fcode0 == BUILT_IN_POW
9554 || fcode0 == BUILT_IN_POWF
9555 || fcode0 == BUILT_IN_POWL)
9557 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9558 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9559 if (TREE_CODE (arg01) == REAL_CST
9560 && ! TREE_CONSTANT_OVERFLOW (arg01)
9561 && operand_equal_p (arg1, arg00, 0))
9563 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9564 REAL_VALUE_TYPE c;
9565 tree arg, arglist;
9567 c = TREE_REAL_CST (arg01);
9568 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9569 arg = build_real (type, c);
9570 arglist = build_tree_list (NULL_TREE, arg);
9571 arglist = tree_cons (NULL_TREE, arg1, arglist);
9572 return build_function_call_expr (powfn, arglist);
9576 /* Optimize x/expN(y) into x*expN(-y). */
9577 if (BUILTIN_EXPONENT_P (fcode1))
9579 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9580 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9581 tree arglist = build_tree_list (NULL_TREE,
9582 fold_convert (type, arg));
9583 arg1 = build_function_call_expr (expfn, arglist);
9584 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9587 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9588 if (fcode1 == BUILT_IN_POW
9589 || fcode1 == BUILT_IN_POWF
9590 || fcode1 == BUILT_IN_POWL)
9592 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9593 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9594 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9595 tree neg11 = fold_convert (type, negate_expr (arg11));
9596 tree arglist = tree_cons (NULL_TREE, arg10,
9597 build_tree_list (NULL_TREE, neg11));
9598 arg1 = build_function_call_expr (powfn, arglist);
9599 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9602 return NULL_TREE;
9604 case TRUNC_DIV_EXPR:
9605 case FLOOR_DIV_EXPR:
9606 /* Simplify A / (B << N) where A and B are positive and B is
9607 a power of 2, to A >> (N + log2(B)). */
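/* For example, with A == 48, B == 4 and N == 1:
   48 / (4 << 1) == 48 / 8 == 6 == 48 >> (1 + 2). */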
9608 if (TREE_CODE (arg1) == LSHIFT_EXPR
9609 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9611 tree sval = TREE_OPERAND (arg1, 0);
9612 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9614 tree sh_cnt = TREE_OPERAND (arg1, 1);
9615 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9617 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9618 sh_cnt, build_int_cst (NULL_TREE, pow2));
9619 return fold_build2 (RSHIFT_EXPR, type,
9620 fold_convert (type, arg0), sh_cnt);
9623 /* Fall thru */
9625 case ROUND_DIV_EXPR:
9626 case CEIL_DIV_EXPR:
9627 case EXACT_DIV_EXPR:
9628 if (integer_onep (arg1))
9629 return non_lvalue (fold_convert (type, arg0));
9630 if (integer_zerop (arg1))
9631 return NULL_TREE;
9632 /* X / -1 is -X. */
9633 if (!TYPE_UNSIGNED (type)
9634 && TREE_CODE (arg1) == INTEGER_CST
9635 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9636 && TREE_INT_CST_HIGH (arg1) == -1)
9637 return fold_convert (type, negate_expr (arg0));
9639 /* Convert -A / -B to A / B when the type is signed and overflow is
9640 undefined. */
9641 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9642 && TREE_CODE (arg0) == NEGATE_EXPR
9643 && negate_expr_p (arg1))
9644 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9645 negate_expr (arg1));
9646 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9647 && TREE_CODE (arg1) == NEGATE_EXPR
9648 && negate_expr_p (arg0))
9649 return fold_build2 (code, type, negate_expr (arg0),
9650 TREE_OPERAND (arg1, 0));
9652 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9653 operation, EXACT_DIV_EXPR.
9655 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9656 At one time others generated faster code; it's not clear whether they
9657 still do after the last round of changes to the DIV code in expmed.c. */
9658 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9659 && multiple_of_p (type, arg0, arg1))
9660 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9662 if (TREE_CODE (arg1) == INTEGER_CST
9663 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9664 return fold_convert (type, tem);
9666 return NULL_TREE;
9668 case CEIL_MOD_EXPR:
9669 case FLOOR_MOD_EXPR:
9670 case ROUND_MOD_EXPR:
9671 case TRUNC_MOD_EXPR:
9672 /* X % 1 is always zero, but be sure to preserve any side
9673 effects in X. */
9674 if (integer_onep (arg1))
9675 return omit_one_operand (type, integer_zero_node, arg0);
9677 /* For X % 0, return the expression unchanged so that we can get the
9678 proper warnings and errors. */
9679 if (integer_zerop (arg1))
9680 return NULL_TREE;
9682 /* 0 % X is always zero, but be sure to preserve any side
9683 effects in X. Place this after checking for X == 0. */
9684 if (integer_zerop (arg0))
9685 return omit_one_operand (type, integer_zero_node, arg1);
9687 /* X % -1 is zero. */
9688 if (!TYPE_UNSIGNED (type)
9689 && TREE_CODE (arg1) == INTEGER_CST
9690 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9691 && TREE_INT_CST_HIGH (arg1) == -1)
9692 return omit_one_operand (type, integer_zero_node, arg0);
9694 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9695 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
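/* For example, 13 % 8 == 5 == (13 & 7); for the shifted form below,
   A % (2 << 3) becomes A & 15. */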
9696 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9697 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9699 tree c = arg1;
9700 /* Also optimize A % (C << N) where C is a power of 2,
9701 to A & ((C << N) - 1). */
9702 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9703 c = TREE_OPERAND (arg1, 0);
9705 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9707 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9708 arg1, integer_one_node);
9709 return fold_build2 (BIT_AND_EXPR, type,
9710 fold_convert (type, arg0),
9711 fold_convert (type, mask));
9715 /* X % -C is the same as X % C. */
9716 if (code == TRUNC_MOD_EXPR
9717 && !TYPE_UNSIGNED (type)
9718 && TREE_CODE (arg1) == INTEGER_CST
9719 && !TREE_CONSTANT_OVERFLOW (arg1)
9720 && TREE_INT_CST_HIGH (arg1) < 0
9721 && !flag_trapv
9722 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9723 && !sign_bit_p (arg1, arg1))
9724 return fold_build2 (code, type, fold_convert (type, arg0),
9725 fold_convert (type, negate_expr (arg1)));
9727 /* X % -Y is the same as X % Y. */
9728 if (code == TRUNC_MOD_EXPR
9729 && !TYPE_UNSIGNED (type)
9730 && TREE_CODE (arg1) == NEGATE_EXPR
9731 && !flag_trapv)
9732 return fold_build2 (code, type, fold_convert (type, arg0),
9733 fold_convert (type, TREE_OPERAND (arg1, 0)));
9735 if (TREE_CODE (arg1) == INTEGER_CST
9736 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9737 return fold_convert (type, tem);
9739 return NULL_TREE;
9741 case LROTATE_EXPR:
9742 case RROTATE_EXPR:
9743 if (integer_all_onesp (arg0))
9744 return omit_one_operand (type, arg0, arg1);
9745 goto shift;
9747 case RSHIFT_EXPR:
9748 /* Optimize -1 >> x for arithmetic right shifts. */
9749 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9750 return omit_one_operand (type, arg0, arg1);
9751 /* ... fall through ... */
9753 case LSHIFT_EXPR:
9754 shift:
9755 if (integer_zerop (arg1))
9756 return non_lvalue (fold_convert (type, arg0));
9757 if (integer_zerop (arg0))
9758 return omit_one_operand (type, arg0, arg1);
9760 /* Since a negative shift count is not well-defined,
9761 don't try to compute it in the compiler. */
9762 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9763 return NULL_TREE;
9765 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9766 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
9767 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9768 && host_integerp (TREE_OPERAND (arg0, 1), false)
9769 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9771 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9772 + TREE_INT_CST_LOW (arg1));
9774 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9775 being well defined. */
9776 if (low >= TYPE_PRECISION (type))
9778 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9779 low = low % TYPE_PRECISION (type);
9780 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9781 return build_int_cst (type, 0);
9782 else
9783 low = TYPE_PRECISION (type) - 1;
9786 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9787 build_int_cst (type, low));
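/* For example, (x << 3) << 4 becomes x << 7, while for a 32-bit
   unsigned x the out-of-range (x << 20) << 20 folds to 0. */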
9790 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9791 into x & ((unsigned)-1 >> c) for unsigned types. */
9792 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9793 || (TYPE_UNSIGNED (type)
9794 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9795 && host_integerp (arg1, false)
9796 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9797 && host_integerp (TREE_OPERAND (arg0, 1), false)
9798 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9800 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9801 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9802 tree lshift;
9803 tree arg00;
9805 if (low0 == low1)
9807 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9809 lshift = build_int_cst (type, -1);
9810 lshift = int_const_binop (code, lshift, arg1, 0);
9812 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9816 /* Rewrite an LROTATE_EXPR by a constant into an
9817 RROTATE_EXPR by a new constant. */
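/* For example, rotating a 32-bit value left by 8 is rewritten as
   rotating it right by 32 - 8 == 24. */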
9818 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9820 tree tem = build_int_cst (NULL_TREE,
9821 GET_MODE_BITSIZE (TYPE_MODE (type)));
9822 tem = fold_convert (TREE_TYPE (arg1), tem);
9823 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9824 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
9827 /* If we have a rotate of a bit operation with the rotate count and
9828 the second operand of the bit operation both constant,
9829 permute the two operations. */
9830 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9831 && (TREE_CODE (arg0) == BIT_AND_EXPR
9832 || TREE_CODE (arg0) == BIT_IOR_EXPR
9833 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9835 return fold_build2 (TREE_CODE (arg0), type,
9836 fold_build2 (code, type,
9837 TREE_OPERAND (arg0, 0), arg1),
9838 fold_build2 (code, type,
9839 TREE_OPERAND (arg0, 1), arg1));
9841 /* Two consecutive rotates adding up to the width of the mode can
9842 be ignored. */
9843 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9844 && TREE_CODE (arg0) == RROTATE_EXPR
9845 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9846 && TREE_INT_CST_HIGH (arg1) == 0
9847 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
9848 && ((TREE_INT_CST_LOW (arg1)
9849 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
9850 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
9851 return TREE_OPERAND (arg0, 0);
9853 return NULL_TREE;
9855 case MIN_EXPR:
9856 if (operand_equal_p (arg0, arg1, 0))
9857 return omit_one_operand (type, arg0, arg1);
9858 if (INTEGRAL_TYPE_P (type)
9859 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9860 return omit_one_operand (type, arg1, arg0);
9861 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
9862 if (tem)
9863 return tem;
9864 goto associate;
9866 case MAX_EXPR:
9867 if (operand_equal_p (arg0, arg1, 0))
9868 return omit_one_operand (type, arg0, arg1);
9869 if (INTEGRAL_TYPE_P (type)
9870 && TYPE_MAX_VALUE (type)
9871 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9872 return omit_one_operand (type, arg1, arg0);
9873 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
9874 if (tem)
9875 return tem;
9876 goto associate;
9878 case TRUTH_ANDIF_EXPR:
9879 /* Note that the operands of this must be ints
9880 and their values must be 0 or 1.
9881 ("true" is a fixed value perhaps depending on the language.) */
9882 /* If first arg is constant zero, return it. */
9883 if (integer_zerop (arg0))
9884 return fold_convert (type, arg0);
9885 case TRUTH_AND_EXPR:
9886 /* If either arg is constant true, drop it. */
9887 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9888 return non_lvalue (fold_convert (type, arg1));
9889 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
9890 /* Preserve sequence points. */
9891 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9892 return non_lvalue (fold_convert (type, arg0));
9893 /* If second arg is constant zero, result is zero, but first arg
9894 must be evaluated. */
9895 if (integer_zerop (arg1))
9896 return omit_one_operand (type, arg1, arg0);
9897 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9898 case will be handled here. */
9899 if (integer_zerop (arg0))
9900 return omit_one_operand (type, arg0, arg1);
9902 /* !X && X is always false. */
9903 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9904 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9905 return omit_one_operand (type, integer_zero_node, arg1);
9906 /* X && !X is always false. */
9907 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9908 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9909 return omit_one_operand (type, integer_zero_node, arg0);
9911 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
9912 means A >= Y && A != MAX, but in this case we know that
9913 A < X <= MAX. */
9915 if (!TREE_SIDE_EFFECTS (arg0)
9916 && !TREE_SIDE_EFFECTS (arg1))
9918 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
9919 if (tem && !operand_equal_p (tem, arg0, 0))
9920 return fold_build2 (code, type, tem, arg1);
9922 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
9923 if (tem && !operand_equal_p (tem, arg1, 0))
9924 return fold_build2 (code, type, arg0, tem);
9927 truth_andor:
9928 /* We only do these simplifications if we are optimizing. */
9929 if (!optimize)
9930 return NULL_TREE;
9932 /* Check for things like (A || B) && (A || C). We can convert this
9933 to A || (B && C). Note that either operator can be any of the four
9934 truth and/or operations and the transformation will still be
9935 valid. Also note that we only care about order for the
9936 ANDIF and ORIF operators. If B contains side effects, this
9937 might change the truth-value of A. */
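/* The conversion is the distributive law for the truth operators:
   if A is true, both forms are true; if A is false, both reduce
   to B && C. */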
9938 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9939 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9940 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9941 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9942 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9943 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9945 tree a00 = TREE_OPERAND (arg0, 0);
9946 tree a01 = TREE_OPERAND (arg0, 1);
9947 tree a10 = TREE_OPERAND (arg1, 0);
9948 tree a11 = TREE_OPERAND (arg1, 1);
9949 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9950 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9951 && (code == TRUTH_AND_EXPR
9952 || code == TRUTH_OR_EXPR));
9954 if (operand_equal_p (a00, a10, 0))
9955 return fold_build2 (TREE_CODE (arg0), type, a00,
9956 fold_build2 (code, type, a01, a11));
9957 else if (commutative && operand_equal_p (a00, a11, 0))
9958 return fold_build2 (TREE_CODE (arg0), type, a00,
9959 fold_build2 (code, type, a01, a10));
9960 else if (commutative && operand_equal_p (a01, a10, 0))
9961 return fold_build2 (TREE_CODE (arg0), type, a01,
9962 fold_build2 (code, type, a00, a11));
9964 /* This case is tricky because we must either have commutative
9965 operators or else A10 must not have side-effects. */
9967 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9968 && operand_equal_p (a01, a11, 0))
9969 return fold_build2 (TREE_CODE (arg0), type,
9970 fold_build2 (code, type, a00, a10),
9971 a01);
9974 /* See if we can build a range comparison. */
9975 if (0 != (tem = fold_range_test (code, type, op0, op1)))
9976 return tem;
9978 /* Check for the possibility of merging component references. If our
9979 lhs is another similar operation, try to merge its rhs with our
9980 rhs. Then try to merge our lhs and rhs. */
9981 if (TREE_CODE (arg0) == code
9982 && 0 != (tem = fold_truthop (code, type,
9983 TREE_OPERAND (arg0, 1), arg1)))
9984 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9986 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
9987 return tem;
9989 return NULL_TREE;
9991 case TRUTH_ORIF_EXPR:
9992 /* Note that the operands of this must be ints
9993 and their values must be 0 or true.
9994 ("true" is a fixed value perhaps depending on the language.) */
9995 /* If first arg is constant true, return it. */
9996 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9997 return fold_convert (type, arg0);
9998 case TRUTH_OR_EXPR:
9999 /* If either arg is constant zero, drop it. */
10000 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10001 return non_lvalue (fold_convert (type, arg1));
10002 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10003 /* Preserve sequence points. */
10004 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10005 return non_lvalue (fold_convert (type, arg0));
10006 /* If second arg is constant true, result is true, but we must
10007 evaluate first arg. */
10008 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10009 return omit_one_operand (type, arg1, arg0);
10010 /* Likewise for first arg, but note this only occurs here for
10011 TRUTH_OR_EXPR. */
10012 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10013 return omit_one_operand (type, arg0, arg1);
10015 /* !X || X is always true. */
10016 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10017 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10018 return omit_one_operand (type, integer_one_node, arg1);
10019 /* X || !X is always true. */
10020 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10021 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10022 return omit_one_operand (type, integer_one_node, arg0);
10024 goto truth_andor;
10026 case TRUTH_XOR_EXPR:
10027 /* If the second arg is constant zero, drop it. */
10028 if (integer_zerop (arg1))
10029 return non_lvalue (fold_convert (type, arg0));
10030 /* If the second arg is constant true, this is a logical inversion. */
10031 if (integer_onep (arg1))
10033 /* Only call invert_truthvalue if operand is a truth value. */
10034 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10035 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10036 else
10037 tem = invert_truthvalue (arg0);
10038 return non_lvalue (fold_convert (type, tem));
10040 /* Identical arguments cancel to zero. */
10041 if (operand_equal_p (arg0, arg1, 0))
10042 return omit_one_operand (type, integer_zero_node, arg0);
10044 /* !X ^ X is always true. */
10045 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10046 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10047 return omit_one_operand (type, integer_one_node, arg1);
10049 /* X ^ !X is always true. */
10050 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10051 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10052 return omit_one_operand (type, integer_one_node, arg0);
10054 return NULL_TREE;
10056 case EQ_EXPR:
10057 case NE_EXPR:
10058 tem = fold_comparison (code, type, op0, op1);
10059 if (tem != NULL_TREE)
10060 return tem;
10062 /* bool_var != 0 becomes bool_var. */
10063 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10064 && code == NE_EXPR)
10065 return non_lvalue (fold_convert (type, arg0));
10067 /* bool_var == 1 becomes bool_var. */
10068 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10069 && code == EQ_EXPR)
10070 return non_lvalue (fold_convert (type, arg0));
10072 /* bool_var != 1 becomes !bool_var. */
10073 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10074 && code == NE_EXPR)
10075 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10077 /* bool_var == 0 becomes !bool_var. */
10078 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10079 && code == EQ_EXPR)
10080 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10082 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10083 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10084 && TREE_CODE (arg1) == INTEGER_CST)
10085 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10086 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10087 arg1));
10089 /* If this is an equality comparison of the address of a non-weak
10090 object against zero, then we know the result. */
10091 if (TREE_CODE (arg0) == ADDR_EXPR
10092 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10093 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10094 && integer_zerop (arg1))
10095 return constant_boolean_node (code != EQ_EXPR, type);
10097 /* If this is an equality comparison of the address of two non-weak,
10098 unaliased symbols neither of which are extern (since we do not
10099 have access to attributes for externs), then we know the result. */
10100 if (TREE_CODE (arg0) == ADDR_EXPR
10101 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10102 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10103 && ! lookup_attribute ("alias",
10104 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10105 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10106 && TREE_CODE (arg1) == ADDR_EXPR
10107 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10108 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10109 && ! lookup_attribute ("alias",
10110 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10111 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10113 /* We know that we're looking at the address of two
10114 non-weak, unaliased, static _DECL nodes.
10116 It is both wasteful and incorrect to call operand_equal_p
10117 to compare the two ADDR_EXPR nodes. It is wasteful in that
10118 all we need to do is test pointer equality for the arguments
10119 to the two ADDR_EXPR nodes. It is incorrect to use
10120 operand_equal_p as that function is NOT equivalent to a
10121 C equality test. It can in fact return false for two
10122 objects which would test as equal using the C equality
10123 operator. */
10124 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10125 return constant_boolean_node (equal
10126 ? code == EQ_EXPR : code != EQ_EXPR,
10127 type);
10130 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10131 a MINUS_EXPR of a constant, we can convert it into a comparison with
10132 a revised constant as long as no overflow occurs. */
10133 if (TREE_CODE (arg1) == INTEGER_CST
10134 && (TREE_CODE (arg0) == PLUS_EXPR
10135 || TREE_CODE (arg0) == MINUS_EXPR)
10136 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10137 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10138 ? MINUS_EXPR : PLUS_EXPR,
10139 arg1, TREE_OPERAND (arg0, 1), 0))
10140 && ! TREE_CONSTANT_OVERFLOW (tem))
10141 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10143 /* Similarly for a NEGATE_EXPR. */
10144 if (TREE_CODE (arg0) == NEGATE_EXPR
10145 && TREE_CODE (arg1) == INTEGER_CST
10146 && 0 != (tem = negate_expr (arg1))
10147 && TREE_CODE (tem) == INTEGER_CST
10148 && ! TREE_CONSTANT_OVERFLOW (tem))
10149 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10151 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10152 for !=. Don't do this for ordered comparisons due to overflow. */
10153 if (TREE_CODE (arg0) == MINUS_EXPR
10154 && integer_zerop (arg1))
10155 return fold_build2 (code, type,
10156 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10158 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10159 if (TREE_CODE (arg0) == ABS_EXPR
10160 && (integer_zerop (arg1) || real_zerop (arg1)))
10161 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10163 /* If this is an EQ or NE comparison with zero and ARG0 is
10164 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10165 two operations, but the latter can be done in one less insn
10166 on machines that have only two-operand insns or on which a
10167 constant cannot be the first operand. */
10168 if (TREE_CODE (arg0) == BIT_AND_EXPR
10169 && integer_zerop (arg1))
10171 tree arg00 = TREE_OPERAND (arg0, 0);
10172 tree arg01 = TREE_OPERAND (arg0, 1);
10173 if (TREE_CODE (arg00) == LSHIFT_EXPR
10174 && integer_onep (TREE_OPERAND (arg00, 0)))
10175 return
10176 fold_build2 (code, type,
10177 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10178 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10179 arg01, TREE_OPERAND (arg00, 1)),
10180 fold_convert (TREE_TYPE (arg0),
10181 integer_one_node)),
10182 arg1);
10183 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10184 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10185 return
10186 fold_build2 (code, type,
10187 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10188 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10189 arg00, TREE_OPERAND (arg01, 1)),
10190 fold_convert (TREE_TYPE (arg0),
10191 integer_one_node)),
10192 arg1);
10195 /* If this is an NE or EQ comparison of zero against the result of a
10196 signed MOD operation whose second operand is a power of 2, make
10197 the MOD operation unsigned since it is simpler and equivalent. */
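/* Whether X % (1 << k) is zero depends only on the low k bits of X,
   which signed and unsigned MOD agree on; e.g. -6 % 4 == -2 while
   4294967290u % 4 == 2, both nonzero. */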
10198 if (integer_zerop (arg1)
10199 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10200 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10201 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10202 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10203 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10204 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10206 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10207 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10208 fold_convert (newtype,
10209 TREE_OPERAND (arg0, 0)),
10210 fold_convert (newtype,
10211 TREE_OPERAND (arg0, 1)));
10213 return fold_build2 (code, type, newmod,
10214 fold_convert (newtype, arg1));
10217 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10218 C1 is a valid shift constant, and C2 is a power of two, i.e.
10219 a single bit. */
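/* For example, with C1 == 3 and C2 == 1, ((X >> 3) & 1) != 0 becomes
   (X & (1 << 3)) != 0, i.e. (X & 8) != 0. */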
10220 if (TREE_CODE (arg0) == BIT_AND_EXPR
10221 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10222 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10223 == INTEGER_CST
10224 && integer_pow2p (TREE_OPERAND (arg0, 1))
10225 && integer_zerop (arg1))
10227 tree itype = TREE_TYPE (arg0);
10228 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10229 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10231 /* Check for a valid shift count. */
10232 if (TREE_INT_CST_HIGH (arg001) == 0
10233 && TREE_INT_CST_LOW (arg001) < prec)
10235 tree arg01 = TREE_OPERAND (arg0, 1);
10236 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10237 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10238 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10239 can be rewritten as (X & (C2 << C1)) != 0. */
10240 if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
10242 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10243 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10244 return fold_build2 (code, type, tem, arg1);
10246 /* Otherwise, for signed (arithmetic) shifts,
10247 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10248 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10249 else if (!TYPE_UNSIGNED (itype))
10250 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10251 arg000, build_int_cst (itype, 0));
10252 /* Otherwise, for unsigned (logical) shifts,
10253 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10254 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10255 else
10256 return omit_one_operand (type,
10257 code == EQ_EXPR ? integer_one_node
10258 : integer_zero_node,
10259 arg000);
10263 /* If this is an NE comparison of zero with an AND of one, remove the
10264 comparison since the AND will give the correct value. */
10265 if (code == NE_EXPR
10266 && integer_zerop (arg1)
10267 && TREE_CODE (arg0) == BIT_AND_EXPR
10268 && integer_onep (TREE_OPERAND (arg0, 1)))
10269 return fold_convert (type, arg0);
10271 /* If we have (A & C) == C where C is a power of 2, convert this into
10272 (A & C) != 0. Similarly for NE_EXPR. */
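/* For example, (A & 8) == 8 becomes (A & 8) != 0, since A & 8 can
   only evaluate to 0 or 8. */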
10273 if (TREE_CODE (arg0) == BIT_AND_EXPR
10274 && integer_pow2p (TREE_OPERAND (arg0, 1))
10275 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10276 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10277 arg0, fold_convert (TREE_TYPE (arg0),
10278 integer_zero_node));
10280 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10281 bit, then fold the expression into A < 0 or A >= 0. */
10282 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10283 if (tem)
10284 return tem;
10286 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10287 Similarly for NE_EXPR. */
10288 if (TREE_CODE (arg0) == BIT_AND_EXPR
10289 && TREE_CODE (arg1) == INTEGER_CST
10290 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10292 tree notc = fold_build1 (BIT_NOT_EXPR,
10293 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10294 TREE_OPERAND (arg0, 1));
10295 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10296 arg1, notc);
10297 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10298 if (integer_nonzerop (dandnotc))
10299 return omit_one_operand (type, rslt, arg0);
10302 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10303 Similarly for NE_EXPR. */
10304 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10305 && TREE_CODE (arg1) == INTEGER_CST
10306 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10308 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10309 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10310 TREE_OPERAND (arg0, 1), notd);
10311 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10312 if (integer_nonzerop (candnotd))
10313 return omit_one_operand (type, rslt, arg0);
10316 /* If this is a comparison of a field, we may be able to simplify it. */
10317 if (((TREE_CODE (arg0) == COMPONENT_REF
10318 && lang_hooks.can_use_bit_fields_p ())
10319 || TREE_CODE (arg0) == BIT_FIELD_REF)
10320 /* Handle the constant case even without -O
10321 to make sure the warnings are given. */
10322 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10324 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10325 if (t1)
10326 return t1;
10329 /* Optimize comparisons of strlen vs zero to a compare of the
10330 first character of the string vs zero. To wit,
10331 strlen(ptr) == 0 => *ptr == 0
10332 strlen(ptr) != 0 => *ptr != 0
10333 Other cases should reduce to one of these two (or a constant)
10334 due to the return value of strlen being unsigned. */
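/* For example, a guard such as strlen (s) == 0 on some char *s
   becomes the equivalent of *s == 0, avoiding the library call. */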
10335 if (TREE_CODE (arg0) == CALL_EXPR
10336 && integer_zerop (arg1))
10338 tree fndecl = get_callee_fndecl (arg0);
10339 tree arglist;
10341 if (fndecl
10342 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10343 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10344 && (arglist = TREE_OPERAND (arg0, 1))
10345 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10346 && ! TREE_CHAIN (arglist))
10348 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10349 return fold_build2 (code, type, iref,
10350 build_int_cst (TREE_TYPE (iref), 0));
10354 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10355 of X. Similarly fold (X >> C) == 0 into X >= 0. */
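/* For example, for a 32-bit signed x, (x >> 31) != 0 becomes x < 0
   and (x >> 31) == 0 becomes x >= 0, since the arithmetic shift by
   width - 1 leaves only the sign bit. */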
10356 if (TREE_CODE (arg0) == RSHIFT_EXPR
10357 && integer_zerop (arg1)
10358 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10360 tree arg00 = TREE_OPERAND (arg0, 0);
10361 tree arg01 = TREE_OPERAND (arg0, 1);
10362 tree itype = TREE_TYPE (arg00);
10363 if (TREE_INT_CST_HIGH (arg01) == 0
10364 && TREE_INT_CST_LOW (arg01)
10365 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10367 if (TYPE_UNSIGNED (itype))
10369 itype = lang_hooks.types.signed_type (itype);
10370 arg00 = fold_convert (itype, arg00);
10372 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10373 type, arg00, build_int_cst (itype, 0));
10377 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10378 if (integer_zerop (arg1)
10379 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10380 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10381 TREE_OPERAND (arg0, 1));
10383 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10384 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10385 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10386 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10387 build_int_cst (TREE_TYPE (arg1), 0));
10388 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10389 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10390 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10391 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10392 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10393 build_int_cst (TREE_TYPE (arg1), 0));
10395 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10396 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10397 && TREE_CODE (arg1) == INTEGER_CST
10398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10399 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10400 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10401 TREE_OPERAND (arg0, 1), arg1));
10403 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10404 (X & C) == 0 when C is a single bit. */
10405 if (TREE_CODE (arg0) == BIT_AND_EXPR
10406 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10407 && integer_zerop (arg1)
10408 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10410 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10411 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10412 TREE_OPERAND (arg0, 1));
10413 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10414 type, tem, arg1);
10417 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10418 constant C is a power of two, i.e. a single bit. */
10419 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10420 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10421 && integer_zerop (arg1)
10422 && integer_pow2p (TREE_OPERAND (arg0, 1))
10423 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10424 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10426 tree arg00 = TREE_OPERAND (arg0, 0);
10427 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10428 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10431 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10432 when C is a power of two, i.e. a single bit. */
10433 if (TREE_CODE (arg0) == BIT_AND_EXPR
10434 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10435 && integer_zerop (arg1)
10436 && integer_pow2p (TREE_OPERAND (arg0, 1))
10437 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10438 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10440 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10441 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10442 arg000, TREE_OPERAND (arg0, 1));
10443 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10444 tem, build_int_cst (TREE_TYPE (tem), 0));
10447 /* If this is a comparison of two exprs that look like an
10448 ARRAY_REF of the same object, then we can fold this to a
10449 comparison of the two offsets. This is only safe for
10450 EQ_EXPR and NE_EXPR because of overflow issues. */
10452 tree base0, offset0, base1, offset1;
10454 if (extract_array_ref (arg0, &base0, &offset0)
10455 && extract_array_ref (arg1, &base1, &offset1)
10456 && operand_equal_p (base0, base1, 0))
10458 /* Handle the case of no offsets on either side specially. */
10459 if (offset0 == NULL_TREE && offset1 == NULL_TREE)
10460 return fold_build2 (code, type, integer_zero_node,
10461 integer_zero_node);
10463 if (!offset0 || !offset1
10464 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
10466 if (offset0 == NULL_TREE)
10467 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
10468 if (offset1 == NULL_TREE)
10469 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
10470 return fold_build2 (code, type, offset0, offset1);
10475 if (integer_zerop (arg1)
10476 && tree_expr_nonzero_p (arg0))
10478 tree res = constant_boolean_node (code == NE_EXPR, type);
10479 return omit_one_operand (type, res, arg0);
10481 return NULL_TREE;
10483 case LT_EXPR:
10484 case GT_EXPR:
10485 case LE_EXPR:
10486 case GE_EXPR:
10487 tem = fold_comparison (code, type, op0, op1);
10488 if (tem != NULL_TREE)
10489 return tem;
10491 /* Transform comparisons of the form X +- C CMP X. */
10492 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10493 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10494 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10495 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10496 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10497 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10498 && !(flag_wrapv || flag_trapv))))
10500 tree arg01 = TREE_OPERAND (arg0, 1);
10501 enum tree_code code0 = TREE_CODE (arg0);
10502 int is_positive;
10504 if (TREE_CODE (arg01) == REAL_CST)
10505 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10506 else
10507 is_positive = tree_int_cst_sgn (arg01);
10509 /* (X - c) > X becomes false. */
10510 if (code == GT_EXPR
10511 && ((code0 == MINUS_EXPR && is_positive >= 0)
10512 || (code0 == PLUS_EXPR && is_positive <= 0)))
10513 return constant_boolean_node (0, type);
10515 /* Likewise (X + c) < X becomes false. */
10516 if (code == LT_EXPR
10517 && ((code0 == PLUS_EXPR && is_positive >= 0)
10518 || (code0 == MINUS_EXPR && is_positive <= 0)))
10519 return constant_boolean_node (0, type);
10521 /* Convert (X - c) <= X to true. */
10522 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10523 && code == LE_EXPR
10524 && ((code0 == MINUS_EXPR && is_positive >= 0)
10525 || (code0 == PLUS_EXPR && is_positive <= 0)))
10526 return constant_boolean_node (1, type);
10528 /* Convert (X + c) >= X to true. */
10529 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10530 && code == GE_EXPR
10531 && ((code0 == PLUS_EXPR && is_positive >= 0)
10532 || (code0 == MINUS_EXPR && is_positive <= 0)))
10533 return constant_boolean_node (1, type);
10535 if (TREE_CODE (arg01) == INTEGER_CST)
10537 /* Convert X + c > X and X - c < X to true for integers. */
10538 if (code == GT_EXPR
10539 && ((code0 == PLUS_EXPR && is_positive > 0)
10540 || (code0 == MINUS_EXPR && is_positive < 0)))
10541 return constant_boolean_node (1, type);
10543 if (code == LT_EXPR
10544 && ((code0 == MINUS_EXPR && is_positive > 0)
10545 || (code0 == PLUS_EXPR && is_positive < 0)))
10546 return constant_boolean_node (1, type);
10548 /* Convert X + c <= X and X - c >= X to false for integers. */
10549 if (code == LE_EXPR
10550 && ((code0 == PLUS_EXPR && is_positive > 0)
10551 || (code0 == MINUS_EXPR && is_positive < 0)))
10552 return constant_boolean_node (0, type);
10554 if (code == GE_EXPR
10555 && ((code0 == MINUS_EXPR && is_positive > 0)
10556 || (code0 == PLUS_EXPR && is_positive < 0)))
10557 return constant_boolean_node (0, type);
10561 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10562 This transformation affects the cases which are handled in later
10563 optimizations involving comparisons with non-negative constants. */
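/* For example, x >= 5 becomes x > 4 and x < 5 becomes x <= 4 for
   integral x and positive constant. */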
10564 if (TREE_CODE (arg1) == INTEGER_CST
10565 && TREE_CODE (arg0) != INTEGER_CST
10566 && tree_int_cst_sgn (arg1) > 0)
10568 if (code == GE_EXPR)
10570 arg1 = const_binop (MINUS_EXPR, arg1,
10571 build_int_cst (TREE_TYPE (arg1), 1), 0);
10572 return fold_build2 (GT_EXPR, type, arg0,
10573 fold_convert (TREE_TYPE (arg0), arg1));
10575 if (code == LT_EXPR)
10577 arg1 = const_binop (MINUS_EXPR, arg1,
10578 build_int_cst (TREE_TYPE (arg1), 1), 0);
10579 return fold_build2 (LE_EXPR, type, arg0,
10580 fold_convert (TREE_TYPE (arg0), arg1));
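	  /* For example, x >= 1 is canonicalized to x > 0, and
	     x < 1 to x <= 0, feeding the non-negative-constant
	     cases below.  */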
10584 /* Comparisons with the highest or lowest possible integer of
10585 the specified size will have known values. */
10587 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10589 if (TREE_CODE (arg1) == INTEGER_CST
10590 && ! TREE_CONSTANT_OVERFLOW (arg1)
10591 && width <= 2 * HOST_BITS_PER_WIDE_INT
10592 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10593 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10595 HOST_WIDE_INT signed_max_hi;
10596 unsigned HOST_WIDE_INT signed_max_lo;
10597 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10599 if (width <= HOST_BITS_PER_WIDE_INT)
10601 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10602 - 1;
10603 signed_max_hi = 0;
10604 max_hi = 0;
10606 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10608 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10609 min_lo = 0;
10610 min_hi = 0;
10612 else
10614 max_lo = signed_max_lo;
10615 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10616 min_hi = -1;
10619 else
10621 width -= HOST_BITS_PER_WIDE_INT;
10622 signed_max_lo = -1;
10623 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10624 - 1;
10625 max_lo = -1;
10626 min_lo = 0;
10628 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10630 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10631 min_hi = 0;
10633 else
10635 max_hi = signed_max_hi;
10636 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10640 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10641 && TREE_INT_CST_LOW (arg1) == max_lo)
10642 switch (code)
10644 case GT_EXPR:
10645 return omit_one_operand (type, integer_zero_node, arg0);
10647 case GE_EXPR:
10648 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10650 case LE_EXPR:
10651 return omit_one_operand (type, integer_one_node, arg0);
10653 case LT_EXPR:
10654 return fold_build2 (NE_EXPR, type, arg0, arg1);
10656 /* The GE_EXPR and LT_EXPR cases above are not normally
10657 reached because of previous transformations. */
10659 default:
10660 break;
10662 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10663 == max_hi
10664 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10665 switch (code)
10667 case GT_EXPR:
10668 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10669 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10670 case LE_EXPR:
10671 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10672 return fold_build2 (NE_EXPR, type, arg0, arg1);
10673 default:
10674 break;
10676 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10677 == min_hi
10678 && TREE_INT_CST_LOW (arg1) == min_lo)
10679 switch (code)
10681 case LT_EXPR:
10682 return omit_one_operand (type, integer_zero_node, arg0);
10684 case LE_EXPR:
10685 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10687 case GE_EXPR:
10688 return omit_one_operand (type, integer_one_node, arg0);
10690 case GT_EXPR:
10691 return fold_build2 (NE_EXPR, type, op0, op1);
10693 default:
10694 break;
10696 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10697 == min_hi
10698 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10699 switch (code)
10701 case GE_EXPR:
10702 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10703 return fold_build2 (NE_EXPR, type, arg0, arg1);
10704 case LT_EXPR:
10705 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10706 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10707 default:
10708 break;
10711 else if (!in_gimple_form
10712 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10713 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10714 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10715 /* signed_type does not work on pointer types. */
10716 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10718 /* The following case also applies to X < signed_max+1
10719 and X >= signed_max+1 because of previous transformations. */
10720 if (code == LE_EXPR || code == GT_EXPR)
10722 tree st0, st1;
10723 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10724 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10725 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10726 type, fold_convert (st0, arg0),
10727 build_int_cst (st1, 0));
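	      /* For example, with 32-bit ints this rewrites
		 (unsigned) x <= INT_MAX as (int) x >= 0, and
		 (unsigned) x > INT_MAX as (int) x < 0.  */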
10733 /* If we are comparing an ABS_EXPR with a constant, we can
10734 convert all the cases into explicit comparisons, but they may
10735 well not be faster than doing the ABS and one comparison.
10736 But ABS (X) <= C is a range comparison, which becomes a subtraction
10737 and a comparison, and is probably faster. */
10738 if (code == LE_EXPR
10739 && TREE_CODE (arg1) == INTEGER_CST
10740 && TREE_CODE (arg0) == ABS_EXPR
10741 && ! TREE_SIDE_EFFECTS (arg0)
10742 && (0 != (tem = negate_expr (arg1)))
10743 && TREE_CODE (tem) == INTEGER_CST
10744 && ! TREE_CONSTANT_OVERFLOW (tem))
10745 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10746 build2 (GE_EXPR, type,
10747 TREE_OPERAND (arg0, 0), tem),
10748 build2 (LE_EXPR, type,
10749 TREE_OPERAND (arg0, 0), arg1));
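      /* For example, abs (x) <= 5 becomes x >= -5 && x <= 5.  */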
10751 /* Convert ABS_EXPR<x> >= 0 to true. */
10752 if (code == GE_EXPR
10753 && tree_expr_nonnegative_p (arg0)
10754 && (integer_zerop (arg1)
10755 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10756 && real_zerop (arg1))))
10757 return omit_one_operand (type, integer_one_node, arg0);
10759 /* Convert ABS_EXPR<x> < 0 to false. */
10760 if (code == LT_EXPR
10761 && tree_expr_nonnegative_p (arg0)
10762 && (integer_zerop (arg1) || real_zerop (arg1)))
10763 return omit_one_operand (type, integer_zero_node, arg0);
10765 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10766 and similarly for >= into !=. */
10767 if ((code == LT_EXPR || code == GE_EXPR)
10768 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10769 && TREE_CODE (arg1) == LSHIFT_EXPR
10770 && integer_onep (TREE_OPERAND (arg1, 0)))
10771 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10772 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10773 TREE_OPERAND (arg1, 1)),
10774 build_int_cst (TREE_TYPE (arg0), 0));
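      /* For example, for unsigned x, x < (1 << y) becomes
	 (x >> y) == 0, and x >= (1 << y) becomes (x >> y) != 0.  */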
10776 if ((code == LT_EXPR || code == GE_EXPR)
10777 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10778 && (TREE_CODE (arg1) == NOP_EXPR
10779 || TREE_CODE (arg1) == CONVERT_EXPR)
10780 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10781 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10782 return
10783 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10784 fold_convert (TREE_TYPE (arg0),
10785 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10786 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10787 1))),
10788 build_int_cst (TREE_TYPE (arg0), 0));
10790 return NULL_TREE;
10792 case UNORDERED_EXPR:
10793 case ORDERED_EXPR:
10794 case UNLT_EXPR:
10795 case UNLE_EXPR:
10796 case UNGT_EXPR:
10797 case UNGE_EXPR:
10798 case UNEQ_EXPR:
10799 case LTGT_EXPR:
10800 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10802 t1 = fold_relational_const (code, type, arg0, arg1);
10803 if (t1 != NULL_TREE)
10804 return t1;
10807 /* If the first operand is NaN, the result is constant. */
10808 if (TREE_CODE (arg0) == REAL_CST
10809 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10810 && (code != LTGT_EXPR || ! flag_trapping_math))
10812 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10813 ? integer_zero_node
10814 : integer_one_node;
10815 return omit_one_operand (type, t1, arg1);
10818 /* If the second operand is NaN, the result is constant. */
10819 if (TREE_CODE (arg1) == REAL_CST
10820 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10821 && (code != LTGT_EXPR || ! flag_trapping_math))
10823 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10824 ? integer_zero_node
10825 : integer_one_node;
10826 return omit_one_operand (type, t1, arg0);
10829 /* Simplify unordered comparison of something with itself. */
10830 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10831 && operand_equal_p (arg0, arg1, 0))
10832 return constant_boolean_node (1, type);
10834 if (code == LTGT_EXPR
10835 && !flag_trapping_math
10836 && operand_equal_p (arg0, arg1, 0))
10837 return constant_boolean_node (0, type);
10839 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10841 tree targ0 = strip_float_extensions (arg0);
10842 tree targ1 = strip_float_extensions (arg1);
10843 tree newtype = TREE_TYPE (targ0);
10845 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10846 newtype = TREE_TYPE (targ1);
10848 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10849 return fold_build2 (code, type, fold_convert (newtype, targ0),
10850 fold_convert (newtype, targ1));
10853 return NULL_TREE;
10855 case COMPOUND_EXPR:
10856 /* When pedantic, a compound expression can be neither an lvalue
10857 nor an integer constant expression. */
10858 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10859 return NULL_TREE;
10860 /* Don't let (0, 0) be a null pointer constant. */
10861 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10862 : fold_convert (type, arg1);
10863 return pedantic_non_lvalue (tem);
10865 case COMPLEX_EXPR:
10866 if ((TREE_CODE (arg0) == REAL_CST
10867 && TREE_CODE (arg1) == REAL_CST)
10868 || (TREE_CODE (arg0) == INTEGER_CST
10869 && TREE_CODE (arg1) == INTEGER_CST))
10870 return build_complex (type, arg0, arg1);
10871 return NULL_TREE;
10873 case ASSERT_EXPR:
10874 /* An ASSERT_EXPR should never be passed to fold_binary. */
10875 gcc_unreachable ();
10877 default:
10878 return NULL_TREE;
10879 } /* switch (code) */
10882 /* Callback for walk_tree, looking for LABEL_EXPR.
10883 Returns *TP if it is a LABEL_EXPR. Otherwise it returns NULL_TREE.
10884 Do not check the sub-tree of GOTO_EXPR. */
10886 static tree
10887 contains_label_1 (tree *tp,
10888 int *walk_subtrees,
10889 void *data ATTRIBUTE_UNUSED)
10891 switch (TREE_CODE (*tp))
10893 case LABEL_EXPR:
10894 return *tp;
10895 case GOTO_EXPR:
10896 *walk_subtrees = 0;
10897 /* FALLTHRU */
10898 default:
10899 return NULL_TREE;
10903 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10904 accessible from outside the sub-tree. Returns false if no
10905 such addressable label is found. */
10907 static bool
10908 contains_label_p (tree st)
10910 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
10913 /* Fold a ternary expression of code CODE and type TYPE with operands
10914 OP0, OP1, and OP2. Return the folded expression if folding is
10915 successful. Otherwise, return NULL_TREE. */
10917 tree
10918 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10920 tree tem;
10921 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10922 enum tree_code_class kind = TREE_CODE_CLASS (code);
10924 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10925 && TREE_CODE_LENGTH (code) == 3);
10927 /* Strip any conversions that don't change the mode. This is safe
10928 for every expression, except for a comparison expression because
10929 its signedness is derived from its operands. So, in the latter
10930 case, only strip conversions that don't change the signedness.
10932 Note that this is done as an internal manipulation within the
10933 constant folder, in order to find the simplest representation of
10934 the arguments so that their form can be studied. In any case,
10935 the appropriate type conversions should be put back in the tree
10936 that will get out of the constant folder. */
10937 if (op0)
10939 arg0 = op0;
10940 STRIP_NOPS (arg0);
10943 if (op1)
10945 arg1 = op1;
10946 STRIP_NOPS (arg1);
10949 switch (code)
10951 case COMPONENT_REF:
10952 if (TREE_CODE (arg0) == CONSTRUCTOR
10953 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10955 unsigned HOST_WIDE_INT idx;
10956 tree field, value;
10957 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10958 if (field == arg1)
10959 return value;
10961 return NULL_TREE;
10963 case COND_EXPR:
10964 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10965 so all simple results must be passed through pedantic_non_lvalue. */
10966 if (TREE_CODE (arg0) == INTEGER_CST)
10968 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10969 tem = integer_zerop (arg0) ? op2 : op1;
10970 /* Only optimize constant conditions when the selected branch
10971 has the same type as the COND_EXPR. This avoids optimizing
10972 away "c ? x : throw", where the throw has a void type.
10973 Avoid throwing away the operand if it contains a label. */
10974 if ((!TREE_SIDE_EFFECTS (unused_op)
10975 || !contains_label_p (unused_op))
10976 && (! VOID_TYPE_P (TREE_TYPE (tem))
10977 || VOID_TYPE_P (type)))
10978 return pedantic_non_lvalue (tem);
10979 return NULL_TREE;
10981 if (operand_equal_p (arg1, op2, 0))
10982 return pedantic_omit_one_operand (type, arg1, arg0);
10984 /* If we have A op B ? A : C, we may be able to convert this to a
10985 simpler expression, depending on the operation and the values
10986 of B and C. Signed zeros prevent all of these transformations,
10987 for reasons given above each one.
10989 Also try swapping the arguments and inverting the conditional. */
10990 if (COMPARISON_CLASS_P (arg0)
10991 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10992 arg1, TREE_OPERAND (arg0, 1))
10993 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10995 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10996 if (tem)
10997 return tem;
11000 if (COMPARISON_CLASS_P (arg0)
11001 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11002 op2,
11003 TREE_OPERAND (arg0, 1))
11004 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11006 tem = invert_truthvalue (arg0);
11007 if (COMPARISON_CLASS_P (tem))
11009 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11010 if (tem)
11011 return tem;
11015 /* If the second operand is simpler than the third, swap them
11016 since that produces better jump optimization results. */
11017 if (truth_value_p (TREE_CODE (arg0))
11018 && tree_swap_operands_p (op1, op2, false))
11020 /* See if this can be inverted. If it can't, possibly because
11021 it was a floating-point inequality comparison, don't do
11022 anything. */
11023 tem = invert_truthvalue (arg0);
11025 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11026 return fold_build3 (code, type, tem, op2, op1);
11029 /* Convert A ? 1 : 0 to simply A. */
11030 if (integer_onep (op1)
11031 && integer_zerop (op2)
11032 /* If we try to convert OP0 to our type, the
11033 call to fold will try to move the conversion inside
11034 a COND, which will recurse. In that case, the COND_EXPR
11035 is probably the best choice, so leave it alone. */
11036 && type == TREE_TYPE (arg0))
11037 return pedantic_non_lvalue (arg0);
11039 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11040 over COND_EXPR in cases such as floating point comparisons. */
11041 if (integer_zerop (op1)
11042 && integer_onep (op2)
11043 && truth_value_p (TREE_CODE (arg0)))
11044 return pedantic_non_lvalue (fold_convert (type,
11045 invert_truthvalue (arg0)));
11047 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11048 if (TREE_CODE (arg0) == LT_EXPR
11049 && integer_zerop (TREE_OPERAND (arg0, 1))
11050 && integer_zerop (op2)
11051 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11052 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
11053 TREE_TYPE (tem), tem, arg1));
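      /* For example, for a 32-bit int a, a < 0 ? 0x80000000 : 0
	 becomes a & 0x80000000, i.e. a bitwise AND with the sign
	 bit.  */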
11055 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11056 already handled above. */
11057 if (TREE_CODE (arg0) == BIT_AND_EXPR
11058 && integer_onep (TREE_OPERAND (arg0, 1))
11059 && integer_zerop (op2)
11060 && integer_pow2p (arg1))
11062 tree tem = TREE_OPERAND (arg0, 0);
11063 STRIP_NOPS (tem);
11064 if (TREE_CODE (tem) == RSHIFT_EXPR
11065 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11066 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11067 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11068 return fold_build2 (BIT_AND_EXPR, type,
11069 TREE_OPERAND (tem, 0), arg1);
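	  /* For example, ((a >> 3) & 1) ? 8 : 0 becomes a & 8,
	     since 8 == 1 << 3.  */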
11072 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11073 is probably obsolete because the first operand should be a
11074 truth value (that's why we have the two cases above), but let's
11075 leave it in until we can confirm this for all front-ends. */
11076 if (integer_zerop (op2)
11077 && TREE_CODE (arg0) == NE_EXPR
11078 && integer_zerop (TREE_OPERAND (arg0, 1))
11079 && integer_pow2p (arg1)
11080 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11081 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11082 arg1, OEP_ONLY_CONST))
11083 return pedantic_non_lvalue (fold_convert (type,
11084 TREE_OPERAND (arg0, 0)));
11086 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11087 if (integer_zerop (op2)
11088 && truth_value_p (TREE_CODE (arg0))
11089 && truth_value_p (TREE_CODE (arg1)))
11090 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11091 fold_convert (type, arg0),
11092 arg1);
11094 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11095 if (integer_onep (op2)
11096 && truth_value_p (TREE_CODE (arg0))
11097 && truth_value_p (TREE_CODE (arg1)))
11099 /* Only perform transformation if ARG0 is easily inverted. */
11100 tem = invert_truthvalue (arg0);
11101 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11102 return fold_build2 (TRUTH_ORIF_EXPR, type,
11103 fold_convert (type, tem),
11104 arg1);
11107 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11108 if (integer_zerop (arg1)
11109 && truth_value_p (TREE_CODE (arg0))
11110 && truth_value_p (TREE_CODE (op2)))
11112 /* Only perform transformation if ARG0 is easily inverted. */
11113 tem = invert_truthvalue (arg0);
11114 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11115 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11116 fold_convert (type, tem),
11117 op2);
11120 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11121 if (integer_onep (arg1)
11122 && truth_value_p (TREE_CODE (arg0))
11123 && truth_value_p (TREE_CODE (op2)))
11124 return fold_build2 (TRUTH_ORIF_EXPR, type,
11125 fold_convert (type, arg0),
11126 op2);
11128 return NULL_TREE;
11130 case CALL_EXPR:
11131 /* Check for a built-in function. */
11132 if (TREE_CODE (op0) == ADDR_EXPR
11133 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11134 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11135 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11136 return NULL_TREE;
11138 case BIT_FIELD_REF:
11139 if (TREE_CODE (arg0) == VECTOR_CST
11140 && type == TREE_TYPE (TREE_TYPE (arg0))
11141 && host_integerp (arg1, 1)
11142 && host_integerp (op2, 1))
11144 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11145 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11147 if (width != 0
11148 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11149 && (idx % width) == 0
11150 && (idx = idx / width)
11151 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11153 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11154 while (idx-- > 0 && elements)
11155 elements = TREE_CHAIN (elements);
11156 if (elements)
11157 return TREE_VALUE (elements);
11158 else
11159 return fold_convert (type, integer_zero_node);
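	      /* For example, extracting element 1 of the vector
		 constant {1, 2, 3, 4} this way yields 2; indices past
		 the end of the element chain yield zero.  */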
11162 return NULL_TREE;
11164 default:
11165 return NULL_TREE;
11166 } /* switch (code) */
11169 /* Perform constant folding and related simplification of EXPR.
11170 The related simplifications include x*1 => x, x*0 => 0, etc.,
11171 and application of the associative law.
11172 NOP_EXPR conversions may be removed freely (as long as we
11173 are careful not to change the type of the overall expression).
11174 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11175 but we can constant-fold them if they have constant operands. */
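/* For example, fold applied to the tree for 2 + 3 dispatches to
   fold_binary below and yields the INTEGER_CST 5; when no
   simplification applies, the original EXPR is returned unchanged.  */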
11177 #ifdef ENABLE_FOLD_CHECKING
11178 # define fold(x) fold_1 (x)
11179 static tree fold_1 (tree);
11180 static
11181 #endif
11182 tree
11183 fold (tree expr)
11185 const tree t = expr;
11186 enum tree_code code = TREE_CODE (t);
11187 enum tree_code_class kind = TREE_CODE_CLASS (code);
11188 tree tem;
11190 /* Return right away if a constant. */
11191 if (kind == tcc_constant)
11192 return t;
11194 if (IS_EXPR_CODE_CLASS (kind))
11196 tree type = TREE_TYPE (t);
11197 tree op0, op1, op2;
11199 switch (TREE_CODE_LENGTH (code))
11201 case 1:
11202 op0 = TREE_OPERAND (t, 0);
11203 tem = fold_unary (code, type, op0);
11204 return tem ? tem : expr;
11205 case 2:
11206 op0 = TREE_OPERAND (t, 0);
11207 op1 = TREE_OPERAND (t, 1);
11208 tem = fold_binary (code, type, op0, op1);
11209 return tem ? tem : expr;
11210 case 3:
11211 op0 = TREE_OPERAND (t, 0);
11212 op1 = TREE_OPERAND (t, 1);
11213 op2 = TREE_OPERAND (t, 2);
11214 tem = fold_ternary (code, type, op0, op1, op2);
11215 return tem ? tem : expr;
11216 default:
11217 break;
11221 switch (code)
11223 case CONST_DECL:
11224 return fold (DECL_INITIAL (t));
11226 default:
11227 return t;
11228 } /* switch (code) */
11231 #ifdef ENABLE_FOLD_CHECKING
11232 #undef fold
11234 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11235 static void fold_check_failed (tree, tree);
11236 void print_fold_checksum (tree);
11238 /* When --enable-checking=fold, compute a digest of expr before
11239 and after the actual fold call to verify that fold did not
11240 accidentally change the original expr. */
11242 tree
11243 fold (tree expr)
11245 tree ret;
11246 struct md5_ctx ctx;
11247 unsigned char checksum_before[16], checksum_after[16];
11248 htab_t ht;
11250 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11251 md5_init_ctx (&ctx);
11252 fold_checksum_tree (expr, &ctx, ht);
11253 md5_finish_ctx (&ctx, checksum_before);
11254 htab_empty (ht);
11256 ret = fold_1 (expr);
11258 md5_init_ctx (&ctx);
11259 fold_checksum_tree (expr, &ctx, ht);
11260 md5_finish_ctx (&ctx, checksum_after);
11261 htab_delete (ht);
11263 if (memcmp (checksum_before, checksum_after, 16))
11264 fold_check_failed (expr, ret);
11266 return ret;
11269 void
11270 print_fold_checksum (tree expr)
11272 struct md5_ctx ctx;
11273 unsigned char checksum[16], cnt;
11274 htab_t ht;
11276 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11277 md5_init_ctx (&ctx);
11278 fold_checksum_tree (expr, &ctx, ht);
11279 md5_finish_ctx (&ctx, checksum);
11280 htab_delete (ht);
11281 for (cnt = 0; cnt < 16; ++cnt)
11282 fprintf (stderr, "%02x", checksum[cnt]);
11283 putc ('\n', stderr);
11286 static void
11287 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11289 internal_error ("fold check: original tree changed by fold");
11292 static void
11293 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11295 void **slot;
11296 enum tree_code code;
11297 struct tree_function_decl buf;
11298 int i, len;
11300 recursive_label:
11302 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11303 <= sizeof (struct tree_function_decl))
11304 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11305 if (expr == NULL)
11306 return;
11307 slot = htab_find_slot (ht, expr, INSERT);
11308 if (*slot != NULL)
11309 return;
11310 *slot = expr;
11311 code = TREE_CODE (expr);
11312 if (TREE_CODE_CLASS (code) == tcc_declaration
11313 && DECL_ASSEMBLER_NAME_SET_P (expr))
11315 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11316 memcpy ((char *) &buf, expr, tree_size (expr));
11317 expr = (tree) &buf;
11318 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11320 else if (TREE_CODE_CLASS (code) == tcc_type
11321 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11322 || TYPE_CACHED_VALUES_P (expr)
11323 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11325 /* Allow these fields to be modified. */
11326 memcpy ((char *) &buf, expr, tree_size (expr));
11327 expr = (tree) &buf;
11328 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11329 TYPE_POINTER_TO (expr) = NULL;
11330 TYPE_REFERENCE_TO (expr) = NULL;
11331 if (TYPE_CACHED_VALUES_P (expr))
11333 TYPE_CACHED_VALUES_P (expr) = 0;
11334 TYPE_CACHED_VALUES (expr) = NULL;
11337 md5_process_bytes (expr, tree_size (expr), ctx);
11338 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11339 if (TREE_CODE_CLASS (code) != tcc_type
11340 && TREE_CODE_CLASS (code) != tcc_declaration
11341 && code != TREE_LIST)
11342 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11343 switch (TREE_CODE_CLASS (code))
11345 case tcc_constant:
11346 switch (code)
11348 case STRING_CST:
11349 md5_process_bytes (TREE_STRING_POINTER (expr),
11350 TREE_STRING_LENGTH (expr), ctx);
11351 break;
11352 case COMPLEX_CST:
11353 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11354 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11355 break;
11356 case VECTOR_CST:
11357 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11358 break;
11359 default:
11360 break;
11362 break;
11363 case tcc_exceptional:
11364 switch (code)
11366 case TREE_LIST:
11367 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11368 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11369 expr = TREE_CHAIN (expr);
11370 goto recursive_label;
11371 break;
11372 case TREE_VEC:
11373 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11374 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11375 break;
11376 default:
11377 break;
11379 break;
11380 case tcc_expression:
11381 case tcc_reference:
11382 case tcc_comparison:
11383 case tcc_unary:
11384 case tcc_binary:
11385 case tcc_statement:
11386 len = TREE_CODE_LENGTH (code);
11387 for (i = 0; i < len; ++i)
11388 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11389 break;
11390 case tcc_declaration:
11391 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11392 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11393 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11395 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11396 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11397 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11398 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11399 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11401 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11402 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11404 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11406 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11407 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11408 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11410 break;
11411 case tcc_type:
11412 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11413 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11414 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11415 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11416 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11417 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11418 if (INTEGRAL_TYPE_P (expr)
11419 || SCALAR_FLOAT_TYPE_P (expr))
11421 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11422 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11424 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11425 if (TREE_CODE (expr) == RECORD_TYPE
11426 || TREE_CODE (expr) == UNION_TYPE
11427 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11428 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11429 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11430 break;
11431 default:
11432 break;
11436 #endif
11438 /* Fold a unary tree expression with code CODE of type TYPE with an
11439 operand OP0. Return a folded expression if successful. Otherwise,
11440 return a tree expression with code CODE of type TYPE with an
11441 operand OP0. */
11443 tree
11444 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11446 tree tem;
11447 #ifdef ENABLE_FOLD_CHECKING
11448 unsigned char checksum_before[16], checksum_after[16];
11449 struct md5_ctx ctx;
11450 htab_t ht;
11452 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11453 md5_init_ctx (&ctx);
11454 fold_checksum_tree (op0, &ctx, ht);
11455 md5_finish_ctx (&ctx, checksum_before);
11456 htab_empty (ht);
11457 #endif
11459 tem = fold_unary (code, type, op0);
11460 if (!tem)
11461 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11463 #ifdef ENABLE_FOLD_CHECKING
11464 md5_init_ctx (&ctx);
11465 fold_checksum_tree (op0, &ctx, ht);
11466 md5_finish_ctx (&ctx, checksum_after);
11467 htab_delete (ht);
11469 if (memcmp (checksum_before, checksum_after, 16))
11470 fold_check_failed (op0, tem);
11471 #endif
11472 return tem;
11475 /* Fold a binary tree expression with code CODE of type TYPE with
11476 operands OP0 and OP1. Return a folded expression if successful.
11477 Otherwise, return a tree expression with code CODE of type TYPE
11478 with operands OP0 and OP1. */
11480 tree
11481 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11482 MEM_STAT_DECL)
11484 tree tem;
11485 #ifdef ENABLE_FOLD_CHECKING
11486 unsigned char checksum_before_op0[16],
11487 checksum_before_op1[16],
11488 checksum_after_op0[16],
11489 checksum_after_op1[16];
11490 struct md5_ctx ctx;
11491 htab_t ht;
11493 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11494 md5_init_ctx (&ctx);
11495 fold_checksum_tree (op0, &ctx, ht);
11496 md5_finish_ctx (&ctx, checksum_before_op0);
11497 htab_empty (ht);
11499 md5_init_ctx (&ctx);
11500 fold_checksum_tree (op1, &ctx, ht);
11501 md5_finish_ctx (&ctx, checksum_before_op1);
11502 htab_empty (ht);
11503 #endif
11505 tem = fold_binary (code, type, op0, op1);
11506 if (!tem)
11507 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11509 #ifdef ENABLE_FOLD_CHECKING
11510 md5_init_ctx (&ctx);
11511 fold_checksum_tree (op0, &ctx, ht);
11512 md5_finish_ctx (&ctx, checksum_after_op0);
11513 htab_empty (ht);
11515 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11516 fold_check_failed (op0, tem);
11518 md5_init_ctx (&ctx);
11519 fold_checksum_tree (op1, &ctx, ht);
11520 md5_finish_ctx (&ctx, checksum_after_op1);
11521 htab_delete (ht);
11523 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11524 fold_check_failed (op1, tem);
11525 #endif
11526 return tem;
11529 /* Fold a ternary tree expression with code CODE of type TYPE with
11530 operands OP0, OP1, and OP2. Return a folded expression if
11531 successful. Otherwise, return a tree expression with code CODE of
11532 type TYPE with operands OP0, OP1, and OP2. */
11534 tree
11535 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11536 MEM_STAT_DECL)
11538 tree tem;
11539 #ifdef ENABLE_FOLD_CHECKING
11540 unsigned char checksum_before_op0[16],
11541 checksum_before_op1[16],
11542 checksum_before_op2[16],
11543 checksum_after_op0[16],
11544 checksum_after_op1[16],
11545 checksum_after_op2[16];
11546 struct md5_ctx ctx;
11547 htab_t ht;
11549 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11550 md5_init_ctx (&ctx);
11551 fold_checksum_tree (op0, &ctx, ht);
11552 md5_finish_ctx (&ctx, checksum_before_op0);
11553 htab_empty (ht);
11555 md5_init_ctx (&ctx);
11556 fold_checksum_tree (op1, &ctx, ht);
11557 md5_finish_ctx (&ctx, checksum_before_op1);
11558 htab_empty (ht);
11560 md5_init_ctx (&ctx);
11561 fold_checksum_tree (op2, &ctx, ht);
11562 md5_finish_ctx (&ctx, checksum_before_op2);
11563 htab_empty (ht);
11564 #endif
11566 tem = fold_ternary (code, type, op0, op1, op2);
11567 if (!tem)
11568 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11570 #ifdef ENABLE_FOLD_CHECKING
11571 md5_init_ctx (&ctx);
11572 fold_checksum_tree (op0, &ctx, ht);
11573 md5_finish_ctx (&ctx, checksum_after_op0);
11574 htab_empty (ht);
11576 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11577 fold_check_failed (op0, tem);
11579 md5_init_ctx (&ctx);
11580 fold_checksum_tree (op1, &ctx, ht);
11581 md5_finish_ctx (&ctx, checksum_after_op1);
11582 htab_empty (ht);
11584 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11585 fold_check_failed (op1, tem);
11587 md5_init_ctx (&ctx);
11588 fold_checksum_tree (op2, &ctx, ht);
11589 md5_finish_ctx (&ctx, checksum_after_op2);
11590 htab_delete (ht);
11592 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11593 fold_check_failed (op2, tem);
11594 #endif
11595 return tem;
11598 /* Perform constant folding and related simplification of initializer
11599 expression EXPR. These behave identically to "fold_buildN" but ignore
11600 potential run-time traps and exceptions that fold must preserve. */
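/* For instance, when -frounding-math is in effect fold declines to
   fold inexact results such as 1.0 / 3.0, yet a static initializer
   must still reduce to a constant; these wrappers clear the relevant
   flags around the fold so that it can.  */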
11602 #define START_FOLD_INIT \
11603 int saved_signaling_nans = flag_signaling_nans;\
11604 int saved_trapping_math = flag_trapping_math;\
11605 int saved_rounding_math = flag_rounding_math;\
11606 int saved_trapv = flag_trapv;\
11607 flag_signaling_nans = 0;\
11608 flag_trapping_math = 0;\
11609 flag_rounding_math = 0;\
11610 flag_trapv = 0
11612 #define END_FOLD_INIT \
11613 flag_signaling_nans = saved_signaling_nans;\
11614 flag_trapping_math = saved_trapping_math;\
11615 flag_rounding_math = saved_rounding_math;\
11616 flag_trapv = saved_trapv
11618 tree
11619 fold_build1_initializer (enum tree_code code, tree type, tree op)
11621 tree result;
11622 START_FOLD_INIT;
11624 result = fold_build1 (code, type, op);
11626 END_FOLD_INIT;
11627 return result;
11630 tree
11631 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11633 tree result;
11634 START_FOLD_INIT;
11636 result = fold_build2 (code, type, op0, op1);
11638 END_FOLD_INIT;
11639 return result;
11642 tree
11643 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11644 tree op2)
11646 tree result;
11647 START_FOLD_INIT;
11649 result = fold_build3 (code, type, op0, op1, op2);
11651 END_FOLD_INIT;
11652 return result;
11655 #undef START_FOLD_INIT
11656 #undef END_FOLD_INIT
11658 /* Determine whether the first argument is a multiple of the second argument.
11659 Return 0 if it is not, or if we cannot easily determine it to be.
11661 An example of the sort of thing we care about (at this point; this routine
11662 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11663 fold cases do now) is discovering that
11665 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11667 is a multiple of
11669 SAVE_EXPR (J * 8)
11671 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11673 This code also handles discovering that
11675 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11677 is a multiple of 8 so we don't have to worry about dealing with a
11678 possible remainder.
11680 Note that we *look* inside a SAVE_EXPR only to determine how it was
11681 calculated; it is not safe for fold to do much of anything else with the
11682 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11683 at run time. For example, the latter example above *cannot* be implemented
11684 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11685 evaluation time of the original SAVE_EXPR is not necessarily the same at
11686 the time the new expression is evaluated. The only optimization of this
11687 sort that would be valid is changing
11689 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11691 divided by 8 to
11693 SAVE_EXPR (I) * SAVE_EXPR (J)
11695 (where the same SAVE_EXPR (J) is used in the original and the
11696 transformed version). */
11698 static int
11699 multiple_of_p (tree type, tree top, tree bottom)
11701 if (operand_equal_p (top, bottom, 0))
11702 return 1;
11704 if (TREE_CODE (type) != INTEGER_TYPE)
11705 return 0;
11707 switch (TREE_CODE (top))
11709 case BIT_AND_EXPR:
11710 /* Bitwise and provides a power of two multiple. If the mask is
11711 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
11712 if (!integer_pow2p (bottom))
11713 return 0;
11714 /* FALLTHRU */
11716 case MULT_EXPR:
11717 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11718 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11720 case PLUS_EXPR:
11721 case MINUS_EXPR:
11722 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11723 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11725 case LSHIFT_EXPR:
11726 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11728 tree op1, t1;
11730 op1 = TREE_OPERAND (top, 1);
11731 /* const_binop may not detect overflow correctly,
11732 so check for it explicitly here. */
11733 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11734 > TREE_INT_CST_LOW (op1)
11735 && TREE_INT_CST_HIGH (op1) == 0
11736 && 0 != (t1 = fold_convert (type,
11737 const_binop (LSHIFT_EXPR,
11738 size_one_node,
11739 op1, 0)))
11740 && ! TREE_OVERFLOW (t1))
11741 return multiple_of_p (type, t1, bottom);
11743 return 0;
11745 case NOP_EXPR:
11746 /* Can't handle conversions from non-integral or wider integral type. */
11747 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11748 || (TYPE_PRECISION (type)
11749 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11750 return 0;
11752 /* ... fall through ... */
11754 case SAVE_EXPR:
11755 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
11757 case INTEGER_CST:
11758 if (TREE_CODE (bottom) != INTEGER_CST
11759 || (TYPE_UNSIGNED (type)
11760 && (tree_int_cst_sgn (top) < 0
11761 || tree_int_cst_sgn (bottom) < 0)))
11762 return 0;
11763 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
11764 top, bottom, 0));
11766 default:
11767 return 0;
11771 /* Return true if `t' is known to be non-negative. */
11773 int
11774 tree_expr_nonnegative_p (tree t)
11776 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11777 return 1;
11779 switch (TREE_CODE (t))
11781 case SSA_NAME:
11782 /* Query VRP to see if it has recorded any information about
11783 the range of this object. */
11784 return ssa_name_nonnegative_p (t);
11786 case ABS_EXPR:
11787 /* We can't return 1 if flag_wrapv is set because
11788 ABS_EXPR<INT_MIN> = INT_MIN. */
11789 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11790 return 1;
11791 break;
11793 case INTEGER_CST:
11794 return tree_int_cst_sgn (t) >= 0;
11796 case REAL_CST:
11797 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11799 case PLUS_EXPR:
11800 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11801 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11802 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11804 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11805 both unsigned and at least 2 bits shorter than the result. */
11806 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11807 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11808 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11810 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11811 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11812 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11813 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11815 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11816 TYPE_PRECISION (inner2)) + 1;
11817 return prec < TYPE_PRECISION (TREE_TYPE (t));
11820 break;
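      /* E.g., with 16-bit shorts and 32-bit ints,
	 (int) (unsigned short) a + (int) (unsigned short) b is at
	 most 2 * 65535 = 131070, and prec = 16 + 1 = 17 < 32, so the
	 sum is known non-negative.  */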
11822 case MULT_EXPR:
11823 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11825 /* x * x for floating point x is always non-negative. */
11826 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11827 return 1;
11828 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11829 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11832 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11833 both unsigned and their total bits is shorter than the result. */
11834 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11835 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11836 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11838 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11839 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11840 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11841 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11842 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11843 < TYPE_PRECISION (TREE_TYPE (t));
11845 return 0;
11847 case BIT_AND_EXPR:
11848 case MAX_EXPR:
11849 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11850 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11852 case BIT_IOR_EXPR:
11853 case BIT_XOR_EXPR:
11854 case MIN_EXPR:
11855 case RDIV_EXPR:
11856 case TRUNC_DIV_EXPR:
11857 case CEIL_DIV_EXPR:
11858 case FLOOR_DIV_EXPR:
11859 case ROUND_DIV_EXPR:
11860 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11861 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11863 case TRUNC_MOD_EXPR:
11864 case CEIL_MOD_EXPR:
11865 case FLOOR_MOD_EXPR:
11866 case ROUND_MOD_EXPR:
11867 case SAVE_EXPR:
11868 case NON_LVALUE_EXPR:
11869 case FLOAT_EXPR:
11870 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11872 case COMPOUND_EXPR:
11873 case MODIFY_EXPR:
11874 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11876 case BIND_EXPR:
11877 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
11879 case COND_EXPR:
11880 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
11881 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
11883 case NOP_EXPR:
11885 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11886 tree outer_type = TREE_TYPE (t);
11888 if (TREE_CODE (outer_type) == REAL_TYPE)
11890 if (TREE_CODE (inner_type) == REAL_TYPE)
11891 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11892 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11894 if (TYPE_UNSIGNED (inner_type))
11895 return 1;
11896 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11899 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11901 if (TREE_CODE (inner_type) == REAL_TYPE)
11902 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11903 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11904 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11905 && TYPE_UNSIGNED (inner_type);
11908 break;
11910 case TARGET_EXPR:
11912 tree temp = TARGET_EXPR_SLOT (t);
11913 t = TARGET_EXPR_INITIAL (t);
11915 /* If the initializer is non-void, then it's a normal expression
11916 that will be assigned to the slot. */
11917 if (!VOID_TYPE_P (t))
11918 return tree_expr_nonnegative_p (t);
11920 /* Otherwise, the initializer sets the slot in some way. One common
11921 way is an assignment statement at the end of the initializer. */
11922 while (1)
11924 if (TREE_CODE (t) == BIND_EXPR)
11925 t = expr_last (BIND_EXPR_BODY (t));
11926 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11927 || TREE_CODE (t) == TRY_CATCH_EXPR)
11928 t = expr_last (TREE_OPERAND (t, 0));
11929 else if (TREE_CODE (t) == STATEMENT_LIST)
11930 t = expr_last (t);
11931 else
11932 break;
11934 if (TREE_CODE (t) == MODIFY_EXPR
11935 && TREE_OPERAND (t, 0) == temp)
11936 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11938 return 0;
11941 case CALL_EXPR:
11943 tree fndecl = get_callee_fndecl (t);
11944 tree arglist = TREE_OPERAND (t, 1);
11945 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11946 switch (DECL_FUNCTION_CODE (fndecl))
11948 CASE_FLT_FN (BUILT_IN_ACOS):
11949 CASE_FLT_FN (BUILT_IN_ACOSH):
11950 CASE_FLT_FN (BUILT_IN_CABS):
11951 CASE_FLT_FN (BUILT_IN_COSH):
11952 CASE_FLT_FN (BUILT_IN_ERFC):
11953 CASE_FLT_FN (BUILT_IN_EXP):
11954 CASE_FLT_FN (BUILT_IN_EXP10):
11955 CASE_FLT_FN (BUILT_IN_EXP2):
11956 CASE_FLT_FN (BUILT_IN_FABS):
11957 CASE_FLT_FN (BUILT_IN_FDIM):
11958 CASE_FLT_FN (BUILT_IN_HYPOT):
11959 CASE_FLT_FN (BUILT_IN_POW10):
11960 CASE_INT_FN (BUILT_IN_FFS):
11961 CASE_INT_FN (BUILT_IN_PARITY):
11962 CASE_INT_FN (BUILT_IN_POPCOUNT):
11963 /* Always true. */
11964 return 1;
11966 CASE_FLT_FN (BUILT_IN_SQRT):
11967 /* sqrt(-0.0) is -0.0. */
11968 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11969 return 1;
11970 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11972 CASE_FLT_FN (BUILT_IN_ASINH):
11973 CASE_FLT_FN (BUILT_IN_ATAN):
11974 CASE_FLT_FN (BUILT_IN_ATANH):
11975 CASE_FLT_FN (BUILT_IN_CBRT):
11976 CASE_FLT_FN (BUILT_IN_CEIL):
11977 CASE_FLT_FN (BUILT_IN_ERF):
11978 CASE_FLT_FN (BUILT_IN_EXPM1):
11979 CASE_FLT_FN (BUILT_IN_FLOOR):
11980 CASE_FLT_FN (BUILT_IN_FMOD):
11981 CASE_FLT_FN (BUILT_IN_FREXP):
11982 CASE_FLT_FN (BUILT_IN_LCEIL):
11983 CASE_FLT_FN (BUILT_IN_LDEXP):
11984 CASE_FLT_FN (BUILT_IN_LFLOOR):
11985 CASE_FLT_FN (BUILT_IN_LLCEIL):
11986 CASE_FLT_FN (BUILT_IN_LLFLOOR):
11987 CASE_FLT_FN (BUILT_IN_LLRINT):
11988 CASE_FLT_FN (BUILT_IN_LLROUND):
11989 CASE_FLT_FN (BUILT_IN_LRINT):
11990 CASE_FLT_FN (BUILT_IN_LROUND):
11991 CASE_FLT_FN (BUILT_IN_MODF):
11992 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11993 CASE_FLT_FN (BUILT_IN_POW):
11994 CASE_FLT_FN (BUILT_IN_RINT):
11995 CASE_FLT_FN (BUILT_IN_ROUND):
11996 CASE_FLT_FN (BUILT_IN_SIGNBIT):
11997 CASE_FLT_FN (BUILT_IN_SINH):
11998 CASE_FLT_FN (BUILT_IN_TANH):
11999 CASE_FLT_FN (BUILT_IN_TRUNC):
12000 /* True if the 1st argument is nonnegative. */
12001 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12003 CASE_FLT_FN (BUILT_IN_FMAX):
12004 /* True if the 1st OR 2nd arguments are nonnegative. */
12005 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12006 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12008 CASE_FLT_FN (BUILT_IN_FMIN):
12009 /* True if the 1st AND 2nd arguments are nonnegative. */
12010 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12011 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12013 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12014 /* True if the 2nd argument is nonnegative. */
12015 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12017 default:
12018 break;
12022 /* ... fall through ... */
12024 default:
12025 if (truth_value_p (TREE_CODE (t)))
12026 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12027 return 1;
12030 /* We don't know the sign of `t', so be conservative and return false. */
12031 return 0;
12034 /* Return true when T is an address and is known to be nonzero.
12035 For floating point we further ensure that T is not denormal.
12036 Similar logic is present in nonzero_address in rtlanal.h. */
12038 bool
12039 tree_expr_nonzero_p (tree t)
12041 tree type = TREE_TYPE (t);
12043 /* Doing something useful for floating point would need more work. */
12044 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12045 return false;
12047 switch (TREE_CODE (t))
12049 case SSA_NAME:
12050 /* Query VRP to see if it has recorded any information about
12051 the range of this object. */
12052 return ssa_name_nonzero_p (t);
12054 case ABS_EXPR:
12055 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12057 case INTEGER_CST:
12058 /* We used to test for !integer_zerop here. This does not work correctly
12059 if TREE_CONSTANT_OVERFLOW (t) is set. */
12060 return (TREE_INT_CST_LOW (t) != 0
12061 || TREE_INT_CST_HIGH (t) != 0);
12063 case PLUS_EXPR:
12064 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12066 /* In the presence of negative values it is hard
12067 to say anything definite. */
12068 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12069 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12070 return false;
12071 /* One of the operands must be positive and the other non-negative. */
12072 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12073 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12075 break;
12077 case MULT_EXPR:
12078 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12080 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12081 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12083 break;
12085 case NOP_EXPR:
12087 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12088 tree outer_type = TREE_TYPE (t);
12090 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12091 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12093 break;
12095 case ADDR_EXPR:
12097 tree base = get_base_address (TREE_OPERAND (t, 0));
12099 if (!base)
12100 return false;
12102 /* Weak declarations may link to NULL. */
12103 if (VAR_OR_FUNCTION_DECL_P (base))
12104 return !DECL_WEAK (base);
12106 /* Constants are never weak. */
12107 if (CONSTANT_CLASS_P (base))
12108 return true;
12110 return false;
12113 case COND_EXPR:
12114 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12115 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12117 case MIN_EXPR:
12118 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12119 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12121 case MAX_EXPR:
12122 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12124 /* When both operands are nonzero, then MAX must be too. */
12125 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12126 return true;
12128 /* MAX where operand 0 is positive is positive. */
12129 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12131 /* MAX where operand 1 is positive is positive. */
12132 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12133 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12134 return true;
12135 break;
12137 case COMPOUND_EXPR:
12138 case MODIFY_EXPR:
12139 case BIND_EXPR:
12140 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12142 case SAVE_EXPR:
12143 case NON_LVALUE_EXPR:
12144 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12146 case BIT_IOR_EXPR:
12147 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12148 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12150 case CALL_EXPR:
12151 return alloca_call_p (t);
12153 default:
12154 break;
12156 return false;
12159 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12160 attempt to fold the expression to a constant without modifying TYPE,
12161 OP0 or OP1.
12163 If the expression could be simplified to a constant, then return
12164 the constant. If the expression would not be simplified to a
12165 constant, then return NULL_TREE. */
12167 tree
12168 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12170 tree tem = fold_binary (code, type, op0, op1);
12171 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12174 /* Given the components of a unary expression CODE, TYPE and OP0,
12175 attempt to fold the expression to a constant without modifying
12176 TYPE or OP0.
12178 If the expression could be simplified to a constant, then return
12179 the constant. If the expression would not be simplified to a
12180 constant, then return NULL_TREE. */
12182 tree
12183 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12185 tree tem = fold_unary (code, type, op0);
12186 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12189 /* If EXP represents referencing an element in a constant string
12190 (either via pointer arithmetic or array indexing), return the
12191 tree representing the value accessed, otherwise return NULL. */
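/* For example, "abc"[1] (or, equivalently, *("abc" + 1)) is folded to
   the character constant 'b'.  */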
12193 tree
12194 fold_read_from_constant_string (tree exp)
12196 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12198 tree exp1 = TREE_OPERAND (exp, 0);
12199 tree index;
12200 tree string;
12202 if (TREE_CODE (exp) == INDIRECT_REF)
12203 string = string_constant (exp1, &index);
12204 else
12206 tree low_bound = array_ref_low_bound (exp);
12207 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12209 /* Optimize the special case of a zero lower bound.
12211 We convert the low_bound to sizetype to avoid some problems
12212 with constant folding. (E.g. suppose the lower bound is 1,
12213 and its mode is QI. Without the conversion, (ARRAY
12214 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12215 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12216 if (! integer_zerop (low_bound))
12217 index = size_diffop (index, fold_convert (sizetype, low_bound));
12219 string = exp1;
12222 if (string
12223 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
12224 && TREE_CODE (string) == STRING_CST
12225 && TREE_CODE (index) == INTEGER_CST
12226 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12227 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12228 == MODE_INT)
12229 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12230 return fold_convert (TREE_TYPE (exp),
12231 build_int_cst (NULL_TREE,
12232 (TREE_STRING_POINTER (string)
12233 [TREE_INT_CST_LOW (index)])));
12235 return NULL;
12238 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12239 an integer constant or real constant.
12241 TYPE is the type of the result. */
12243 static tree
12244 fold_negate_const (tree arg0, tree type)
12246 tree t = NULL_TREE;
12248 switch (TREE_CODE (arg0))
12250 case INTEGER_CST:
12252 unsigned HOST_WIDE_INT low;
12253 HOST_WIDE_INT high;
12254 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12255 TREE_INT_CST_HIGH (arg0),
12256 &low, &high);
12257 t = build_int_cst_wide (type, low, high);
12258 t = force_fit_type (t, 1,
12259 (overflow | TREE_OVERFLOW (arg0))
12260 && !TYPE_UNSIGNED (type),
12261 TREE_CONSTANT_OVERFLOW (arg0));
12262 break;
12265 case REAL_CST:
12266 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12267 break;
12269 default:
12270 gcc_unreachable ();
12273 return t;
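/* For example, negating the INTEGER_CST INT_MIN of a signed type
   wraps back to INT_MIN, so force_fit_type marks the result with
   TREE_OVERFLOW.  */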
12276 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12277 an integer constant or real constant.
12279 TYPE is the type of the result. */
12281 tree
12282 fold_abs_const (tree arg0, tree type)
12284 tree t = NULL_TREE;
12286 switch (TREE_CODE (arg0))
12288 case INTEGER_CST:
12289 /* If the value is unsigned, then the absolute value is
12290 the same as the ordinary value. */
12291 if (TYPE_UNSIGNED (type))
12292 t = arg0;
12293 /* Similarly, if the value is non-negative. */
12294 else if (INT_CST_LT (integer_minus_one_node, arg0))
12295 t = arg0;
12296 /* If the value is negative, then the absolute value is
12297 its negation. */
12298 else
12300 unsigned HOST_WIDE_INT low;
12301 HOST_WIDE_INT high;
12302 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12303 TREE_INT_CST_HIGH (arg0),
12304 &low, &high);
12305 t = build_int_cst_wide (type, low, high);
12306 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12307 TREE_CONSTANT_OVERFLOW (arg0));
12309 break;
12311 case REAL_CST:
12312 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12313 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12314 else
12315 t = arg0;
12316 break;
12318 default:
12319 gcc_unreachable ();
12322 return t;
12325 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12326 constant. TYPE is the type of the result. */
12328 static tree
12329 fold_not_const (tree arg0, tree type)
12331 tree t = NULL_TREE;
12333 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12335 t = build_int_cst_wide (type,
12336 ~ TREE_INT_CST_LOW (arg0),
12337 ~ TREE_INT_CST_HIGH (arg0));
12338 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12339 TREE_CONSTANT_OVERFLOW (arg0));
12341 return t;
12344 /* Given CODE, a relational operator, the target type, TYPE and two
12345 constant operands OP0 and OP1, return the result of the
12346 relational operation. If the result is not a compile time
12347 constant, then return NULL_TREE. */
12349 static tree
12350 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12352 int result, invert;
12354 /* From here on, the only cases we handle are when the result is
12355 known to be a constant. */
12357 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12359 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12360 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12362 /* Handle the cases where either operand is a NaN. */
12363 if (real_isnan (c0) || real_isnan (c1))
12365 switch (code)
12367 case EQ_EXPR:
12368 case ORDERED_EXPR:
12369 result = 0;
12370 break;
12372 case NE_EXPR:
12373 case UNORDERED_EXPR:
12374 case UNLT_EXPR:
12375 case UNLE_EXPR:
12376 case UNGT_EXPR:
12377 case UNGE_EXPR:
12378 case UNEQ_EXPR:
12379 result = 1;
12380 break;
12382 case LT_EXPR:
12383 case LE_EXPR:
12384 case GT_EXPR:
12385 case GE_EXPR:
12386 case LTGT_EXPR:
12387 if (flag_trapping_math)
12388 return NULL_TREE;
12389 result = 0;
12390 break;
12392 default:
12393 gcc_unreachable ();
12396 return constant_boolean_node (result, type);
12399 return constant_boolean_node (real_compare (code, c0, c1), type);
12402 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12404 To compute GT, swap the arguments and do LT.
12405 To compute GE, do LT and invert the result.
12406 To compute LE, swap the arguments, do LT and invert the result.
12407 To compute NE, do EQ and invert the result.
12409 Therefore, the code below must handle only EQ and LT. */
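/* For example, 3 >= 5 is computed as !(3 < 5) = !1 = 0.  */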
12411 if (code == LE_EXPR || code == GT_EXPR)
12413 tree tem = op0;
12414 op0 = op1;
12415 op1 = tem;
12416 code = swap_tree_comparison (code);
12419 /* Note that it is safe to invert for real values here because we
12420 have already handled the one case where it matters. */
12422 invert = 0;
12423 if (code == NE_EXPR || code == GE_EXPR)
12425 invert = 1;
12426 code = invert_tree_comparison (code, false);
12429 /* Compute a result for LT or EQ if args permit;
12430 otherwise return NULL_TREE. */
12431 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12433 if (code == EQ_EXPR)
12434 result = tree_int_cst_equal (op0, op1);
12435 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12436 result = INT_CST_LT_UNSIGNED (op0, op1);
12437 else
12438 result = INT_CST_LT (op0, op1);
12440 else
12441 return NULL_TREE;
12443 if (invert)
12444 result ^= 1;
12445 return constant_boolean_node (result, type);

/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a CLEANUP_POINT_EXPR when EXPR has no side effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the MODIFY_EXPR inside the return,
     has side effects.  If not, we don't need to wrap the expression in a
     cleanup point expression.  Note that we don't check the left-hand side
     of the MODIFY_EXPR because it should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
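
/* For example, `return retval = 42;' is returned unwrapped because the
   right-hand side has no side effects, while `return retval = f ();'
   gets wrapped in a CLEANUP_POINT_EXPR because of the call.  */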

/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}
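
/* For example, taking the address of *p simply yields p (with a cast to
   PTRTYPE if the types differ), whereas taking the address of s.f walks
   down to the declaration s and marks it TREE_ADDRESSABLE before
   building the ADDR_EXPR.  */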

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
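
/* For example, when the value of `f () + 42' is ignored, the tcc_binary
   case drops the side-effect-free operand 42 and the call f () is all
   that remains.  */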

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant:
     for a constant, the check is more expensive than simply performing
     the rounding, which folds to a constant anyway.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
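
/* For example, round_up (size_int (10), 8) takes the power-of-two path:
   (10 + 7) & -8 == 16.  */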

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant:
     for a constant, the check is more expensive than simply performing
     the rounding, which folds to a constant anyway.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
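
/* For example, round_down (size_int (10), 8) computes 10 & -8 == 8.  */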

/* Return a pointer to the base of the object addressed by EXP and
   extract the offset of the access, storing the constant bit offset in
   *PBITPOS and any variable offset in *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
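
/* For example, for EXP == &s.f, where f is a field at constant bit
   offset 32, the returned core is &s, *PBITPOS is 32 and *POFFSET is
   NULL_TREE.  */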

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
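
/* For example, given `int a[10]' with 4-byte int, ptr_difference_const
   on &a[3] and &a[1] stores 8 in *DIFF and returns true; if one of the
   two offsets is a variable, it returns false.  */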

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    default:
      break;
    }
  return NULL_TREE;
}
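
/* For example, fold_strip_sign_ops rewrites -x * y as x * y, which is
   valid when only the magnitude of the result matters, e.g. for the
   argument of fabs; the transformation is suppressed when the rounding
   mode makes results depend on operand signs.  */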