* c-common.h: Remove the prototype for yyparse.
[official-gcc.git] / gcc / fold-const.c
blob7ef0fa11839af029be5a95c4a7f0d40313ff5f5c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding is: bit 0 = "less than", bit 1 = "equal",
   bit 2 = "greater than", bit 3 = "unordered".  A composite code is
   the OR of the bits of the outcomes for which the comparison is
   true, so e.g. LE = LT|EQ = 3 and NE = LT|GT|UNORD = 13.  */
enum comparison_code {
  COMPCODE_FALSE = 0,	/* never true */
  COMPCODE_LT = 1,	/* less than */
  COMPCODE_EQ = 2,	/* equal */
  COMPCODE_LE = 3,	/* LT | EQ */
  COMPCODE_GT = 4,	/* greater than */
  COMPCODE_LTGT = 5,	/* LT | GT: ordered and not equal */
  COMPCODE_GE = 6,	/* GT | EQ */
  COMPCODE_ORD = 7,	/* LT | EQ | GT: any ordered outcome */
  COMPCODE_UNORD = 8,	/* unordered (e.g. a NaN operand) */
  COMPCODE_UNLT = 9,	/* UNORD | LT */
  COMPCODE_UNEQ = 10,	/* UNORD | EQ */
  COMPCODE_UNLE = 11,	/* UNORD | LT | EQ */
  COMPCODE_UNGT = 12,	/* UNORD | GT */
  COMPCODE_NE = 13,	/* UNORD | LT | GT */
  COMPCODE_UNGE = 14,	/* UNORD | GT | EQ */
  COMPCODE_TRUE = 15	/* always true */
};
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
106 tree *, tree *);
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree range_predecessor (tree);
112 static tree range_successor (tree);
113 static tree make_range (tree, int *, tree *, tree *);
114 static tree build_range_check (tree, tree, int, tree, tree);
115 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
116 tree);
117 static tree fold_range_test (enum tree_code, tree, tree, tree);
118 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 tree, tree,
127 tree, tree, int);
128 static bool fold_real_zero_addition_p (tree, tree, int);
129 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (tree, tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static int native_encode_expr (tree, unsigned char *, int);
138 static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* Low half-word of X, as an unsigned value below BASE.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* High half-word of X; the cast makes the shift well-defined for
   negative X.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* Radix of the 4-word representation: 2 ** (HOST_BITS_PER_WIDE_INT / 2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
166 static void
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
179 static void
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
181 HOST_WIDE_INT *hi)
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointers and offsets are as wide as a host pointer, regardless of
     what TYPE_PRECISION would say.  */
  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;	/* Value already occupies both words entirely.  */
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then, if the type is signed (or a sign-extended size type),
     replicate the sign bit of the truncated value upward.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  /* Copy before setting flags: build_int_cst_wide may return a
	     shared node — NOTE(review): sharing assumption inferred from
	     the copy_node calls here, confirm against tree.c.  */
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
289 /* Add two doubleword integers with doubleword result.
290 Each argument is given as two `HOST_WIDE_INT' pieces.
291 One argument is L1 and H1; the other, L2 and H2.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
295 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
299 unsigned HOST_WIDE_INT l;
300 HOST_WIDE_INT h;
302 l = l1 + l2;
303 h = h1 + h2 + (l < l1);
305 *lv = l;
306 *hv = h;
307 return OVERFLOW_SUM_SIGN (h1, h2, h);
310 /* Negate a doubleword integer with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
313 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
316 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
317 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
319 if (l1 == 0)
321 *lv = 0;
322 *hv = - h1;
323 return (*hv & h1) < 0;
325 else
327 *lv = -l1;
328 *hv = ~h1;
329 return 0;
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  /* Split each operand into 4 half-wide digits.  */
  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  /* Schoolbook multiplication on the digits; the comments bound the
     intermediate values for a 32-bit digit base.  */
  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);

  /* The digit multiply above treated the inputs as unsigned; for each
     negative signed input, correct the top half by subtracting (adding
     the negation of) the other operand.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }

  /* No overflow iff the top half is all copies of the result's sign
     bit: all ones for a negative result, all zeros otherwise.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* A negative count is a right shift.  */
  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step right shift (by COUNT' then by 1) avoids an
	 undefined shift by the full word size when COUNT is 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  /* All-ones if the result's sign bit (bit PREC-1) is set, else zero.  */
  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;	/* Result already occupies the full doubleword.  */
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* All-ones when doing an arithmetic shift of a negative value,
     zero otherwise.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* The two-step left shift avoids an undefined shift by the full
	 word size when COUNT is 0.  */
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      /* Every value bit was shifted out; only the sign fill remains.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;	/* Remaining bits already fill the doubleword.  */
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
515 /* Rotate the doubleword integer in L1, H1 left by COUNT places
516 keeping only PREC bits of result.
517 Rotate right if COUNT is negative.
518 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
520 void
521 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
522 HOST_WIDE_INT count, unsigned int prec,
523 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
525 unsigned HOST_WIDE_INT s1l, s2l;
526 HOST_WIDE_INT s1h, s2h;
528 count %= prec;
529 if (count < 0)
530 count += prec;
532 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
533 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
534 *lv = s1l | s2l;
535 *hv = s1h | s2h;
538 /* Rotate the doubleword integer in L1, H1 left by COUNT places
539 keeping only PREC bits of result. COUNT must be positive.
540 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
542 void
543 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
544 HOST_WIDE_INT count, unsigned int prec,
545 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
550 count %= prec;
551 if (count < 0)
552 count += prec;
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
556 *lv = s1l | s2l;
557 *hv = s1h | s2h;
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow and divide by 1 instead so the
     rest of the routine still produces a well-defined result.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      /* Single-digit divisor: long division digit by digit, high
	 digit first, carrying the remainder down.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* Adjust the truncated quotient according to the requested rounding
     mode, then recompute the remainder to match.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    /* Round away from zero, in the direction of the quotient's
	       sign.  */
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
838 /* If ARG2 divides ARG1 with zero remainder, carries out the division
839 of type CODE and returns the quotient.
840 Otherwise returns NULL_TREE. */
842 static tree
843 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
845 unsigned HOST_WIDE_INT int1l, int2l;
846 HOST_WIDE_INT int1h, int2h;
847 unsigned HOST_WIDE_INT quol, reml;
848 HOST_WIDE_INT quoh, remh;
849 tree type = TREE_TYPE (arg1);
850 int uns = TYPE_UNSIGNED (type);
852 int1l = TREE_INT_CST_LOW (arg1);
853 int1h = TREE_INT_CST_HIGH (arg1);
854 int2l = TREE_INT_CST_LOW (arg2);
855 int2h = TREE_INT_CST_HIGH (arg2);
857 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
858 &quol, &quoh, &reml, &remh);
859 if (remh != 0 || reml != 0)
860 return NULL_TREE;
862 return build_int_cst_wide (type, quol, quoh);
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    /* NOTE(review): CASE_FLT_FN is defined elsewhere; it appears to
       expand to the case labels for the float/double/long double
       variants of each built-in — confirm against builtins.def.  */
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  /* Unsigned types have no overflowing negation.  */
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Wider than one host word: a nonzero low word means the value
	 cannot be the most negative one, which is the only value whose
	 negation overflows.  Otherwise reduce to testing the high word.  */
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  /* Mask off bits beyond the precision before comparing.  */
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  /* Negation overflows exactly for the most negative value,
     whose bit pattern is 1 << (prec - 1).  */
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -(~A) can become A + 1 for integral types.  */
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      /* Both parts must themselves be negatable.  */
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      /* For floats this changes rounding/sign-of-zero behavior, so it
	 is only allowed under -funsafe-math-optimizations.  */
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Negating either factor/operand negates the result, but not
	 when rounding depends on the sign of intermediate values.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Not for unsigned or wrapping types.  */
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x) for odd built-in math functions.  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when the shift count is exactly precision - 1.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
1027 /* Given T, an expression, return the negation of T. Allow for T to be
1028 null, in which case return null. */
1030 static tree
1031 negate_expr (tree t)
1033 tree type;
1034 tree tem;
1036 if (t == 0)
1037 return 0;
     /* Remember the original type before STRIP_SIGN_NOPS peels
        sign-preserving conversions; every return below converts the
        folded result back to TYPE.  */
1039 type = TREE_TYPE (t);
1040 STRIP_SIGN_NOPS (t);
1042 switch (TREE_CODE (t))
1044 /* Convert - (~A) to A + 1. */
1045 case BIT_NOT_EXPR:
1046 if (INTEGRAL_TYPE_P (type))
1047 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1048 build_int_cst (type, 1));
1049 break;
1051 case INTEGER_CST:
1052 tem = fold_negate_const (t, type);
     /* Use the folded constant unless negation overflowed a signed type
        and -ftrapv requires the overflow to trap at run time.  */
1053 if (! TREE_OVERFLOW (tem)
1054 || TYPE_UNSIGNED (type)
1055 || ! flag_trapv)
1056 return tem;
1057 break;
1059 case REAL_CST:
1060 tem = fold_negate_const (t, type);
1061 /* Two's complement FP formats, such as c4x, may overflow. */
1062 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1063 return fold_convert (type, tem);
1064 break;
1066 case COMPLEX_CST:
1068 tree rpart = negate_expr (TREE_REALPART (t));
1069 tree ipart = negate_expr (TREE_IMAGPART (t));
     /* Only build a COMPLEX_CST when both parts folded to constants;
        otherwise fall through to the generic NEGATE_EXPR below.  */
1071 if ((TREE_CODE (rpart) == REAL_CST
1072 && TREE_CODE (ipart) == REAL_CST)
1073 || (TREE_CODE (rpart) == INTEGER_CST
1074 && TREE_CODE (ipart) == INTEGER_CST))
1075 return build_complex (type, rpart, ipart);
1077 break;
1079 case NEGATE_EXPR:
     /* - (- A) -> A.  */
1080 return fold_convert (type, TREE_OPERAND (t, 0));
1082 case PLUS_EXPR:
     /* For floats this reassociation is only valid under
        -funsafe-math-optimizations.  */
1083 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1085 /* -(A + B) -> (-B) - A. */
1086 if (negate_expr_p (TREE_OPERAND (t, 1))
1087 && reorder_operands_p (TREE_OPERAND (t, 0),
1088 TREE_OPERAND (t, 1)))
1090 tem = negate_expr (TREE_OPERAND (t, 1));
1091 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092 tem, TREE_OPERAND (t, 0));
1093 return fold_convert (type, tem);
1096 /* -(A + B) -> (-A) - B. */
1097 if (negate_expr_p (TREE_OPERAND (t, 0)))
1099 tem = negate_expr (TREE_OPERAND (t, 0));
1100 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1101 tem, TREE_OPERAND (t, 1));
1102 return fold_convert (type, tem);
1105 break;
1107 case MINUS_EXPR:
1108 /* - (A - B) -> B - A */
1109 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1110 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1111 return fold_convert (type,
1112 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1113 TREE_OPERAND (t, 1),
1114 TREE_OPERAND (t, 0)));
1115 break;
1117 case MULT_EXPR:
1118 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1119 break;
1121 /* Fall through. */
1123 case RDIV_EXPR:
     /* Push the negation into whichever operand accepts it:
        -(A*B) -> A*(-B) or (-A)*B, likewise for real division.  */
1124 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1126 tem = TREE_OPERAND (t, 1);
1127 if (negate_expr_p (tem))
1128 return fold_convert (type,
1129 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1130 TREE_OPERAND (t, 0),
1131 negate_expr (tem)));
1132 tem = TREE_OPERAND (t, 0);
1133 if (negate_expr_p (tem))
1134 return fold_convert (type,
1135 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1136 negate_expr (tem),
1137 TREE_OPERAND (t, 1)));
1139 break;
1141 case TRUNC_DIV_EXPR:
1142 case ROUND_DIV_EXPR:
1143 case FLOOR_DIV_EXPR:
1144 case CEIL_DIV_EXPR:
1145 case EXACT_DIV_EXPR:
     /* Same operand-negation trick for integer division, but only for
        signed types without -fwrapv semantics.  */
1146 if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
1148 tem = TREE_OPERAND (t, 1);
1149 if (negate_expr_p (tem))
1150 return fold_convert (type,
1151 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1152 TREE_OPERAND (t, 0),
1153 negate_expr (tem)));
1154 tem = TREE_OPERAND (t, 0);
1155 if (negate_expr_p (tem))
1156 return fold_convert (type,
1157 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1158 negate_expr (tem),
1159 TREE_OPERAND (t, 1)));
1161 break;
1163 case NOP_EXPR:
1164 /* Convert -((double)float) into (double)(-float). */
1165 if (TREE_CODE (type) == REAL_TYPE)
1167 tem = strip_float_extensions (t);
1168 if (tem != t && negate_expr_p (tem))
1169 return fold_convert (type, negate_expr (tem));
1171 break;
1173 case CALL_EXPR:
1174 /* Negate -f(x) as f(-x). */
     /* negate_mathfn_p accepts only odd math builtins (per its use with
        builtin_mathfn_code) -- presumably sin/tan-like; confirm there.  */
1175 if (negate_mathfn_p (builtin_mathfn_code (t))
1176 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1178 tree fndecl, arg, arglist;
1180 fndecl = get_callee_fndecl (t);
1181 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1182 arglist = build_tree_list (NULL_TREE, arg);
1183 return build_function_call_expr (fndecl, arglist);
1185 break;
1187 case RSHIFT_EXPR:
1188 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1189 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1191 tree op1 = TREE_OPERAND (t, 1);
     /* Only when the shift count is exactly precision-1, so the result
        is 0 or -1 (signed) / 0 or 1 (unsigned); flipping signedness of
        the shift then implements the negation.  */
1192 if (TREE_INT_CST_HIGH (op1) == 0
1193 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1194 == TREE_INT_CST_LOW (op1))
1196 tree ntype = TYPE_UNSIGNED (type)
1197 ? lang_hooks.types.signed_type (type)
1198 : lang_hooks.types.unsigned_type (type);
1199 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1200 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1201 return fold_convert (type, temp);
1204 break;
1206 default:
1207 break;
     /* No simplification matched: build an explicit NEGATE_EXPR.  */
1210 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1211 return fold_convert (type, tem);
1214 /* Split a tree IN into a constant, literal and variable parts that could be
1215 combined with CODE to make IN. "constant" means an expression with
1216 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1217 commutative arithmetic operation. Store the constant part into *CONP,
1218 the literal in *LITP and return the variable part. If a part isn't
1219 present, set it to null. If the tree does not decompose in this way,
1220 return the entire tree as the variable part and the other parts as null.
1222 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1223 case, we negate an operand that was subtracted. Except if it is a
1224 literal for which we use *MINUS_LITP instead.
1226 If NEGATE_P is true, we are negating all of IN, again except a literal
1227 for which we use *MINUS_LITP instead.
1229 If IN is itself a literal or constant, return it as appropriate.
1231 Note that we do not guarantee that any of the three values will be the
1232 same type as IN, but they will have the same signedness and mode. */
1234 static tree
1235 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1236 tree *minus_litp, int negate_p)
1238 tree var = 0;
1240 *conp = 0;
1241 *litp = 0;
1242 *minus_litp = 0;
1244 /* Strip any conversions that don't change the machine mode or signedness. */
1245 STRIP_SIGN_NOPS (in);
1247 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1248 *litp = in;
1249 else if (TREE_CODE (in) == code
1250 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1251 /* We can associate addition and subtraction together (even
1252 though the C standard doesn't say so) for integers because
1253 the value is not affected. For reals, the value might be
1254 affected, so we can't. */
1255 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1256 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1258 tree op0 = TREE_OPERAND (in, 0);
1259 tree op1 = TREE_OPERAND (in, 1);
     /* neg1_p records that IN is a subtraction, so whichever part comes
        from operand 1 must be negated when recombined.  */
1260 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1261 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1263 /* First see if either of the operands is a literal, then a constant. */
1264 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1265 *litp = op0, op0 = 0;
1266 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1267 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1269 if (op0 != 0 && TREE_CONSTANT (op0))
1270 *conp = op0, op0 = 0;
1271 else if (op1 != 0 && TREE_CONSTANT (op1))
1272 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1274 /* If we haven't dealt with either operand, this is not a case we can
1275 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1276 if (op0 != 0 && op1 != 0)
1277 var = in;
1278 else if (op0 != 0)
1279 var = op0;
1280 else
1281 var = op1, neg_var_p = neg1_p;
1283 /* Now do any needed negations. */
     /* A negated literal is reported via *MINUS_LITP rather than being
        rewritten, so callers can keep it exact.  */
1284 if (neg_litp_p)
1285 *minus_litp = *litp, *litp = 0;
1286 if (neg_conp_p)
1287 *conp = negate_expr (*conp);
1288 if (neg_var_p)
1289 var = negate_expr (var);
1291 else if (TREE_CONSTANT (in))
1292 *conp = in;
1293 else
1294 var = in;
1296 if (negate_p)
     /* Negating the whole expression: swap the literal between *LITP and
        *MINUS_LITP, and negate the other parts in place.  */
1298 if (*litp)
1299 *minus_litp = *litp, *litp = 0;
1300 else if (*minus_litp)
1301 *litp = *minus_litp, *minus_litp = 0;
1302 *conp = negate_expr (*conp);
1303 var = negate_expr (var);
1306 return var;
1309 /* Re-associate trees split by the above function. T1 and T2 are either
1310 expressions to associate or null. Return the new expression, if any. If
1311 we build an operation, do it in TYPE and with CODE. */
1313 static tree
1314 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1316 if (t1 == 0)
1317 return t2;
1318 else if (t2 == 0)
1319 return t1;
1321 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1322 try to fold this since we will have infinite recursion. But do
1323 deal with any NEGATE_EXPRs. */
1324 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1325 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1327 if (code == PLUS_EXPR)
     /* A + (-B) and (-A) + B are rebuilt as subtractions; A + 0 is A.
        build2 (not fold_build2) avoids re-entering fold.  */
1329 if (TREE_CODE (t1) == NEGATE_EXPR)
1330 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1331 fold_convert (type, TREE_OPERAND (t1, 0)));
1332 else if (TREE_CODE (t2) == NEGATE_EXPR)
1333 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1334 fold_convert (type, TREE_OPERAND (t2, 0)));
1335 else if (integer_zerop (t2))
1336 return fold_convert (type, t1);
1338 else if (code == MINUS_EXPR)
     /* A - 0 is just A.  */
1340 if (integer_zerop (t2))
1341 return fold_convert (type, t1);
1344 return build2 (code, type, fold_convert (type, t1),
1345 fold_convert (type, t2));
     /* Safe to fold: neither operand can recurse back into this shape.  */
1348 return fold_build2 (code, type, fold_convert (type, t1),
1349 fold_convert (type, t2));
1352 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1353 to produce a new constant. Return NULL_TREE if we don't know how
1354 to evaluate CODE at compile-time.
1356 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1358 tree
1359 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
     /* Constants are handled as (high, low) HOST_WIDE_INT pairs, i.e.
        double-width integers manipulated by the *_double helpers.  */
1361 unsigned HOST_WIDE_INT int1l, int2l;
1362 HOST_WIDE_INT int1h, int2h;
1363 unsigned HOST_WIDE_INT low;
1364 HOST_WIDE_INT hi;
1365 unsigned HOST_WIDE_INT garbagel;
1366 HOST_WIDE_INT garbageh;
1367 tree t;
1368 tree type = TREE_TYPE (arg1);
1369 int uns = TYPE_UNSIGNED (type);
1370 int is_sizetype
1371 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1372 int overflow = 0;
1374 int1l = TREE_INT_CST_LOW (arg1);
1375 int1h = TREE_INT_CST_HIGH (arg1);
1376 int2l = TREE_INT_CST_LOW (arg2);
1377 int2h = TREE_INT_CST_HIGH (arg2);
1379 switch (code)
1381 case BIT_IOR_EXPR:
1382 low = int1l | int2l, hi = int1h | int2h;
1383 break;
1385 case BIT_XOR_EXPR:
1386 low = int1l ^ int2l, hi = int1h ^ int2h;
1387 break;
1389 case BIT_AND_EXPR:
1390 low = int1l & int2l, hi = int1h & int2h;
1391 break;
1393 case RSHIFT_EXPR:
     /* A right shift is a left shift by the negated count;
        fall through to LSHIFT_EXPR.  */
1394 int2l = -int2l;
1395 case LSHIFT_EXPR:
1396 /* It's unclear from the C standard whether shifts can overflow.
1397 The following code ignores overflow; perhaps a C standard
1398 interpretation ruling is needed. */
1399 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1400 &low, &hi, !uns);
1401 break;
1403 case RROTATE_EXPR:
     /* Likewise: right rotate == left rotate by the negated count.  */
1404 int2l = - int2l;
1405 case LROTATE_EXPR:
1406 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1407 &low, &hi);
1408 break;
1410 case PLUS_EXPR:
1411 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1412 break;
1414 case MINUS_EXPR:
1415 neg_double (int2l, int2h, &low, &hi);
1416 add_double (int1l, int1h, low, hi, &low, &hi);
1417 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1418 break;
1420 case MULT_EXPR:
1421 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1422 break;
1424 case TRUNC_DIV_EXPR:
1425 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1426 case EXACT_DIV_EXPR:
1427 /* This is a shortcut for a common special case. */
     /* Both operands small and nonnegative: plain host division works.  */
1428 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1429 && ! TREE_CONSTANT_OVERFLOW (arg1)
1430 && ! TREE_CONSTANT_OVERFLOW (arg2)
1431 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1433 if (code == CEIL_DIV_EXPR)
1434 int1l += int2l - 1;
1436 low = int1l / int2l, hi = 0;
1437 break;
1440 /* ... fall through ... */
1442 case ROUND_DIV_EXPR:
     /* Division by zero is never folded.  */
1443 if (int2h == 0 && int2l == 0)
1444 return NULL_TREE;
1445 if (int2h == 0 && int2l == 1)
1447 low = int1l, hi = int1h;
1448 break;
1450 if (int1l == int2l && int1h == int2h
1451 && ! (int1l == 0 && int1h == 0))
1453 low = 1, hi = 0;
1454 break;
1456 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1457 &low, &hi, &garbagel, &garbageh);
1458 break;
1460 case TRUNC_MOD_EXPR:
1461 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1462 /* This is a shortcut for a common special case. */
1463 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1464 && ! TREE_CONSTANT_OVERFLOW (arg1)
1465 && ! TREE_CONSTANT_OVERFLOW (arg2)
1466 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1468 if (code == CEIL_MOD_EXPR)
1469 int1l += int2l - 1;
1470 low = int1l % int2l, hi = 0;
1471 break;
1474 /* ... fall through ... */
1476 case ROUND_MOD_EXPR:
1477 if (int2h == 0 && int2l == 0)
1478 return NULL_TREE;
     /* For MOD the remainder outputs are the last two pointers; the
        quotient goes into the garbage slots.  */
1479 overflow = div_and_round_double (code, uns,
1480 int1l, int1h, int2l, int2h,
1481 &garbagel, &garbageh, &low, &hi);
1482 break;
1484 case MIN_EXPR:
1485 case MAX_EXPR:
     /* LOW temporarily holds the boolean "arg1 < arg2", comparing the
        high words first (signed or unsigned as appropriate).  */
1486 if (uns)
1487 low = (((unsigned HOST_WIDE_INT) int1h
1488 < (unsigned HOST_WIDE_INT) int2h)
1489 || (((unsigned HOST_WIDE_INT) int1h
1490 == (unsigned HOST_WIDE_INT) int2h)
1491 && int1l < int2l));
1492 else
1493 low = (int1h < int2h
1494 || (int1h == int2h && int1l < int2l));
1496 if (low == (code == MIN_EXPR))
1497 low = int1l, hi = int1h;
1498 else
1499 low = int2l, hi = int2h;
1500 break;
1502 default:
1503 return NULL_TREE;
1506 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1508 if (notrunc)
1510 /* Propagate overflow flags ourselves. */
1511 if (((!uns || is_sizetype) && overflow)
1512 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1514 t = copy_node (t);
1515 TREE_OVERFLOW (t) = 1;
1516 TREE_CONSTANT_OVERFLOW (t) = 1;
1518 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1520 t = copy_node (t);
1521 TREE_CONSTANT_OVERFLOW (t) = 1;
1524 else
     /* Normal path: truncate to the type and let force_fit_type
        combine the overflow indicators.  */
1525 t = force_fit_type (t, 1,
1526 ((!uns || is_sizetype) && overflow)
1527 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1528 TREE_CONSTANT_OVERFLOW (arg1)
1529 | TREE_CONSTANT_OVERFLOW (arg2));
1531 return t;
1534 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1535 constant. We assume ARG1 and ARG2 have the same data type, or at least
1536 are the same kind of constant and the same machine mode.
1538 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1540 static tree
1541 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1543 STRIP_NOPS (arg1);
1544 STRIP_NOPS (arg2);
     /* Dispatch on the kind of constant: integer, real, or complex.  */
1546 if (TREE_CODE (arg1) == INTEGER_CST)
1547 return int_const_binop (code, arg1, arg2, notrunc);
1549 if (TREE_CODE (arg1) == REAL_CST)
1551 enum machine_mode mode;
1552 REAL_VALUE_TYPE d1;
1553 REAL_VALUE_TYPE d2;
1554 REAL_VALUE_TYPE value;
1555 REAL_VALUE_TYPE result;
1556 bool inexact;
1557 tree t, type;
1559 /* The following codes are handled by real_arithmetic. */
1560 switch (code)
1562 case PLUS_EXPR:
1563 case MINUS_EXPR:
1564 case MULT_EXPR:
1565 case RDIV_EXPR:
1566 case MIN_EXPR:
1567 case MAX_EXPR:
1568 break;
1570 default:
1571 return NULL_TREE;
1574 d1 = TREE_REAL_CST (arg1);
1575 d2 = TREE_REAL_CST (arg2);
1577 type = TREE_TYPE (arg1);
1578 mode = TYPE_MODE (type);
1580 /* Don't perform operation if we honor signaling NaNs and
1581 either operand is a NaN. */
1582 if (HONOR_SNANS (mode)
1583 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1584 return NULL_TREE;
1586 /* Don't perform operation if it would raise a division
1587 by zero exception. */
1588 if (code == RDIV_EXPR
1589 && REAL_VALUES_EQUAL (d2, dconst0)
1590 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1591 return NULL_TREE;
1593 /* If either operand is a NaN, just return it. Otherwise, set up
1594 for floating-point trap; we return an overflow. */
1595 if (REAL_VALUE_ISNAN (d1))
1596 return arg1;
1597 else if (REAL_VALUE_ISNAN (d2))
1598 return arg2;
     /* Compute in infinite precision, then round into the target mode.  */
1600 inexact = real_arithmetic (&value, code, &d1, &d2);
1601 real_convert (&result, mode, &value);
1603 /* Don't constant fold this floating point operation if
1604 the result has overflowed and flag_trapping_math. */
1606 if (flag_trapping_math
1607 && MODE_HAS_INFINITIES (mode)
1608 && REAL_VALUE_ISINF (result)
1609 && !REAL_VALUE_ISINF (d1)
1610 && !REAL_VALUE_ISINF (d2))
1611 return NULL_TREE;
1613 /* Don't constant fold this floating point operation if the
1614 result may dependent upon the run-time rounding mode and
1615 flag_rounding_math is set, or if GCC's software emulation
1616 is unable to accurately represent the result. */
1618 if ((flag_rounding_math
1619 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1620 && !flag_unsafe_math_optimizations))
1621 && (inexact || !real_identical (&result, &value)))
1622 return NULL_TREE;
1624 t = build_real (type, result);
1626 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1627 TREE_CONSTANT_OVERFLOW (t)
1628 = TREE_OVERFLOW (t)
1629 | TREE_CONSTANT_OVERFLOW (arg1)
1630 | TREE_CONSTANT_OVERFLOW (arg2);
1631 return t;
1634 if (TREE_CODE (arg1) == COMPLEX_CST)
1636 tree type = TREE_TYPE (arg1);
1637 tree r1 = TREE_REALPART (arg1);
1638 tree i1 = TREE_IMAGPART (arg1);
1639 tree r2 = TREE_REALPART (arg2);
1640 tree i2 = TREE_IMAGPART (arg2);
1641 tree t;
1643 switch (code)
1645 case PLUS_EXPR:
1646 t = build_complex (type,
1647 const_binop (PLUS_EXPR, r1, r2, notrunc),
1648 const_binop (PLUS_EXPR, i1, i2, notrunc));
1649 break;
1651 case MINUS_EXPR:
1652 t = build_complex (type,
1653 const_binop (MINUS_EXPR, r1, r2, notrunc),
1654 const_binop (MINUS_EXPR, i1, i2, notrunc));
1655 break;
1657 case MULT_EXPR:
     /* (r1 + i1*i) * (r2 + i2*i)
        = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1658 t = build_complex (type,
1659 const_binop (MINUS_EXPR,
1660 const_binop (MULT_EXPR,
1661 r1, r2, notrunc),
1662 const_binop (MULT_EXPR,
1663 i1, i2, notrunc),
1664 notrunc),
1665 const_binop (PLUS_EXPR,
1666 const_binop (MULT_EXPR,
1667 r1, i2, notrunc),
1668 const_binop (MULT_EXPR,
1669 i1, r2, notrunc),
1670 notrunc));
1671 break;
1673 case RDIV_EXPR:
     /* Divide by multiplying with the conjugate:
        (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c^2+d^2).  */
1675 tree t1, t2, real, imag;
1676 tree magsquared
1677 = const_binop (PLUS_EXPR,
1678 const_binop (MULT_EXPR, r2, r2, notrunc),
1679 const_binop (MULT_EXPR, i2, i2, notrunc),
1680 notrunc);
1682 t1 = const_binop (PLUS_EXPR,
1683 const_binop (MULT_EXPR, r1, r2, notrunc),
1684 const_binop (MULT_EXPR, i1, i2, notrunc),
1685 notrunc);
1686 t2 = const_binop (MINUS_EXPR,
1687 const_binop (MULT_EXPR, i1, r2, notrunc),
1688 const_binop (MULT_EXPR, r1, i2, notrunc),
1689 notrunc);
1691 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1693 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1694 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1696 else
1698 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1699 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1700 if (!real || !imag)
1701 return NULL_TREE;
1704 t = build_complex (type, real, imag);
1706 break;
1708 default:
1709 return NULL_TREE;
1711 return t;
1713 return NULL_TREE;
1716 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1717 indicates which particular sizetype to create. */
1719 tree
1720 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
     /* KIND indexes the global sizetype_tab (sizetype, bitsizetype, ...).  */
1722 return build_int_cst (sizetype_tab[(int) kind], number);
1725 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1726 is a tree code. The type of the result is taken from the operands.
1727 Both must be the same type integer type and it must be a size type.
1728 If the operands are constant, so is the result. */
1730 tree
1731 size_binop (enum tree_code code, tree arg0, tree arg1)
1733 tree type = TREE_TYPE (arg0);
1735 if (arg0 == error_mark_node || arg1 == error_mark_node)
1736 return error_mark_node;
     /* Both operands must share the same sizetype flavor.  */
1738 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1739 && type == TREE_TYPE (arg1));
1741 /* Handle the special case of two integer constants faster. */
1742 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1744 /* And some specific cases even faster than that. */
1745 if (code == PLUS_EXPR && integer_zerop (arg0))
1746 return arg1;
1747 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1748 && integer_zerop (arg1))
1749 return arg0;
1750 else if (code == MULT_EXPR && integer_onep (arg0))
1751 return arg1;
1753 /* Handle general case of two integer constants. */
1754 return int_const_binop (code, arg0, arg1, 0);
     /* Non-constant operands: build and fold a regular binary expression.  */
1757 return fold_build2 (code, type, arg0, arg1);
1760 /* Given two values, either both of sizetype or both of bitsizetype,
1761 compute the difference between the two values. Return the value
1762 in signed type corresponding to the type of the operands. */
1764 tree
1765 size_diffop (tree arg0, tree arg1)
1767 tree type = TREE_TYPE (arg0);
1768 tree ctype;
1770 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1771 && type == TREE_TYPE (arg1));
1773 /* If the type is already signed, just do the simple thing. */
1774 if (!TYPE_UNSIGNED (type))
1775 return size_binop (MINUS_EXPR, arg0, arg1);
     /* Pick the signed counterpart of the operands' sizetype.  */
1777 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1779 /* If either operand is not a constant, do the conversions to the signed
1780 type and subtract. The hardware will do the right thing with any
1781 overflow in the subtraction. */
1782 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1783 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1784 fold_convert (ctype, arg1));
1786 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1787 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1788 overflow) and negate (which can't either). Special-case a result
1789 of zero while we're here. */
1790 if (tree_int_cst_equal (arg0, arg1))
1791 return build_int_cst (ctype, 0);
1792 else if (tree_int_cst_lt (arg1, arg0))
1793 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1794 else
1795 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1796 fold_convert (ctype, size_binop (MINUS_EXPR,
1797 arg1, arg0)));
1800 /* A subroutine of fold_convert_const handling conversions of an
1801 INTEGER_CST to another integer type. */
1803 static tree
1804 fold_convert_const_int_from_int (tree type, tree arg1)
1806 tree t;
1808 /* Given an integer constant, make new constant with new type,
1809 appropriately sign-extended or truncated. */
1810 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1811 TREE_INT_CST_HIGH (arg1));
1813 t = force_fit_type (t,
1814 /* Don't set the overflow when
1815 converting a pointer */
1816 !POINTER_TYPE_P (TREE_TYPE (arg1)),
     /* A negative value converted from signed to unsigned counts as
        overflow, on top of any overflow already on ARG1.  */
1817 (TREE_INT_CST_HIGH (arg1) < 0
1818 && (TYPE_UNSIGNED (type)
1819 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1820 | TREE_OVERFLOW (arg1),
1821 TREE_CONSTANT_OVERFLOW (arg1));
1823 return t;
1826 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1827 to an integer type. */
1829 static tree
1830 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1832 int overflow = 0;
1833 tree t;
1835 /* The following code implements the floating point to integer
1836 conversion rules required by the Java Language Specification,
1837 that IEEE NaNs are mapped to zero and values that overflow
1838 the target precision saturate, i.e. values greater than
1839 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1840 are mapped to INT_MIN. These semantics are allowed by the
1841 C and C++ standards that simply state that the behavior of
1842 FP-to-integer conversion is unspecified upon overflow. */
1844 HOST_WIDE_INT high, low;
1845 REAL_VALUE_TYPE r;
1846 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
     /* First round X to an integral real value per CODE's rounding rule.  */
1848 switch (code)
1850 case FIX_TRUNC_EXPR:
1851 real_trunc (&r, VOIDmode, &x);
1852 break;
1854 case FIX_CEIL_EXPR:
1855 real_ceil (&r, VOIDmode, &x);
1856 break;
1858 case FIX_FLOOR_EXPR:
1859 real_floor (&r, VOIDmode, &x);
1860 break;
1862 case FIX_ROUND_EXPR:
1863 real_round (&r, VOIDmode, &x);
1864 break;
1866 default:
1867 gcc_unreachable ();
1870 /* If R is NaN, return zero and show we have an overflow. */
1871 if (REAL_VALUE_ISNAN (r))
1873 overflow = 1;
1874 high = 0;
1875 low = 0;
1878 /* See if R is less than the lower bound or greater than the
1879 upper bound. */
1881 if (! overflow)
1883 tree lt = TYPE_MIN_VALUE (type);
1884 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1885 if (REAL_VALUES_LESS (r, l))
     /* Saturate at the type's minimum.  */
1887 overflow = 1;
1888 high = TREE_INT_CST_HIGH (lt);
1889 low = TREE_INT_CST_LOW (lt);
1893 if (! overflow)
1895 tree ut = TYPE_MAX_VALUE (type);
1896 if (ut)
1898 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1899 if (REAL_VALUES_LESS (u, r))
     /* Saturate at the type's maximum.  */
1901 overflow = 1;
1902 high = TREE_INT_CST_HIGH (ut);
1903 low = TREE_INT_CST_LOW (ut);
1908 if (! overflow)
1909 REAL_VALUE_TO_INT (&low, &high, r);
1911 t = build_int_cst_wide (type, low, high);
1913 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1914 TREE_CONSTANT_OVERFLOW (arg1));
1915 return t;
1918 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1919 to another floating point type. */
1921 static tree
1922 fold_convert_const_real_from_real (tree type, tree arg1)
1924 REAL_VALUE_TYPE value;
1925 tree t;
     /* Round the value into the target type's machine mode, then
        propagate ARG1's overflow flags onto the new constant.  */
1927 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1928 t = build_real (type, value);
1930 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1931 TREE_CONSTANT_OVERFLOW (t)
1932 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1933 return t;
1936 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1937 type TYPE. If no simplification can be done return NULL_TREE. */
1939 static tree
1940 fold_convert_const (enum tree_code code, tree type, tree arg1)
1942 if (TREE_TYPE (arg1) == type)
1943 return arg1;
     /* Dispatch on (target type kind, constant kind); unhandled
        combinations fall through to NULL_TREE.  */
1945 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1947 if (TREE_CODE (arg1) == INTEGER_CST)
1948 return fold_convert_const_int_from_int (type, arg1);
1949 else if (TREE_CODE (arg1) == REAL_CST)
1950 return fold_convert_const_int_from_real (code, type, arg1);
1952 else if (TREE_CODE (type) == REAL_TYPE)
1954 if (TREE_CODE (arg1) == INTEGER_CST)
1955 return build_real_from_int_cst (type, arg1);
1956 if (TREE_CODE (arg1) == REAL_CST)
1957 return fold_convert_const_real_from_real (type, arg1);
1959 return NULL_TREE;
1962 /* Construct a vector of zero elements of vector type TYPE. */
1964 static tree
1965 build_zero_vector (tree type)
1967 tree elem, list;
1968 int i, units;
     /* Fold a single zero of the element type, then replicate it once
        per vector lane into a TREE_LIST for build_vector.  */
1970 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1971 units = TYPE_VECTOR_SUBPARTS (type);
1973 list = NULL_TREE;
1974 for (i = 0; i < units; i++)
1975 list = tree_cons (NULL_TREE, elem, list);
1976 return build_vector (type, list);
1979 /* Convert expression ARG to type TYPE. Used by the middle-end for
1980 simple conversions in preference to calling the front-end's convert. */
1982 tree
1983 fold_convert (tree type, tree arg)
1985 tree orig = TREE_TYPE (arg);
1986 tree tem;
1988 if (type == orig)
1989 return arg;
1991 if (TREE_CODE (arg) == ERROR_MARK
1992 || TREE_CODE (type) == ERROR_MARK
1993 || TREE_CODE (orig) == ERROR_MARK)
1994 return error_mark_node;
     /* Same main variant (or language-level compatible): a plain NOP
        conversion suffices.  */
1996 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1997 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1998 TYPE_MAIN_VARIANT (orig)))
1999 return fold_build1 (NOP_EXPR, type, arg);
2001 switch (TREE_CODE (type))
2003 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2004 case POINTER_TYPE: case REFERENCE_TYPE:
2005 case OFFSET_TYPE:
2006 if (TREE_CODE (arg) == INTEGER_CST)
2008 tem = fold_convert_const (NOP_EXPR, type, arg);
2009 if (tem != NULL_TREE)
2010 return tem;
2012 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2013 || TREE_CODE (orig) == OFFSET_TYPE)
2014 return fold_build1 (NOP_EXPR, type, arg);
2015 if (TREE_CODE (orig) == COMPLEX_TYPE)
     /* Complex -> scalar: take the real part and convert that.  */
2017 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2018 return fold_convert (type, tem);
2020 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2021 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2022 return fold_build1 (NOP_EXPR, type, arg);
2024 case REAL_TYPE:
2025 if (TREE_CODE (arg) == INTEGER_CST)
2027 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2028 if (tem != NULL_TREE)
2029 return tem;
2031 else if (TREE_CODE (arg) == REAL_CST)
2033 tem = fold_convert_const (NOP_EXPR, type, arg);
2034 if (tem != NULL_TREE)
2035 return tem;
2038 switch (TREE_CODE (orig))
2040 case INTEGER_TYPE:
2041 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2042 case POINTER_TYPE: case REFERENCE_TYPE:
2043 return fold_build1 (FLOAT_EXPR, type, arg);
2045 case REAL_TYPE:
2046 return fold_build1 (NOP_EXPR, type, arg);
2048 case COMPLEX_TYPE:
2049 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2050 return fold_convert (type, tem);
2052 default:
2053 gcc_unreachable ();
2056 case COMPLEX_TYPE:
2057 switch (TREE_CODE (orig))
2059 case INTEGER_TYPE:
2060 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2061 case POINTER_TYPE: case REFERENCE_TYPE:
2062 case REAL_TYPE:
     /* Scalar -> complex: converted value + zero imaginary part.  */
2063 return build2 (COMPLEX_EXPR, type,
2064 fold_convert (TREE_TYPE (type), arg),
2065 fold_convert (TREE_TYPE (type), integer_zero_node));
2066 case COMPLEX_TYPE:
2068 tree rpart, ipart;
2070 if (TREE_CODE (arg) == COMPLEX_EXPR)
2072 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2073 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2074 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
     /* ARG is used twice below, so protect it with a SAVE_EXPR.  */
2077 arg = save_expr (arg);
2078 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2079 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2080 rpart = fold_convert (TREE_TYPE (type), rpart);
2081 ipart = fold_convert (TREE_TYPE (type), ipart);
2082 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2085 default:
2086 gcc_unreachable ();
2089 case VECTOR_TYPE:
2090 if (integer_zerop (arg))
2091 return build_zero_vector (type);
2092 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2093 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2094 || TREE_CODE (orig) == VECTOR_TYPE);
2095 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2097 case VOID_TYPE:
2098 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2100 default:
2101 gcc_unreachable ();
2105 /* Return false if expr can be assumed not to be an lvalue, true
2106 otherwise. */
2108 static bool
2109 maybe_lvalue_p (tree x)
2111 /* We only need to wrap lvalue tree codes. */
     /* Listed codes break out of the switch and return true below;
        everything else returns false from the default case.  */
2112 switch (TREE_CODE (x))
2114 case VAR_DECL:
2115 case PARM_DECL:
2116 case RESULT_DECL:
2117 case LABEL_DECL:
2118 case FUNCTION_DECL:
2119 case SSA_NAME:
2121 case COMPONENT_REF:
2122 case INDIRECT_REF:
2123 case ALIGN_INDIRECT_REF:
2124 case MISALIGNED_INDIRECT_REF:
2125 case ARRAY_REF:
2126 case ARRAY_RANGE_REF:
2127 case BIT_FIELD_REF:
2128 case OBJ_TYPE_REF:
2130 case REALPART_EXPR:
2131 case IMAGPART_EXPR:
2132 case PREINCREMENT_EXPR:
2133 case PREDECREMENT_EXPR:
2134 case SAVE_EXPR:
2135 case TRY_CATCH_EXPR:
2136 case WITH_CLEANUP_EXPR:
2137 case COMPOUND_EXPR:
2138 case MODIFY_EXPR:
2139 case TARGET_EXPR:
2140 case COND_EXPR:
2141 case BIND_EXPR:
2142 case MIN_EXPR:
2143 case MAX_EXPR:
2144 break;
2146 default:
2147 /* Assume the worst for front-end tree codes. */
2148 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2149 break;
2150 return false;
2153 return true;
2156 /* Return an expr equal to X but certainly not valid as an lvalue. */
2158 tree
2159 non_lvalue (tree x)
2161 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2162 us. */
2163 if (in_gimple_form)
2164 return x;
     /* Only wrap codes that could actually be lvalues.  */
2166 if (! maybe_lvalue_p (x))
2167 return x;
2168 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2171 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2172 Zero means allow extended lvalues. */
2174 int pedantic_lvalues;
2176 /* When pedantic, return an expr equal to X but certainly not valid as a
2177 pedantic lvalue. Otherwise, return X. */
2179 static tree
2180 pedantic_non_lvalue (tree x)
2182 if (pedantic_lvalues)
2183 return non_lvalue (x);
2184 else
2185 return x;
2188 /* Given a tree comparison code, return the code that is the logical inverse
2189 of the given code. It is not safe to do this for floating-point
2190 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2191 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2193 enum tree_code
2194 invert_tree_comparison (enum tree_code code, bool honor_nans)
     /* With trapping math and NaNs, inverting could change which
        comparisons raise exceptions, so refuse.  */
2196 if (honor_nans && flag_trapping_math)
2197 return ERROR_MARK;
2199 switch (code)
2201 case EQ_EXPR:
2202 return NE_EXPR;
2203 case NE_EXPR:
2204 return EQ_EXPR;
     /* For the ordered comparisons the inverse must include the
        unordered case when NaNs are possible.  */
2205 case GT_EXPR:
2206 return honor_nans ? UNLE_EXPR : LE_EXPR;
2207 case GE_EXPR:
2208 return honor_nans ? UNLT_EXPR : LT_EXPR;
2209 case LT_EXPR:
2210 return honor_nans ? UNGE_EXPR : GE_EXPR;
2211 case LE_EXPR:
2212 return honor_nans ? UNGT_EXPR : GT_EXPR;
2213 case LTGT_EXPR:
2214 return UNEQ_EXPR;
2215 case UNEQ_EXPR:
2216 return LTGT_EXPR;
2217 case UNGT_EXPR:
2218 return LE_EXPR;
2219 case UNGE_EXPR:
2220 return LT_EXPR;
2221 case UNLT_EXPR:
2222 return GE_EXPR;
2223 case UNLE_EXPR:
2224 return GT_EXPR;
2225 case ORDERED_EXPR:
2226 return UNORDERED_EXPR;
2227 case UNORDERED_EXPR:
2228 return ORDERED_EXPR;
2229 default:
2230 gcc_unreachable ();
2234 /* Similar, but return the comparison that results if the operands are
2235 swapped. This is safe for floating-point. */
2237 enum tree_code
2238 swap_tree_comparison (enum tree_code code)
2240 switch (code)
     /* Symmetric comparisons are unchanged by swapping operands.  */
2242 case EQ_EXPR:
2243 case NE_EXPR:
2244 case ORDERED_EXPR:
2245 case UNORDERED_EXPR:
2246 case LTGT_EXPR:
2247 case UNEQ_EXPR:
2248 return code;
2249 case GT_EXPR:
2250 return LT_EXPR;
2251 case GE_EXPR:
2252 return LE_EXPR;
2253 case LT_EXPR:
2254 return GT_EXPR;
2255 case LE_EXPR:
2256 return GE_EXPR;
2257 case UNGT_EXPR:
2258 return UNLT_EXPR;
2259 case UNGE_EXPR:
2260 return UNLE_EXPR;
2261 case UNLT_EXPR:
2262 return UNGT_EXPR;
2263 case UNLE_EXPR:
2264 return UNGE_EXPR;
2265 default:
2266 gcc_unreachable ();
2271 /* Convert a comparison tree code from an enum tree_code representation
2272 into a compcode bit-based encoding. This function is the inverse of
2273 compcode_to_comparison. */
2275 static enum comparison_code
2276 comparison_to_compcode (enum tree_code code)
2278 switch (code)
2280 case LT_EXPR:
2281 return COMPCODE_LT;
2282 case EQ_EXPR:
2283 return COMPCODE_EQ;
2284 case LE_EXPR:
2285 return COMPCODE_LE;
2286 case GT_EXPR:
2287 return COMPCODE_GT;
2288 case NE_EXPR:
2289 return COMPCODE_NE;
2290 case GE_EXPR:
2291 return COMPCODE_GE;
2292 case ORDERED_EXPR:
2293 return COMPCODE_ORD;
2294 case UNORDERED_EXPR:
2295 return COMPCODE_UNORD;
2296 case UNLT_EXPR:
2297 return COMPCODE_UNLT;
2298 case UNEQ_EXPR:
2299 return COMPCODE_UNEQ;
2300 case UNLE_EXPR:
2301 return COMPCODE_UNLE;
2302 case UNGT_EXPR:
2303 return COMPCODE_UNGT;
2304 case LTGT_EXPR:
2305 return COMPCODE_LTGT;
2306 case UNGE_EXPR:
2307 return COMPCODE_UNGE;
2308 default:
2309 gcc_unreachable ();
2313 /* Convert a compcode bit-based encoding of a comparison operator back
2314 to GCC's enum tree_code representation. This function is the
2315 inverse of comparison_to_compcode. */
2317 static enum tree_code
2318 compcode_to_comparison (enum comparison_code code)
2320 switch (code)
2322 case COMPCODE_LT:
2323 return LT_EXPR;
2324 case COMPCODE_EQ:
2325 return EQ_EXPR;
2326 case COMPCODE_LE:
2327 return LE_EXPR;
2328 case COMPCODE_GT:
2329 return GT_EXPR;
2330 case COMPCODE_NE:
2331 return NE_EXPR;
2332 case COMPCODE_GE:
2333 return GE_EXPR;
2334 case COMPCODE_ORD:
2335 return ORDERED_EXPR;
2336 case COMPCODE_UNORD:
2337 return UNORDERED_EXPR;
2338 case COMPCODE_UNLT:
2339 return UNLT_EXPR;
2340 case COMPCODE_UNEQ:
2341 return UNEQ_EXPR;
2342 case COMPCODE_UNLE:
2343 return UNLE_EXPR;
2344 case COMPCODE_UNGT:
2345 return UNGT_EXPR;
2346 case COMPCODE_LTGT:
2347 return LTGT_EXPR;
2348 case COMPCODE_UNGE:
2349 return UNGE_EXPR;
2350 default:
2351 gcc_unreachable ();
2355 /* Return a tree for the comparison which is the combination of
2356 doing the AND or OR (depending on CODE) of the two operations LCODE
2357 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2358 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2359 if this makes the transformation invalid. */
2361 tree
2362 combine_comparisons (enum tree_code code, enum tree_code lcode,
2363 enum tree_code rcode, tree truth_type,
2364 tree ll_arg, tree lr_arg)
2366 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2367 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2368 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2369 enum comparison_code compcode;
2371 switch (code)
2373 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2374 compcode = lcompcode & rcompcode;
2375 break;
2377 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2378 compcode = lcompcode | rcompcode;
2379 break;
2381 default:
2382 return NULL_TREE;
2385 if (!honor_nans)
2387 /* Eliminate unordered comparisons, as well as LTGT and ORD
2388 which are not used unless the mode has NaNs. */
2389 compcode &= ~COMPCODE_UNORD;
2390 if (compcode == COMPCODE_LTGT)
2391 compcode = COMPCODE_NE;
2392 else if (compcode == COMPCODE_ORD)
2393 compcode = COMPCODE_TRUE;
2395 else if (flag_trapping_math)
2397 /* Check that the original operation and the optimized ones will trap
2398 under the same condition. */
2399 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2400 && (lcompcode != COMPCODE_EQ)
2401 && (lcompcode != COMPCODE_ORD);
2402 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2403 && (rcompcode != COMPCODE_EQ)
2404 && (rcompcode != COMPCODE_ORD);
2405 bool trap = (compcode & COMPCODE_UNORD) == 0
2406 && (compcode != COMPCODE_EQ)
2407 && (compcode != COMPCODE_ORD);
2409 /* In a short-circuited boolean expression the LHS might be
2410 such that the RHS, if evaluated, will never trap. For
2411 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2412 if neither x nor y is NaN. (This is a mixed blessing: for
2413 example, the expression above will never trap, hence
2414 optimizing it to x < y would be invalid). */
2415 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2416 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2417 rtrap = false;
2419 /* If the comparison was short-circuited, and only the RHS
2420 trapped, we may now generate a spurious trap. */
2421 if (rtrap && !ltrap
2422 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2423 return NULL_TREE;
2425 /* If we changed the conditions that cause a trap, we lose. */
2426 if ((ltrap || rtrap) != trap)
2427 return NULL_TREE;
2430 if (compcode == COMPCODE_TRUE)
2431 return constant_boolean_node (true, truth_type);
2432 else if (compcode == COMPCODE_FALSE)
2433 return constant_boolean_node (false, truth_type);
2434 else
2435 return fold_build2 (compcode_to_comparison (compcode),
2436 truth_type, ll_arg, lr_arg);
2439 /* Return nonzero if CODE is a tree code that represents a truth value. */
2441 static int
2442 truth_value_p (enum tree_code code)
2444 return (TREE_CODE_CLASS (code) == tcc_comparison
2445 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2446 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2447 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2450 /* Return nonzero if two operands (typically of the same tree node)
2451 are necessarily equal. If either argument has side-effects this
2452 function returns zero. FLAGS modifies behavior as follows:
2454 If OEP_ONLY_CONST is set, only return nonzero for constants.
2455 This function tests whether the operands are indistinguishable;
2456 it does not test whether they are equal using C's == operation.
2457 The distinction is important for IEEE floating point, because
2458 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2459 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2461 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2462 even though it may hold multiple values during a function.
2463 This is because a GCC tree node guarantees that nothing else is
2464 executed between the evaluation of its "operands" (which may often
2465 be evaluated in arbitrary order). Hence if the operands themselves
2466 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2467 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2468 unset means assuming isochronic (or instantaneous) tree equivalence.
2469 Unless comparing arbitrary expression trees, such as from different
2470 statements, this flag can usually be left unset.
2472 If OEP_PURE_SAME is set, then pure functions with identical arguments
2473 are considered the same. It is used when the caller has other ways
2474 to ensure that global memory is unchanged in between. */
2477 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2479 /* If either is ERROR_MARK, they aren't equal. */
2480 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2481 return 0;
2483 /* If both types don't have the same signedness, then we can't consider
2484 them equal. We must check this before the STRIP_NOPS calls
2485 because they may change the signedness of the arguments. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2487 return 0;
2489 STRIP_NOPS (arg0);
2490 STRIP_NOPS (arg1);
2492 /* In case both args are comparisons but with different comparison
2493 code, try to swap the comparison operands of one arg to produce
2494 a match and compare that variant. */
2495 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2496 && COMPARISON_CLASS_P (arg0)
2497 && COMPARISON_CLASS_P (arg1))
2499 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2501 if (TREE_CODE (arg0) == swap_code)
2502 return operand_equal_p (TREE_OPERAND (arg0, 0),
2503 TREE_OPERAND (arg1, 1), flags)
2504 && operand_equal_p (TREE_OPERAND (arg0, 1),
2505 TREE_OPERAND (arg1, 0), flags);
2508 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2509 /* This is needed for conversions and for COMPONENT_REF.
2510 Might as well play it safe and always test this. */
2511 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2512 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2513 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2514 return 0;
2516 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2517 We don't care about side effects in that case because the SAVE_EXPR
2518 takes care of that for us. In all other cases, two expressions are
2519 equal if they have no side effects. If we have two identical
2520 expressions with side effects that should be treated the same due
2521 to the only side effects being identical SAVE_EXPR's, that will
2522 be detected in the recursive calls below. */
2523 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2524 && (TREE_CODE (arg0) == SAVE_EXPR
2525 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2526 return 1;
2528 /* Next handle constant cases, those for which we can return 1 even
2529 if ONLY_CONST is set. */
2530 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2531 switch (TREE_CODE (arg0))
2533 case INTEGER_CST:
2534 return (! TREE_CONSTANT_OVERFLOW (arg0)
2535 && ! TREE_CONSTANT_OVERFLOW (arg1)
2536 && tree_int_cst_equal (arg0, arg1));
2538 case REAL_CST:
2539 return (! TREE_CONSTANT_OVERFLOW (arg0)
2540 && ! TREE_CONSTANT_OVERFLOW (arg1)
2541 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2542 TREE_REAL_CST (arg1)));
2544 case VECTOR_CST:
2546 tree v1, v2;
2548 if (TREE_CONSTANT_OVERFLOW (arg0)
2549 || TREE_CONSTANT_OVERFLOW (arg1))
2550 return 0;
2552 v1 = TREE_VECTOR_CST_ELTS (arg0);
2553 v2 = TREE_VECTOR_CST_ELTS (arg1);
2554 while (v1 && v2)
2556 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2557 flags))
2558 return 0;
2559 v1 = TREE_CHAIN (v1);
2560 v2 = TREE_CHAIN (v2);
2563 return v1 == v2;
2566 case COMPLEX_CST:
2567 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2568 flags)
2569 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2570 flags));
2572 case STRING_CST:
2573 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2574 && ! memcmp (TREE_STRING_POINTER (arg0),
2575 TREE_STRING_POINTER (arg1),
2576 TREE_STRING_LENGTH (arg0)));
2578 case ADDR_EXPR:
2579 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2581 default:
2582 break;
2585 if (flags & OEP_ONLY_CONST)
2586 return 0;
2588 /* Define macros to test an operand from arg0 and arg1 for equality and a
2589 variant that allows null and views null as being different from any
2590 non-null value. In the latter case, if either is null, the both
2591 must be; otherwise, do the normal comparison. */
2592 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2593 TREE_OPERAND (arg1, N), flags)
2595 #define OP_SAME_WITH_NULL(N) \
2596 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2597 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2599 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2601 case tcc_unary:
2602 /* Two conversions are equal only if signedness and modes match. */
2603 switch (TREE_CODE (arg0))
2605 case NOP_EXPR:
2606 case CONVERT_EXPR:
2607 case FIX_CEIL_EXPR:
2608 case FIX_TRUNC_EXPR:
2609 case FIX_FLOOR_EXPR:
2610 case FIX_ROUND_EXPR:
2611 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2612 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2613 return 0;
2614 break;
2615 default:
2616 break;
2619 return OP_SAME (0);
2622 case tcc_comparison:
2623 case tcc_binary:
2624 if (OP_SAME (0) && OP_SAME (1))
2625 return 1;
2627 /* For commutative ops, allow the other order. */
2628 return (commutative_tree_code (TREE_CODE (arg0))
2629 && operand_equal_p (TREE_OPERAND (arg0, 0),
2630 TREE_OPERAND (arg1, 1), flags)
2631 && operand_equal_p (TREE_OPERAND (arg0, 1),
2632 TREE_OPERAND (arg1, 0), flags));
2634 case tcc_reference:
2635 /* If either of the pointer (or reference) expressions we are
2636 dereferencing contain a side effect, these cannot be equal. */
2637 if (TREE_SIDE_EFFECTS (arg0)
2638 || TREE_SIDE_EFFECTS (arg1))
2639 return 0;
2641 switch (TREE_CODE (arg0))
2643 case INDIRECT_REF:
2644 case ALIGN_INDIRECT_REF:
2645 case MISALIGNED_INDIRECT_REF:
2646 case REALPART_EXPR:
2647 case IMAGPART_EXPR:
2648 return OP_SAME (0);
2650 case ARRAY_REF:
2651 case ARRAY_RANGE_REF:
2652 /* Operands 2 and 3 may be null. */
2653 return (OP_SAME (0)
2654 && OP_SAME (1)
2655 && OP_SAME_WITH_NULL (2)
2656 && OP_SAME_WITH_NULL (3));
2658 case COMPONENT_REF:
2659 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2660 may be NULL when we're called to compare MEM_EXPRs. */
2661 return OP_SAME_WITH_NULL (0)
2662 && OP_SAME (1)
2663 && OP_SAME_WITH_NULL (2);
2665 case BIT_FIELD_REF:
2666 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2668 default:
2669 return 0;
2672 case tcc_expression:
2673 switch (TREE_CODE (arg0))
2675 case ADDR_EXPR:
2676 case TRUTH_NOT_EXPR:
2677 return OP_SAME (0);
2679 case TRUTH_ANDIF_EXPR:
2680 case TRUTH_ORIF_EXPR:
2681 return OP_SAME (0) && OP_SAME (1);
2683 case TRUTH_AND_EXPR:
2684 case TRUTH_OR_EXPR:
2685 case TRUTH_XOR_EXPR:
2686 if (OP_SAME (0) && OP_SAME (1))
2687 return 1;
2689 /* Otherwise take into account this is a commutative operation. */
2690 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2691 TREE_OPERAND (arg1, 1), flags)
2692 && operand_equal_p (TREE_OPERAND (arg0, 1),
2693 TREE_OPERAND (arg1, 0), flags));
2695 case CALL_EXPR:
2696 /* If the CALL_EXPRs call different functions, then they
2697 clearly can not be equal. */
2698 if (!OP_SAME (0))
2699 return 0;
2702 unsigned int cef = call_expr_flags (arg0);
2703 if (flags & OEP_PURE_SAME)
2704 cef &= ECF_CONST | ECF_PURE;
2705 else
2706 cef &= ECF_CONST;
2707 if (!cef)
2708 return 0;
2711 /* Now see if all the arguments are the same. operand_equal_p
2712 does not handle TREE_LIST, so we walk the operands here
2713 feeding them to operand_equal_p. */
2714 arg0 = TREE_OPERAND (arg0, 1);
2715 arg1 = TREE_OPERAND (arg1, 1);
2716 while (arg0 && arg1)
2718 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2719 flags))
2720 return 0;
2722 arg0 = TREE_CHAIN (arg0);
2723 arg1 = TREE_CHAIN (arg1);
2726 /* If we get here and both argument lists are exhausted
2727 then the CALL_EXPRs are equal. */
2728 return ! (arg0 || arg1);
2730 default:
2731 return 0;
2734 case tcc_declaration:
2735 /* Consider __builtin_sqrt equal to sqrt. */
2736 return (TREE_CODE (arg0) == FUNCTION_DECL
2737 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2738 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2739 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2741 default:
2742 return 0;
2745 #undef OP_SAME
2746 #undef OP_SAME_WITH_NULL
2749 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2750 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2752 When in doubt, return 0. */
2754 static int
2755 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2757 int unsignedp1, unsignedpo;
2758 tree primarg0, primarg1, primother;
2759 unsigned int correct_width;
2761 if (operand_equal_p (arg0, arg1, 0))
2762 return 1;
2764 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2765 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2766 return 0;
2768 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2769 and see if the inner values are the same. This removes any
2770 signedness comparison, which doesn't matter here. */
2771 primarg0 = arg0, primarg1 = arg1;
2772 STRIP_NOPS (primarg0);
2773 STRIP_NOPS (primarg1);
2774 if (operand_equal_p (primarg0, primarg1, 0))
2775 return 1;
2777 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2778 actual comparison operand, ARG0.
2780 First throw away any conversions to wider types
2781 already present in the operands. */
2783 primarg1 = get_narrower (arg1, &unsignedp1);
2784 primother = get_narrower (other, &unsignedpo);
2786 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2787 if (unsignedp1 == unsignedpo
2788 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2789 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2791 tree type = TREE_TYPE (arg0);
2793 /* Make sure shorter operand is extended the right way
2794 to match the longer operand. */
2795 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2796 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2798 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2799 return 1;
2802 return 0;
2805 /* See if ARG is an expression that is either a comparison or is performing
2806 arithmetic on comparisons. The comparisons must only be comparing
2807 two different values, which will be stored in *CVAL1 and *CVAL2; if
2808 they are nonzero it means that some operands have already been found.
2809 No variables may be used anywhere else in the expression except in the
2810 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2811 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2813 If this is true, return 1. Otherwise, return zero. */
2815 static int
2816 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2818 enum tree_code code = TREE_CODE (arg);
2819 enum tree_code_class class = TREE_CODE_CLASS (code);
2821 /* We can handle some of the tcc_expression cases here. */
2822 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2823 class = tcc_unary;
2824 else if (class == tcc_expression
2825 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2826 || code == COMPOUND_EXPR))
2827 class = tcc_binary;
2829 else if (class == tcc_expression && code == SAVE_EXPR
2830 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2832 /* If we've already found a CVAL1 or CVAL2, this expression is
2833 two complex to handle. */
2834 if (*cval1 || *cval2)
2835 return 0;
2837 class = tcc_unary;
2838 *save_p = 1;
2841 switch (class)
2843 case tcc_unary:
2844 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2846 case tcc_binary:
2847 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2848 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2849 cval1, cval2, save_p));
2851 case tcc_constant:
2852 return 1;
2854 case tcc_expression:
2855 if (code == COND_EXPR)
2856 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2857 cval1, cval2, save_p)
2858 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2859 cval1, cval2, save_p)
2860 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2861 cval1, cval2, save_p));
2862 return 0;
2864 case tcc_comparison:
2865 /* First see if we can handle the first operand, then the second. For
2866 the second operand, we know *CVAL1 can't be zero. It must be that
2867 one side of the comparison is each of the values; test for the
2868 case where this isn't true by failing if the two operands
2869 are the same. */
2871 if (operand_equal_p (TREE_OPERAND (arg, 0),
2872 TREE_OPERAND (arg, 1), 0))
2873 return 0;
2875 if (*cval1 == 0)
2876 *cval1 = TREE_OPERAND (arg, 0);
2877 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2879 else if (*cval2 == 0)
2880 *cval2 = TREE_OPERAND (arg, 0);
2881 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2883 else
2884 return 0;
2886 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2888 else if (*cval2 == 0)
2889 *cval2 = TREE_OPERAND (arg, 1);
2890 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2892 else
2893 return 0;
2895 return 1;
2897 default:
2898 return 0;
2902 /* ARG is a tree that is known to contain just arithmetic operations and
2903 comparisons. Evaluate the operations in the tree substituting NEW0 for
2904 any occurrence of OLD0 as an operand of a comparison and likewise for
2905 NEW1 and OLD1. */
2907 static tree
2908 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2910 tree type = TREE_TYPE (arg);
2911 enum tree_code code = TREE_CODE (arg);
2912 enum tree_code_class class = TREE_CODE_CLASS (code);
2914 /* We can handle some of the tcc_expression cases here. */
2915 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2916 class = tcc_unary;
2917 else if (class == tcc_expression
2918 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2919 class = tcc_binary;
2921 switch (class)
2923 case tcc_unary:
2924 return fold_build1 (code, type,
2925 eval_subst (TREE_OPERAND (arg, 0),
2926 old0, new0, old1, new1));
2928 case tcc_binary:
2929 return fold_build2 (code, type,
2930 eval_subst (TREE_OPERAND (arg, 0),
2931 old0, new0, old1, new1),
2932 eval_subst (TREE_OPERAND (arg, 1),
2933 old0, new0, old1, new1));
2935 case tcc_expression:
2936 switch (code)
2938 case SAVE_EXPR:
2939 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2941 case COMPOUND_EXPR:
2942 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2944 case COND_EXPR:
2945 return fold_build3 (code, type,
2946 eval_subst (TREE_OPERAND (arg, 0),
2947 old0, new0, old1, new1),
2948 eval_subst (TREE_OPERAND (arg, 1),
2949 old0, new0, old1, new1),
2950 eval_subst (TREE_OPERAND (arg, 2),
2951 old0, new0, old1, new1));
2952 default:
2953 break;
2955 /* Fall through - ??? */
2957 case tcc_comparison:
2959 tree arg0 = TREE_OPERAND (arg, 0);
2960 tree arg1 = TREE_OPERAND (arg, 1);
2962 /* We need to check both for exact equality and tree equality. The
2963 former will be true if the operand has a side-effect. In that
2964 case, we know the operand occurred exactly once. */
2966 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2967 arg0 = new0;
2968 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2969 arg0 = new1;
2971 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2972 arg1 = new0;
2973 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2974 arg1 = new1;
2976 return fold_build2 (code, type, arg0, arg1);
2979 default:
2980 return arg;
2984 /* Return a tree for the case when the result of an expression is RESULT
2985 converted to TYPE and OMITTED was previously an operand of the expression
2986 but is now not needed (e.g., we folded OMITTED * 0).
2988 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2989 the conversion of RESULT to TYPE. */
2991 tree
2992 omit_one_operand (tree type, tree result, tree omitted)
2994 tree t = fold_convert (type, result);
2996 if (TREE_SIDE_EFFECTS (omitted))
2997 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2999 return non_lvalue (t);
3002 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3004 static tree
3005 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3007 tree t = fold_convert (type, result);
3009 if (TREE_SIDE_EFFECTS (omitted))
3010 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3012 return pedantic_non_lvalue (t);
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3017 of the expression but are now not needed.
3019 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3020 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3021 evaluated before OMITTED2. Otherwise, if neither has side effects,
3022 just do the conversion of RESULT to TYPE. */
3024 tree
3025 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3027 tree t = fold_convert (type, result);
3029 if (TREE_SIDE_EFFECTS (omitted2))
3030 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3031 if (TREE_SIDE_EFFECTS (omitted1))
3032 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3034 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3038 /* Return a simplified tree node for the truth-negation of ARG. This
3039 never alters ARG itself. We assume that ARG is an operation that
3040 returns a truth value (0 or 1).
3042 FIXME: one would think we would fold the result, but it causes
3043 problems with the dominator optimizer. */
3044 tree
3045 invert_truthvalue (tree arg)
3047 tree type = TREE_TYPE (arg);
3048 enum tree_code code = TREE_CODE (arg);
3050 if (code == ERROR_MARK)
3051 return arg;
3053 /* If this is a comparison, we can simply invert it, except for
3054 floating-point non-equality comparisons, in which case we just
3055 enclose a TRUTH_NOT_EXPR around what we have. */
3057 if (TREE_CODE_CLASS (code) == tcc_comparison)
3059 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3060 if (FLOAT_TYPE_P (op_type)
3061 && flag_trapping_math
3062 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3063 && code != NE_EXPR && code != EQ_EXPR)
3064 return build1 (TRUTH_NOT_EXPR, type, arg);
3065 else
3067 code = invert_tree_comparison (code,
3068 HONOR_NANS (TYPE_MODE (op_type)));
3069 if (code == ERROR_MARK)
3070 return build1 (TRUTH_NOT_EXPR, type, arg);
3071 else
3072 return build2 (code, type,
3073 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3077 switch (code)
3079 case INTEGER_CST:
3080 return constant_boolean_node (integer_zerop (arg), type);
3082 case TRUTH_AND_EXPR:
3083 return build2 (TRUTH_OR_EXPR, type,
3084 invert_truthvalue (TREE_OPERAND (arg, 0)),
3085 invert_truthvalue (TREE_OPERAND (arg, 1)));
3087 case TRUTH_OR_EXPR:
3088 return build2 (TRUTH_AND_EXPR, type,
3089 invert_truthvalue (TREE_OPERAND (arg, 0)),
3090 invert_truthvalue (TREE_OPERAND (arg, 1)));
3092 case TRUTH_XOR_EXPR:
3093 /* Here we can invert either operand. We invert the first operand
3094 unless the second operand is a TRUTH_NOT_EXPR in which case our
3095 result is the XOR of the first operand with the inside of the
3096 negation of the second operand. */
3098 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3099 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3100 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3101 else
3102 return build2 (TRUTH_XOR_EXPR, type,
3103 invert_truthvalue (TREE_OPERAND (arg, 0)),
3104 TREE_OPERAND (arg, 1));
3106 case TRUTH_ANDIF_EXPR:
3107 return build2 (TRUTH_ORIF_EXPR, type,
3108 invert_truthvalue (TREE_OPERAND (arg, 0)),
3109 invert_truthvalue (TREE_OPERAND (arg, 1)));
3111 case TRUTH_ORIF_EXPR:
3112 return build2 (TRUTH_ANDIF_EXPR, type,
3113 invert_truthvalue (TREE_OPERAND (arg, 0)),
3114 invert_truthvalue (TREE_OPERAND (arg, 1)));
3116 case TRUTH_NOT_EXPR:
3117 return TREE_OPERAND (arg, 0);
3119 case COND_EXPR:
3121 tree arg1 = TREE_OPERAND (arg, 1);
3122 tree arg2 = TREE_OPERAND (arg, 2);
3123 /* A COND_EXPR may have a throw as one operand, which
3124 then has void type. Just leave void operands
3125 as they are. */
3126 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3127 VOID_TYPE_P (TREE_TYPE (arg1))
3128 ? arg1 : invert_truthvalue (arg1),
3129 VOID_TYPE_P (TREE_TYPE (arg2))
3130 ? arg2 : invert_truthvalue (arg2));
3133 case COMPOUND_EXPR:
3134 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3135 invert_truthvalue (TREE_OPERAND (arg, 1)));
3137 case NON_LVALUE_EXPR:
3138 return invert_truthvalue (TREE_OPERAND (arg, 0));
3140 case NOP_EXPR:
3141 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3142 break;
3144 case CONVERT_EXPR:
3145 case FLOAT_EXPR:
3146 return build1 (TREE_CODE (arg), type,
3147 invert_truthvalue (TREE_OPERAND (arg, 0)));
3149 case BIT_AND_EXPR:
3150 if (!integer_onep (TREE_OPERAND (arg, 1)))
3151 break;
3152 return build2 (EQ_EXPR, type, arg,
3153 build_int_cst (type, 0));
3155 case SAVE_EXPR:
3156 return build1 (TRUTH_NOT_EXPR, type, arg);
3158 case CLEANUP_POINT_EXPR:
3159 return build1 (CLEANUP_POINT_EXPR, type,
3160 invert_truthvalue (TREE_OPERAND (arg, 0)));
3162 default:
3163 break;
3165 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3166 return build1 (TRUTH_NOT_EXPR, type, arg);
3169 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3170 operands are another bit-wise operation with a common input. If so,
3171 distribute the bit operations to save an operation and possibly two if
3172 constants are involved. For example, convert
3173 (A | B) & (A | C) into A | (B & C)
3174 Further simplification will occur if B and C are constants.
3176 If this optimization cannot be done, 0 will be returned. */
3178 static tree
3179 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3181 tree common;
3182 tree left, right;
3184 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3185 || TREE_CODE (arg0) == code
3186 || (TREE_CODE (arg0) != BIT_AND_EXPR
3187 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3188 return 0;
3190 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3192 common = TREE_OPERAND (arg0, 0);
3193 left = TREE_OPERAND (arg0, 1);
3194 right = TREE_OPERAND (arg1, 1);
3196 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3198 common = TREE_OPERAND (arg0, 0);
3199 left = TREE_OPERAND (arg0, 1);
3200 right = TREE_OPERAND (arg1, 0);
3202 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3204 common = TREE_OPERAND (arg0, 1);
3205 left = TREE_OPERAND (arg0, 0);
3206 right = TREE_OPERAND (arg1, 1);
3208 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3210 common = TREE_OPERAND (arg0, 1);
3211 left = TREE_OPERAND (arg0, 0);
3212 right = TREE_OPERAND (arg1, 0);
3214 else
3215 return 0;
3217 return fold_build2 (TREE_CODE (arg0), type, common,
3218 fold_build2 (code, type, left, right));
3221 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3222 with code CODE. This optimization is unsafe. */
3223 static tree
3224 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3226 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3227 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3229 /* (A / C) +- (B / C) -> (A +- B) / C. */
3230 if (mul0 == mul1
3231 && operand_equal_p (TREE_OPERAND (arg0, 1),
3232 TREE_OPERAND (arg1, 1), 0))
3233 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3234 fold_build2 (code, type,
3235 TREE_OPERAND (arg0, 0),
3236 TREE_OPERAND (arg1, 0)),
3237 TREE_OPERAND (arg0, 1));
3239 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3240 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3241 TREE_OPERAND (arg1, 0), 0)
3242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3243 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3245 REAL_VALUE_TYPE r0, r1;
3246 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3247 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3248 if (!mul0)
3249 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3250 if (!mul1)
3251 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3252 real_arithmetic (&r0, code, &r0, &r1);
3253 return fold_build2 (MULT_EXPR, type,
3254 TREE_OPERAND (arg0, 0),
3255 build_real (type, r0));
3258 return NULL_TREE;
3261 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3262 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3264 static tree
3265 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3266 int unsignedp)
3268 tree result;
3270 if (bitpos == 0)
3272 tree size = TYPE_SIZE (TREE_TYPE (inner));
3273 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3274 || POINTER_TYPE_P (TREE_TYPE (inner)))
3275 && host_integerp (size, 0)
3276 && tree_low_cst (size, 0) == bitsize)
3277 return fold_convert (type, inner);
3280 result = build3 (BIT_FIELD_REF, type, inner,
3281 size_int (bitsize), bitsize_int (bitpos));
3283 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3285 return result;
3288 /* Optimize a bit-field compare.
3290 There are two cases: First is a compare against a constant and the
3291 second is a comparison of two items where the fields are at the same
3292 bit position relative to the start of a chunk (byte, halfword, word)
3293 large enough to contain it. In these cases we can avoid the shift
3294 implicit in bitfield extractions.
3296 For constants, we emit a compare of the shifted constant with the
3297 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3298 compared. For two fields at the same position, we do the ANDs with the
3299 similar mask and compare the result of the ANDs.
3301 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3302 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3303 are the left and right operands of the comparison, respectively.
3305 If the optimization described above can be done, we return the resulting
3306 tree. Otherwise we return zero. */
3308 static tree
3309 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3310 tree lhs, tree rhs)
3312 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3313 tree type = TREE_TYPE (lhs);
3314 tree signed_type, unsigned_type;
3315 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3316 enum machine_mode lmode, rmode, nmode;
3317 int lunsignedp, runsignedp;
3318 int lvolatilep = 0, rvolatilep = 0;
3319 tree linner, rinner = NULL_TREE;
3320 tree mask;
3321 tree offset;
3323 /* Get all the information about the extractions being done. If the bit size
3324 if the same as the size of the underlying object, we aren't doing an
3325 extraction at all and so can do nothing. We also don't want to
3326 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3327 then will no longer be able to replace it. */
3328 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3329 &lunsignedp, &lvolatilep, false);
3330 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3331 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3332 return 0;
3334 if (!const_p)
3336 /* If this is not a constant, we can only do something if bit positions,
3337 sizes, and signedness are the same. */
3338 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3339 &runsignedp, &rvolatilep, false);
3341 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3342 || lunsignedp != runsignedp || offset != 0
3343 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3344 return 0;
3347 /* See if we can find a mode to refer to this field. We should be able to,
3348 but fail if we can't. */
3349 nmode = get_best_mode (lbitsize, lbitpos,
3350 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3351 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3352 TYPE_ALIGN (TREE_TYPE (rinner))),
3353 word_mode, lvolatilep || rvolatilep);
3354 if (nmode == VOIDmode)
3355 return 0;
3357 /* Set signed and unsigned types of the precision of this mode for the
3358 shifts below. */
3359 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3360 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3362 /* Compute the bit position and size for the new reference and our offset
3363 within it. If the new reference is the same size as the original, we
3364 won't optimize anything, so return zero. */
3365 nbitsize = GET_MODE_BITSIZE (nmode);
3366 nbitpos = lbitpos & ~ (nbitsize - 1);
3367 lbitpos -= nbitpos;
3368 if (nbitsize == lbitsize)
3369 return 0;
3371 if (BYTES_BIG_ENDIAN)
3372 lbitpos = nbitsize - lbitsize - lbitpos;
3374 /* Make the mask to be used against the extracted field. */
3375 mask = build_int_cst (unsigned_type, -1);
3376 mask = force_fit_type (mask, 0, false, false);
3377 mask = fold_convert (unsigned_type, mask);
3378 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3379 mask = const_binop (RSHIFT_EXPR, mask,
3380 size_int (nbitsize - lbitsize - lbitpos), 0);
3382 if (! const_p)
3383 /* If not comparing with constant, just rework the comparison
3384 and return. */
3385 return build2 (code, compare_type,
3386 build2 (BIT_AND_EXPR, unsigned_type,
3387 make_bit_field_ref (linner, unsigned_type,
3388 nbitsize, nbitpos, 1),
3389 mask),
3390 build2 (BIT_AND_EXPR, unsigned_type,
3391 make_bit_field_ref (rinner, unsigned_type,
3392 nbitsize, nbitpos, 1),
3393 mask));
3395 /* Otherwise, we are handling the constant case. See if the constant is too
3396 big for the field. Warn and return a tree of for 0 (false) if so. We do
3397 this not only for its own sake, but to avoid having to test for this
3398 error case below. If we didn't, we might generate wrong code.
3400 For unsigned fields, the constant shifted right by the field length should
3401 be all zero. For signed fields, the high-order bits should agree with
3402 the sign bit. */
3404 if (lunsignedp)
3406 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3407 fold_convert (unsigned_type, rhs),
3408 size_int (lbitsize), 0)))
3410 warning (0, "comparison is always %d due to width of bit-field",
3411 code == NE_EXPR);
3412 return constant_boolean_node (code == NE_EXPR, compare_type);
3415 else
3417 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3418 size_int (lbitsize - 1), 0);
3419 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3421 warning (0, "comparison is always %d due to width of bit-field",
3422 code == NE_EXPR);
3423 return constant_boolean_node (code == NE_EXPR, compare_type);
3427 /* Single-bit compares should always be against zero. */
3428 if (lbitsize == 1 && ! integer_zerop (rhs))
3430 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3431 rhs = build_int_cst (type, 0);
3434 /* Make a new bitfield reference, shift the constant over the
3435 appropriate number of bits and mask it with the computed mask
3436 (in case this was a signed field). If we changed it, make a new one. */
3437 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3438 if (lvolatilep)
3440 TREE_SIDE_EFFECTS (lhs) = 1;
3441 TREE_THIS_VOLATILE (lhs) = 1;
3444 rhs = const_binop (BIT_AND_EXPR,
3445 const_binop (LSHIFT_EXPR,
3446 fold_convert (unsigned_type, rhs),
3447 size_int (lbitpos), 0),
3448 mask, 0);
3450 return build2 (code, compare_type,
3451 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3452 rhs);
3455 /* Subroutine for fold_truthop: decode a field reference.
3457 If EXP is a comparison reference, we return the innermost reference.
3459 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3460 set to the starting bit number.
3462 If the innermost field can be completely contained in a mode-sized
3463 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3465 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3466 otherwise it is not changed.
3468 *PUNSIGNEDP is set to the signedness of the field.
3470 *PMASK is set to the mask used. This is either contained in a
3471 BIT_AND_EXPR or derived from the width of the field.
3473 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3475 Return 0 if this is not a component reference or is one that we can't
3476 do anything with. */
3478 static tree
3479 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3480 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3481 int *punsignedp, int *pvolatilep,
3482 tree *pmask, tree *pand_mask)
3484 tree outer_type = 0;
3485 tree and_mask = 0;
3486 tree mask, inner, offset;
3487 tree unsigned_type;
3488 unsigned int precision;
3490 /* All the optimizations using this function assume integer fields.
3491 There are problems with FP fields since the type_for_size call
3492 below can fail for, e.g., XFmode. */
3493 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3494 return 0;
3496 /* We are interested in the bare arrangement of bits, so strip everything
3497 that doesn't affect the machine mode. However, record the type of the
3498 outermost expression if it may matter below. */
3499 if (TREE_CODE (exp) == NOP_EXPR
3500 || TREE_CODE (exp) == CONVERT_EXPR
3501 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3502 outer_type = TREE_TYPE (exp);
3503 STRIP_NOPS (exp);
3505 if (TREE_CODE (exp) == BIT_AND_EXPR)
3507 and_mask = TREE_OPERAND (exp, 1);
3508 exp = TREE_OPERAND (exp, 0);
3509 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3510 if (TREE_CODE (and_mask) != INTEGER_CST)
3511 return 0;
3514 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3515 punsignedp, pvolatilep, false);
3516 if ((inner == exp && and_mask == 0)
3517 || *pbitsize < 0 || offset != 0
3518 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3519 return 0;
3521 /* If the number of bits in the reference is the same as the bitsize of
3522 the outer type, then the outer type gives the signedness. Otherwise
3523 (in case of a small bitfield) the signedness is unchanged. */
3524 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3525 *punsignedp = TYPE_UNSIGNED (outer_type);
3527 /* Compute the mask to access the bitfield. */
3528 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3529 precision = TYPE_PRECISION (unsigned_type);
3531 mask = build_int_cst (unsigned_type, -1);
3532 mask = force_fit_type (mask, 0, false, false);
3534 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3535 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3537 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3538 if (and_mask != 0)
3539 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3540 fold_convert (unsigned_type, and_mask), mask);
3542 *pmask = mask;
3543 *pand_mask = and_mask;
3544 return inner;
3547 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3548 bit positions. */
3550 static int
3551 all_ones_mask_p (tree mask, int size)
3553 tree type = TREE_TYPE (mask);
3554 unsigned int precision = TYPE_PRECISION (type);
3555 tree tmask;
3557 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3558 tmask = force_fit_type (tmask, 0, false, false);
3560 return
3561 tree_int_cst_equal (mask,
3562 const_binop (RSHIFT_EXPR,
3563 const_binop (LSHIFT_EXPR, tmask,
3564 size_int (precision - size),
3566 size_int (precision - size), 0));
3569 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3570 represents the sign bit of EXP's type. If EXP represents a sign
3571 or zero extension, also test VAL against the unextended type.
3572 The return value is the (sub)expression whose sign bit is VAL,
3573 or NULL_TREE otherwise. */
3575 static tree
3576 sign_bit_p (tree exp, tree val)
3578 unsigned HOST_WIDE_INT mask_lo, lo;
3579 HOST_WIDE_INT mask_hi, hi;
3580 int width;
3581 tree t;
3583 /* Tree EXP must have an integral type. */
3584 t = TREE_TYPE (exp);
3585 if (! INTEGRAL_TYPE_P (t))
3586 return NULL_TREE;
3588 /* Tree VAL must be an integer constant. */
3589 if (TREE_CODE (val) != INTEGER_CST
3590 || TREE_CONSTANT_OVERFLOW (val))
3591 return NULL_TREE;
3593 width = TYPE_PRECISION (t);
3594 if (width > HOST_BITS_PER_WIDE_INT)
3596 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3597 lo = 0;
3599 mask_hi = ((unsigned HOST_WIDE_INT) -1
3600 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3601 mask_lo = -1;
3603 else
3605 hi = 0;
3606 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3608 mask_hi = 0;
3609 mask_lo = ((unsigned HOST_WIDE_INT) -1
3610 >> (HOST_BITS_PER_WIDE_INT - width));
3613 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3614 treat VAL as if it were unsigned. */
3615 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3616 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3617 return exp;
3619 /* Handle extension from a narrower type. */
3620 if (TREE_CODE (exp) == NOP_EXPR
3621 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3622 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3624 return NULL_TREE;
3627 /* Subroutine for fold_truthop: determine if an operand is simple enough
3628 to be evaluated unconditionally. */
3630 static int
3631 simple_operand_p (tree exp)
3633 /* Strip any conversions that don't change the machine mode. */
3634 STRIP_NOPS (exp);
3636 return (CONSTANT_CLASS_P (exp)
3637 || TREE_CODE (exp) == SSA_NAME
3638 || (DECL_P (exp)
3639 && ! TREE_ADDRESSABLE (exp)
3640 && ! TREE_THIS_VOLATILE (exp)
3641 && ! DECL_NONLOCAL (exp)
3642 /* Don't regard global variables as simple. They may be
3643 allocated in ways unknown to the compiler (shared memory,
3644 #pragma weak, etc). */
3645 && ! TREE_PUBLIC (exp)
3646 && ! DECL_EXTERNAL (exp)
3647 /* Loading a static variable is unduly expensive, but global
3648 registers aren't expensive. */
3649 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3652 /* The following functions are subroutines to fold_range_test and allow it to
3653 try to change a logical combination of comparisons into a range test.
   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3
3662 We describe each set of comparisons as being either inside or outside
3663 a range, using a variable named like IN_P, and then describe the
3664 range with a lower and upper bound. If one of the bounds is omitted,
3665 it represents either the highest or lowest value of the type.
3667 In the comments below, we represent a range by two numbers in brackets
3668 preceded by a "+" to designate being inside that range, or a "-" to
3669 designate being outside that range, so the condition can be inverted by
3670 flipping the prefix. An omitted bound is represented by a "-". For
3671 example, "- [-, 10]" means being outside the range starting at the lowest
3672 possible value and ending at 10, in other words, being greater than 10.
3673 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3674 always false.
3676 We set up things so that the missing bounds are handled in a consistent
3677 manner so neither a missing bound nor "true" and "false" need to be
3678 handled using a special case. */
3680 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3681 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3682 and UPPER1_P are nonzero if the respective argument is an upper bound
3683 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3684 must be specified for a comparison. ARG1 will be converted to ARG0's
3685 type if both are specified. */
3687 static tree
3688 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3689 tree arg1, int upper1_p)
3691 tree tem;
3692 int result;
3693 int sgn0, sgn1;
3695 /* If neither arg represents infinity, do the normal operation.
3696 Else, if not a comparison, return infinity. Else handle the special
3697 comparison rules. Note that most of the cases below won't occur, but
3698 are handled for consistency. */
3700 if (arg0 != 0 && arg1 != 0)
3702 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3703 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3704 STRIP_NOPS (tem);
3705 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3708 if (TREE_CODE_CLASS (code) != tcc_comparison)
3709 return 0;
3711 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3712 for neither. In real maths, we cannot assume open ended ranges are
3713 the same. But, this is computer arithmetic, where numbers are finite.
3714 We can therefore make the transformation of any unbounded range with
3715 the value Z, Z being greater than any representable number. This permits
3716 us to treat unbounded ranges as equal. */
3717 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3718 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3719 switch (code)
3721 case EQ_EXPR:
3722 result = sgn0 == sgn1;
3723 break;
3724 case NE_EXPR:
3725 result = sgn0 != sgn1;
3726 break;
3727 case LT_EXPR:
3728 result = sgn0 < sgn1;
3729 break;
3730 case LE_EXPR:
3731 result = sgn0 <= sgn1;
3732 break;
3733 case GT_EXPR:
3734 result = sgn0 > sgn1;
3735 break;
3736 case GE_EXPR:
3737 result = sgn0 >= sgn1;
3738 break;
3739 default:
3740 gcc_unreachable ();
3743 return constant_boolean_node (result, type);
3746 /* Given EXP, a logical expression, set the range it is testing into
3747 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3748 actually being tested. *PLOW and *PHIGH will be made of the same type
3749 as the returned expression. If EXP is not a comparison, we will most
3750 likely not be returning a useful value and range. */
3752 static tree
3753 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3755 enum tree_code code;
3756 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3757 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3758 int in_p, n_in_p;
3759 tree low, high, n_low, n_high;
3761 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3762 and see if we can refine the range. Some of the cases below may not
3763 happen, but it doesn't seem worth worrying about this. We "continue"
3764 the outer loop when we've changed something; otherwise we "break"
3765 the switch, which will "break" the while. */
3767 in_p = 0;
3768 low = high = build_int_cst (TREE_TYPE (exp), 0);
3770 while (1)
3772 code = TREE_CODE (exp);
3773 exp_type = TREE_TYPE (exp);
3775 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3777 if (TREE_CODE_LENGTH (code) > 0)
3778 arg0 = TREE_OPERAND (exp, 0);
3779 if (TREE_CODE_CLASS (code) == tcc_comparison
3780 || TREE_CODE_CLASS (code) == tcc_unary
3781 || TREE_CODE_CLASS (code) == tcc_binary)
3782 arg0_type = TREE_TYPE (arg0);
3783 if (TREE_CODE_CLASS (code) == tcc_binary
3784 || TREE_CODE_CLASS (code) == tcc_comparison
3785 || (TREE_CODE_CLASS (code) == tcc_expression
3786 && TREE_CODE_LENGTH (code) > 1))
3787 arg1 = TREE_OPERAND (exp, 1);
3790 switch (code)
3792 case TRUTH_NOT_EXPR:
3793 in_p = ! in_p, exp = arg0;
3794 continue;
3796 case EQ_EXPR: case NE_EXPR:
3797 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3798 /* We can only do something if the range is testing for zero
3799 and if the second operand is an integer constant. Note that
3800 saying something is "in" the range we make is done by
3801 complementing IN_P since it will set in the initial case of
3802 being not equal to zero; "out" is leaving it alone. */
3803 if (low == 0 || high == 0
3804 || ! integer_zerop (low) || ! integer_zerop (high)
3805 || TREE_CODE (arg1) != INTEGER_CST)
3806 break;
3808 switch (code)
3810 case NE_EXPR: /* - [c, c] */
3811 low = high = arg1;
3812 break;
3813 case EQ_EXPR: /* + [c, c] */
3814 in_p = ! in_p, low = high = arg1;
3815 break;
3816 case GT_EXPR: /* - [-, c] */
3817 low = 0, high = arg1;
3818 break;
3819 case GE_EXPR: /* + [c, -] */
3820 in_p = ! in_p, low = arg1, high = 0;
3821 break;
3822 case LT_EXPR: /* - [c, -] */
3823 low = arg1, high = 0;
3824 break;
3825 case LE_EXPR: /* + [-, c] */
3826 in_p = ! in_p, low = 0, high = arg1;
3827 break;
3828 default:
3829 gcc_unreachable ();
3832 /* If this is an unsigned comparison, we also know that EXP is
3833 greater than or equal to zero. We base the range tests we make
3834 on that fact, so we record it here so we can parse existing
3835 range tests. We test arg0_type since often the return type
3836 of, e.g. EQ_EXPR, is boolean. */
3837 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3839 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3840 in_p, low, high, 1,
3841 build_int_cst (arg0_type, 0),
3842 NULL_TREE))
3843 break;
3845 in_p = n_in_p, low = n_low, high = n_high;
3847 /* If the high bound is missing, but we have a nonzero low
3848 bound, reverse the range so it goes from zero to the low bound
3849 minus 1. */
3850 if (high == 0 && low && ! integer_zerop (low))
3852 in_p = ! in_p;
3853 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3854 integer_one_node, 0);
3855 low = build_int_cst (arg0_type, 0);
3859 exp = arg0;
3860 continue;
3862 case NEGATE_EXPR:
3863 /* (-x) IN [a,b] -> x in [-b, -a] */
3864 n_low = range_binop (MINUS_EXPR, exp_type,
3865 build_int_cst (exp_type, 0),
3866 0, high, 1);
3867 n_high = range_binop (MINUS_EXPR, exp_type,
3868 build_int_cst (exp_type, 0),
3869 0, low, 0);
3870 low = n_low, high = n_high;
3871 exp = arg0;
3872 continue;
3874 case BIT_NOT_EXPR:
3875 /* ~ X -> -X - 1 */
3876 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3877 build_int_cst (exp_type, 1));
3878 continue;
3880 case PLUS_EXPR: case MINUS_EXPR:
3881 if (TREE_CODE (arg1) != INTEGER_CST)
3882 break;
3884 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3885 move a constant to the other side. */
3886 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3887 break;
3889 /* If EXP is signed, any overflow in the computation is undefined,
3890 so we don't worry about it so long as our computations on
3891 the bounds don't overflow. For unsigned, overflow is defined
3892 and this is exactly the right thing. */
3893 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3894 arg0_type, low, 0, arg1, 0);
3895 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3896 arg0_type, high, 1, arg1, 0);
3897 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3898 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3899 break;
3901 /* Check for an unsigned range which has wrapped around the maximum
3902 value thus making n_high < n_low, and normalize it. */
3903 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3905 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3906 integer_one_node, 0);
3907 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3908 integer_one_node, 0);
3910 /* If the range is of the form +/- [ x+1, x ], we won't
3911 be able to normalize it. But then, it represents the
3912 whole range or the empty set, so make it
3913 +/- [ -, - ]. */
3914 if (tree_int_cst_equal (n_low, low)
3915 && tree_int_cst_equal (n_high, high))
3916 low = high = 0;
3917 else
3918 in_p = ! in_p;
3920 else
3921 low = n_low, high = n_high;
3923 exp = arg0;
3924 continue;
3926 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3927 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3928 break;
3930 if (! INTEGRAL_TYPE_P (arg0_type)
3931 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3932 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3933 break;
3935 n_low = low, n_high = high;
3937 if (n_low != 0)
3938 n_low = fold_convert (arg0_type, n_low);
3940 if (n_high != 0)
3941 n_high = fold_convert (arg0_type, n_high);
3944 /* If we're converting arg0 from an unsigned type, to exp,
3945 a signed type, we will be doing the comparison as unsigned.
3946 The tests above have already verified that LOW and HIGH
3947 are both positive.
3949 So we have to ensure that we will handle large unsigned
3950 values the same way that the current signed bounds treat
3951 negative values. */
3953 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3955 tree high_positive;
3956 tree equiv_type = lang_hooks.types.type_for_mode
3957 (TYPE_MODE (arg0_type), 1);
3959 /* A range without an upper bound is, naturally, unbounded.
3960 Since convert would have cropped a very large value, use
3961 the max value for the destination type. */
3962 high_positive
3963 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3964 : TYPE_MAX_VALUE (arg0_type);
3966 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3967 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3968 fold_convert (arg0_type,
3969 high_positive),
3970 fold_convert (arg0_type,
3971 integer_one_node));
3973 /* If the low bound is specified, "and" the range with the
3974 range for which the original unsigned value will be
3975 positive. */
3976 if (low != 0)
3978 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3979 1, n_low, n_high, 1,
3980 fold_convert (arg0_type,
3981 integer_zero_node),
3982 high_positive))
3983 break;
3985 in_p = (n_in_p == in_p);
3987 else
3989 /* Otherwise, "or" the range with the range of the input
3990 that will be interpreted as negative. */
3991 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3992 0, n_low, n_high, 1,
3993 fold_convert (arg0_type,
3994 integer_zero_node),
3995 high_positive))
3996 break;
3998 in_p = (in_p != n_in_p);
4002 exp = arg0;
4003 low = n_low, high = n_high;
4004 continue;
4006 default:
4007 break;
4010 break;
4013 /* If EXP is a constant, we can evaluate whether this is true or false. */
4014 if (TREE_CODE (exp) == INTEGER_CST)
4016 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4017 exp, 0, low, 0))
4018 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4019 exp, 1, high, 1)));
4020 low = high = 0;
4021 exp = 0;
4024 *pin_p = in_p, *plow = low, *phigh = high;
4025 return exp;
4028 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4029 type, TYPE, return an expression to test if EXP is in (or out of, depending
4030 on IN_P) the range. Return 0 if the test couldn't be created. */
4032 static tree
4033 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4035 tree etype = TREE_TYPE (exp);
4036 tree value;
4038 #ifdef HAVE_canonicalize_funcptr_for_compare
4039 /* Disable this optimization for function pointer expressions
4040 on targets that require function pointer canonicalization. */
4041 if (HAVE_canonicalize_funcptr_for_compare
4042 && TREE_CODE (etype) == POINTER_TYPE
4043 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4044 return NULL_TREE;
4045 #endif
4047 if (! in_p)
4049 value = build_range_check (type, exp, 1, low, high);
4050 if (value != 0)
4051 return invert_truthvalue (value);
4053 return 0;
4056 if (low == 0 && high == 0)
4057 return build_int_cst (type, 1);
4059 if (low == 0)
4060 return fold_build2 (LE_EXPR, type, exp,
4061 fold_convert (etype, high));
4063 if (high == 0)
4064 return fold_build2 (GE_EXPR, type, exp,
4065 fold_convert (etype, low));
4067 if (operand_equal_p (low, high, 0))
4068 return fold_build2 (EQ_EXPR, type, exp,
4069 fold_convert (etype, low));
4071 if (integer_zerop (low))
4073 if (! TYPE_UNSIGNED (etype))
4075 etype = lang_hooks.types.unsigned_type (etype);
4076 high = fold_convert (etype, high);
4077 exp = fold_convert (etype, exp);
4079 return build_range_check (type, exp, 1, 0, high);
4082 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4083 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4085 unsigned HOST_WIDE_INT lo;
4086 HOST_WIDE_INT hi;
4087 int prec;
4089 prec = TYPE_PRECISION (etype);
4090 if (prec <= HOST_BITS_PER_WIDE_INT)
4092 hi = 0;
4093 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4095 else
4097 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4098 lo = (unsigned HOST_WIDE_INT) -1;
4101 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4103 if (TYPE_UNSIGNED (etype))
4105 etype = lang_hooks.types.signed_type (etype);
4106 exp = fold_convert (etype, exp);
4108 return fold_build2 (GT_EXPR, type, exp,
4109 build_int_cst (etype, 0));
4113 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4114 This requires wrap-around arithmetics for the type of the expression. */
4115 switch (TREE_CODE (etype))
4117 case INTEGER_TYPE:
4118 /* There is no requirement that LOW be within the range of ETYPE
4119 if the latter is a subtype. It must, however, be within the base
4120 type of ETYPE. So be sure we do the subtraction in that type. */
4121 if (TREE_TYPE (etype))
4122 etype = TREE_TYPE (etype);
4123 break;
4125 case ENUMERAL_TYPE:
4126 case BOOLEAN_TYPE:
4127 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4128 TYPE_UNSIGNED (etype));
4129 break;
4131 default:
4132 break;
4135 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4136 if (TREE_CODE (etype) == INTEGER_TYPE
4137 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4139 tree utype, minv, maxv;
4141 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4142 for the type in question, as we rely on this here. */
4143 utype = lang_hooks.types.unsigned_type (etype);
4144 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4145 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4146 integer_one_node, 1);
4147 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4149 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4150 minv, 1, maxv, 1)))
4151 etype = utype;
4152 else
4153 return 0;
4156 high = fold_convert (etype, high);
4157 low = fold_convert (etype, low);
4158 exp = fold_convert (etype, exp);
4160 value = const_binop (MINUS_EXPR, high, low, 0);
4162 if (value != 0 && !TREE_OVERFLOW (value))
4163 return build_range_check (type,
4164 fold_build2 (MINUS_EXPR, etype, exp, low),
4165 1, build_int_cst (etype, 0), value);
4167 return 0;
4170 /* Return the predecessor of VAL in its type, handling the infinite case. */
4172 static tree
4173 range_predecessor (tree val)
4175 tree type = TREE_TYPE (val);
4177 if (INTEGRAL_TYPE_P (type)
4178 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4179 return 0;
4180 else
4181 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4184 /* Return the successor of VAL in its type, handling the infinite case. */
4186 static tree
4187 range_successor (tree val)
4189 tree type = TREE_TYPE (val);
4191 if (INTEGRAL_TYPE_P (type)
4192 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4193 return 0;
4194 else
4195 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4198 /* Given two ranges, see if we can merge them into one. Return 1 if we
4199 can, 0 if we can't. Set the output range into the specified parameters. */
4201 static int
4202 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4203 tree high0, int in1_p, tree low1, tree high1)
4205 int no_overlap;
4206 int subset;
4207 int temp;
4208 tree tem;
4209 int in_p;
4210 tree low, high;
4211 int lowequal = ((low0 == 0 && low1 == 0)
4212 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4213 low0, 0, low1, 0)));
4214 int highequal = ((high0 == 0 && high1 == 0)
4215 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4216 high0, 1, high1, 1)));
4218 /* Make range 0 be the range that starts first, or ends last if they
4219 start at the same value. Swap them if it isn't. */
4220 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4221 low0, 0, low1, 0))
4222 || (lowequal
4223 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4224 high1, 1, high0, 1))))
4226 temp = in0_p, in0_p = in1_p, in1_p = temp;
4227 tem = low0, low0 = low1, low1 = tem;
4228 tem = high0, high0 = high1, high1 = tem;
4231 /* Now flag two cases, whether the ranges are disjoint or whether the
4232 second range is totally subsumed in the first. Note that the tests
4233 below are simplified by the ones above. */
4234 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4235 high0, 1, low1, 0));
4236 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4237 high1, 1, high0, 1));
4239 /* We now have four cases, depending on whether we are including or
4240 excluding the two ranges. */
4241 if (in0_p && in1_p)
4243 /* If they don't overlap, the result is false. If the second range
4244 is a subset it is the result. Otherwise, the range is from the start
4245 of the second to the end of the first. */
4246 if (no_overlap)
4247 in_p = 0, low = high = 0;
4248 else if (subset)
4249 in_p = 1, low = low1, high = high1;
4250 else
4251 in_p = 1, low = low1, high = high0;
4254 else if (in0_p && ! in1_p)
4256 /* If they don't overlap, the result is the first range. If they are
4257 equal, the result is false. If the second range is a subset of the
4258 first, and the ranges begin at the same place, we go from just after
4259 the end of the second range to the end of the first. If the second
4260 range is not a subset of the first, or if it is a subset and both
4261 ranges end at the same place, the range starts at the start of the
4262 first range and ends just before the second range.
4263 Otherwise, we can't describe this as a single range. */
4264 if (no_overlap)
4265 in_p = 1, low = low0, high = high0;
4266 else if (lowequal && highequal)
4267 in_p = 0, low = high = 0;
4268 else if (subset && lowequal)
4270 low = range_successor (high1);
4271 high = high0;
4272 in_p = (low != 0);
4274 else if (! subset || highequal)
4276 low = low0;
4277 high = range_predecessor (low1);
4278 in_p = (high != 0);
4280 else
4281 return 0;
4284 else if (! in0_p && in1_p)
4286 /* If they don't overlap, the result is the second range. If the second
4287 is a subset of the first, the result is false. Otherwise,
4288 the range starts just after the first range and ends at the
4289 end of the second. */
4290 if (no_overlap)
4291 in_p = 1, low = low1, high = high1;
4292 else if (subset || highequal)
4293 in_p = 0, low = high = 0;
4294 else
4296 low = range_successor (high0);
4297 high = high1;
4298 in_p = (low != 0);
4302 else
4304 /* The case where we are excluding both ranges. Here the complex case
4305 is if they don't overlap. In that case, the only time we have a
4306 range is if they are adjacent. If the second is a subset of the
4307 first, the result is the first. Otherwise, the range to exclude
4308 starts at the beginning of the first range and ends at the end of the
4309 second. */
4310 if (no_overlap)
4312 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4313 range_successor (high0),
4314 1, low1, 0)))
4315 in_p = 0, low = low0, high = high1;
4316 else
4318 /* Canonicalize - [min, x] into - [-, x]. */
4319 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4320 switch (TREE_CODE (TREE_TYPE (low0)))
4322 case ENUMERAL_TYPE:
4323 if (TYPE_PRECISION (TREE_TYPE (low0))
4324 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4325 break;
4326 /* FALLTHROUGH */
4327 case INTEGER_TYPE:
4328 if (tree_int_cst_equal (low0,
4329 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4330 low0 = 0;
4331 break;
4332 case POINTER_TYPE:
4333 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4334 && integer_zerop (low0))
4335 low0 = 0;
4336 break;
4337 default:
4338 break;
4341 /* Canonicalize - [x, max] into - [x, -]. */
4342 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4343 switch (TREE_CODE (TREE_TYPE (high1)))
4345 case ENUMERAL_TYPE:
4346 if (TYPE_PRECISION (TREE_TYPE (high1))
4347 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4348 break;
4349 /* FALLTHROUGH */
4350 case INTEGER_TYPE:
4351 if (tree_int_cst_equal (high1,
4352 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4353 high1 = 0;
4354 break;
4355 case POINTER_TYPE:
4356 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4357 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4358 high1, 1,
4359 integer_one_node, 1)))
4360 high1 = 0;
4361 break;
4362 default:
4363 break;
4366 /* The ranges might be also adjacent between the maximum and
4367 minimum values of the given type. For
4368 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4369 return + [x + 1, y - 1]. */
4370 if (low0 == 0 && high1 == 0)
4372 low = range_successor (high0);
4373 high = range_predecessor (low1);
4374 if (low == 0 || high == 0)
4375 return 0;
4377 in_p = 1;
4379 else
4380 return 0;
4383 else if (subset)
4384 in_p = 0, low = low0, high = high0;
4385 else
4386 in_p = 0, low = low0, high = high1;
4389 *pin_p = in_p, *plow = low, *phigh = high;
4390 return 1;
4394 /* Subroutine of fold, looking inside expressions of the form
4395 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4396 of the COND_EXPR. This function is being used also to optimize
4397 A op B ? C : A, by reversing the comparison first.
4399 Return a folded expression whose code is not a COND_EXPR
4400 anymore, or NULL_TREE if no folding opportunity is found. */
4402 static tree
4403 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4405 enum tree_code comp_code = TREE_CODE (arg0);
4406 tree arg00 = TREE_OPERAND (arg0, 0);
4407 tree arg01 = TREE_OPERAND (arg0, 1);
4408 tree arg1_type = TREE_TYPE (arg1);
4409 tree tem;
/* Strip conversions so the structural pattern checks below see the
   underlying operands of the two COND_EXPR arms.  */
4411 STRIP_NOPS (arg1);
4412 STRIP_NOPS (arg2);
4414 /* If we have A op 0 ? A : -A, consider applying the following
4415 transformations:
4417 A == 0? A : -A same as -A
4418 A != 0? A : -A same as A
4419 A >= 0? A : -A same as abs (A)
4420 A > 0? A : -A same as abs (A)
4421 A <= 0? A : -A same as -abs (A)
4422 A < 0? A : -A same as -abs (A)
4424 None of these transformations work for modes with signed
4425 zeros. If A is +/-0, the first two transformations will
4426 change the sign of the result (from +0 to -0, or vice
4427 versa). The last four will fix the sign of the result,
4428 even though the original expressions could be positive or
4429 negative, depending on the sign of A.
4431 Note that all these transformations are correct if A is
4432 NaN, since the two alternatives (A and -A) are also NaNs. */
4433 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4434 ? real_zerop (arg01)
4435 : integer_zerop (arg01))
4436 && ((TREE_CODE (arg2) == NEGATE_EXPR
4437 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4438 /* In the case that A is of the form X-Y, '-A' (arg2) may
4439 have already been folded to Y-X, check for that. */
4440 || (TREE_CODE (arg1) == MINUS_EXPR
4441 && TREE_CODE (arg2) == MINUS_EXPR
4442 && operand_equal_p (TREE_OPERAND (arg1, 0),
4443 TREE_OPERAND (arg2, 1), 0)
4444 && operand_equal_p (TREE_OPERAND (arg1, 1),
4445 TREE_OPERAND (arg2, 0), 0))))
4446 switch (comp_code)
4448 case EQ_EXPR:
4449 case UNEQ_EXPR:
4450 tem = fold_convert (arg1_type, arg1);
4451 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4452 case NE_EXPR:
4453 case LTGT_EXPR:
4454 return pedantic_non_lvalue (fold_convert (type, arg1));
4455 case UNGE_EXPR:
4456 case UNGT_EXPR:
4457 if (flag_trapping_math)
4458 break;
4459 /* Fall through. */
4460 case GE_EXPR:
4461 case GT_EXPR:
/* Build the ABS_EXPR on the signed variant of the type.  */
4462 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4463 arg1 = fold_convert (lang_hooks.types.signed_type
4464 (TREE_TYPE (arg1)), arg1);
4465 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4466 return pedantic_non_lvalue (fold_convert (type, tem));
4467 case UNLE_EXPR:
4468 case UNLT_EXPR:
4469 if (flag_trapping_math)
4470 break;
/* Fall through.  */
4471 case LE_EXPR:
4472 case LT_EXPR:
4473 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4474 arg1 = fold_convert (lang_hooks.types.signed_type
4475 (TREE_TYPE (arg1)), arg1);
4476 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4477 return negate_expr (fold_convert (type, tem));
4478 default:
4479 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4480 break;
4483 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4484 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4485 both transformations are correct when A is NaN: A != 0
4486 is then true, and A == 0 is false. */
4488 if (integer_zerop (arg01) && integer_zerop (arg2))
4490 if (comp_code == NE_EXPR)
4491 return pedantic_non_lvalue (fold_convert (type, arg1));
4492 else if (comp_code == EQ_EXPR)
4493 return build_int_cst (type, 0);
4496 /* Try some transformations of A op B ? A : B.
4498 A == B? A : B same as B
4499 A != B? A : B same as A
4500 A >= B? A : B same as max (A, B)
4501 A > B? A : B same as max (B, A)
4502 A <= B? A : B same as min (A, B)
4503 A < B? A : B same as min (B, A)
4505 As above, these transformations don't work in the presence
4506 of signed zeros. For example, if A and B are zeros of
4507 opposite sign, the first two transformations will change
4508 the sign of the result. In the last four, the original
4509 expressions give different results for (A=+0, B=-0) and
4510 (A=-0, B=+0), but the transformed expressions do not.
4512 The first two transformations are correct if either A or B
4513 is a NaN. In the first transformation, the condition will
4514 be false, and B will indeed be chosen. In the case of the
4515 second transformation, the condition A != B will be true,
4516 and A will be chosen.
4518 The conversions to max() and min() are not correct if B is
4519 a number and A is not. The conditions in the original
4520 expressions will be false, so all four give B. The min()
4521 and max() versions would give a NaN instead. */
4522 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4523 /* Avoid these transformations if the COND_EXPR may be used
4524 as an lvalue in the C++ front-end. PR c++/19199. */
4525 && (in_gimple_form
4526 || strcmp (lang_hooks.name, "GNU C++") != 0
4527 || ! maybe_lvalue_p (arg1)
4528 || ! maybe_lvalue_p (arg2)))
4530 tree comp_op0 = arg00;
4531 tree comp_op1 = arg01;
4532 tree comp_type = TREE_TYPE (comp_op0);
4534 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4535 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4537 comp_type = type;
4538 comp_op0 = arg1;
4539 comp_op1 = arg2;
4542 switch (comp_code)
4544 case EQ_EXPR:
4545 return pedantic_non_lvalue (fold_convert (type, arg2));
4546 case NE_EXPR:
4547 return pedantic_non_lvalue (fold_convert (type, arg1));
4548 case LE_EXPR:
4549 case LT_EXPR:
4550 case UNLE_EXPR:
4551 case UNLT_EXPR:
4552 /* In C++ a ?: expression can be an lvalue, so put the
4553 operand which will be used if they are equal first
4554 so that we can convert this back to the
4555 corresponding COND_EXPR. */
4556 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4558 comp_op0 = fold_convert (comp_type, comp_op0);
4559 comp_op1 = fold_convert (comp_type, comp_op1);
4560 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4561 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4562 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4563 return pedantic_non_lvalue (fold_convert (type, tem));
4565 break;
4566 case GE_EXPR:
4567 case GT_EXPR:
4568 case UNGE_EXPR:
4569 case UNGT_EXPR:
4570 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4572 comp_op0 = fold_convert (comp_type, comp_op0);
4573 comp_op1 = fold_convert (comp_type, comp_op1);
4574 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4575 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4576 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4577 return pedantic_non_lvalue (fold_convert (type, tem));
4579 break;
4580 case UNEQ_EXPR:
4581 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4582 return pedantic_non_lvalue (fold_convert (type, arg2));
4583 break;
4584 case LTGT_EXPR:
4585 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4586 return pedantic_non_lvalue (fold_convert (type, arg1));
4587 break;
4588 default:
4589 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4590 break;
4594 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4595 we might still be able to simplify this. For example,
4596 if C1 is one less or one more than C2, this might have started
4597 out as a MIN or MAX and been transformed by this function.
4598 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4600 if (INTEGRAL_TYPE_P (type)
4601 && TREE_CODE (arg01) == INTEGER_CST
4602 && TREE_CODE (arg2) == INTEGER_CST)
4603 switch (comp_code)
4605 case EQ_EXPR:
4606 /* We can replace A with C1 in this case. */
4607 arg1 = fold_convert (type, arg01);
4608 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4610 case LT_EXPR:
4611 /* If C1 is C2 + 1, this is min(A, C2). */
/* Excluding TYPE_MAX_VALUE guarantees the C2 + 1 below cannot wrap.  */
4612 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4613 OEP_ONLY_CONST)
4614 && operand_equal_p (arg01,
4615 const_binop (PLUS_EXPR, arg2,
4616 integer_one_node, 0),
4617 OEP_ONLY_CONST))
4618 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4619 type, arg1, arg2));
4620 break;
4622 case LE_EXPR:
4623 /* If C1 is C2 - 1, this is min(A, C2). */
4624 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4625 OEP_ONLY_CONST)
4626 && operand_equal_p (arg01,
4627 const_binop (MINUS_EXPR, arg2,
4628 integer_one_node, 0),
4629 OEP_ONLY_CONST))
4630 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4631 type, arg1, arg2));
4632 break;
4634 case GT_EXPR:
4635 /* If C1 is C2 - 1, this is max(A, C2). */
4636 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4637 OEP_ONLY_CONST)
4638 && operand_equal_p (arg01,
4639 const_binop (MINUS_EXPR, arg2,
4640 integer_one_node, 0),
4641 OEP_ONLY_CONST))
4642 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4643 type, arg1, arg2));
4644 break;
4646 case GE_EXPR:
4647 /* If C1 is C2 + 1, this is max(A, C2). */
4648 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4649 OEP_ONLY_CONST)
4650 && operand_equal_p (arg01,
4651 const_binop (PLUS_EXPR, arg2,
4652 integer_one_node, 0),
4653 OEP_ONLY_CONST))
4654 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4655 type, arg1, arg2));
4656 break;
4657 case NE_EXPR:
4658 break;
4659 default:
4660 gcc_unreachable ();
4663 return NULL_TREE;
/* Nonzero means the folders below may rewrite a short-circuit logical
   operation into its unconditional (non-short-circuit) form.  Targets may
   override this; the default enables it when branches are expensive
   (BRANCH_COST >= 2).  */
4668 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4669 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4670 #endif
4672 /* EXP is some logical combination of boolean tests. See if we can
4673 merge it into some range test. Return the new tree if so. */
4675 static tree
4676 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4678 int or_op = (code == TRUTH_ORIF_EXPR
4679 || code == TRUTH_OR_EXPR)
4680 int in0_p, in1_p, in_p;
4681 tree low0, low1, low, high0, high1, high;
/* Decompose each operand into an "in/out of [low, high]" range test.  */
4682 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4683 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4684 tree tem;
4686 /* If this is an OR operation, invert both sides; we will invert
4687 again at the end. */
4688 if (or_op)
4689 in0_p = ! in0_p, in1_p = ! in1_p;
4691 /* If both expressions are the same, if we can merge the ranges, and we
4692 can build the range test, return it or it inverted. If one of the
4693 ranges is always true or always false, consider it to be the same
4694 expression as the other. */
4695 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4696 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4697 in1_p, low1, high1)
4698 && 0 != (tem = (build_range_check (type,
4699 lhs != 0 ? lhs
4700 : rhs != 0 ? rhs : integer_zero_node,
4701 in_p, low, high))))
4702 return or_op ? invert_truthvalue (tem) : tem;
4704 /* On machines where the branch cost is expensive, if this is a
4705 short-circuited branch and the underlying object on both sides
4706 is the same, make a non-short-circuit operation. */
4707 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4708 && lhs != 0 && rhs != 0
4709 && (code == TRUTH_ANDIF_EXPR
4710 || code == TRUTH_ORIF_EXPR)
4711 && operand_equal_p (lhs, rhs, 0))
4713 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4714 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4715 which cases we can't do this. */
4716 if (simple_operand_p (lhs))
4717 return build2 (code == TRUTH_ANDIF_EXPR
4718 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4719 type, op0, op1);
4721 else if (lang_hooks.decls.global_bindings_p () == 0
4722 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Wrap the shared operand in a SAVE_EXPR so it is evaluated once
   and reused by both range checks below.  */
4724 tree common = save_expr (lhs);
4726 if (0 != (lhs = build_range_check (type, common,
4727 or_op ? ! in0_p : in0_p,
4728 low0, high0))
4729 && (0 != (rhs = build_range_check (type, common,
4730 or_op ? ! in1_p : in1_p,
4731 low1, high1))))
4732 return build2 (code == TRUTH_ANDIF_EXPR
4733 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4734 type, lhs, rhs);
4738 return 0;
4741 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4742 bit value. Arrange things so the extra bits will be set to zero if and
4743 only if C is signed-extended to its full width. If MASK is nonzero,
4744 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4746 static tree
4747 unextend (tree c, int p, int unsignedp, tree mask)
4749 tree type = TREE_TYPE (c);
4750 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4751 tree temp;
4753 if (p == modesize || unsignedp)
4754 return c;
4756 /* We work by getting just the sign bit into the low-order bit, then
4757 into the high-order bit, then sign-extend. We then XOR that value
4758 with C. */
4759 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4760 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4762 /* We must use a signed type in order to get an arithmetic right shift.
4763 However, we must also avoid introducing accidental overflows, so that
4764 a subsequent call to integer_zerop will work. Hence we must
4765 do the type conversion here. At this point, the constant is either
4766 zero or one, and the conversion to a signed type can never overflow.
4767 We could get an overflow if this conversion is done anywhere else. */
4768 if (TYPE_UNSIGNED (type))
4769 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4771 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4772 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4773 if (mask != 0)
4774 temp = const_binop (BIT_AND_EXPR, temp,
4775 fold_convert (TREE_TYPE (c), mask), 0);
4776 /* If necessary, convert the type back to match the type of C. */
4777 if (TYPE_UNSIGNED (type))
4778 temp = fold_convert (type, temp);
4780 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4783 /* Find ways of folding logical expressions of LHS and RHS:
4784 Try to merge two comparisons to the same innermost item.
4785 Look for range tests like "ch >= '0' && ch <= '9'".
4786 Look for combinations of simple terms on machines with expensive branches
4787 and evaluate the RHS unconditionally.
4789 For example, if we have p->a == 2 && p->b == 4 and we can make an
4790 object large enough to span both A and B, we can do this with a comparison
4791 against the object ANDed with the a mask.
4793 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4794 operations to do this with one comparison.
4796 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4797 function and the one above.
4799 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4800 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4802 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4803 two operands.
4805 We return the simplified tree or 0 if no optimization is possible. */
4807 static tree
4808 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4810 /* If this is the "or" of two comparisons, we can do something if
4811 the comparisons are NE_EXPR. If this is the "and", we can do something
4812 if the comparisons are EQ_EXPR. I.e.,
4813 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4815 WANTED_CODE is this operation code. For single bit fields, we can
4816 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4817 comparison for one-bit fields. */
4819 enum tree_code wanted_code;
4820 enum tree_code lcode, rcode;
4821 tree ll_arg, lr_arg, rl_arg, rr_arg;
4822 tree ll_inner, lr_inner, rl_inner, rr_inner;
4823 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4824 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4825 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4826 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4827 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4828 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4829 enum machine_mode lnmode, rnmode;
4830 tree ll_mask, lr_mask, rl_mask, rr_mask;
4831 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4832 tree l_const, r_const;
4833 tree lntype, rntype, result;
4834 int first_bit, end_bit;
4835 int volatilep;
4837 /* Start by getting the comparison codes. Fail if anything is volatile.
4838 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4839 it were surrounded with a NE_EXPR. */
4841 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4842 return 0;
4844 lcode = TREE_CODE (lhs);
4845 rcode = TREE_CODE (rhs);
4847 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4849 lhs = build2 (NE_EXPR, truth_type, lhs,
4850 build_int_cst (TREE_TYPE (lhs), 0));
4851 lcode = NE_EXPR;
4854 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4856 rhs = build2 (NE_EXPR, truth_type, rhs,
4857 build_int_cst (TREE_TYPE (rhs), 0));
4858 rcode = NE_EXPR;
4861 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4862 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4863 return 0;
4865 ll_arg = TREE_OPERAND (lhs, 0);
4866 lr_arg = TREE_OPERAND (lhs, 1);
4867 rl_arg = TREE_OPERAND (rhs, 0);
4868 rr_arg = TREE_OPERAND (rhs, 1);
4870 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4871 if (simple_operand_p (ll_arg)
4872 && simple_operand_p (lr_arg))
4874 tree result;
4875 if (operand_equal_p (ll_arg, rl_arg, 0)
4876 && operand_equal_p (lr_arg, rr_arg, 0))
4878 result = combine_comparisons (code, lcode, rcode,
4879 truth_type, ll_arg, lr_arg);
4880 if (result)
4881 return result;
4883 else if (operand_equal_p (ll_arg, rr_arg, 0)
4884 && operand_equal_p (lr_arg, rl_arg, 0))
/* The right comparison has its operands swapped relative to the left;
   swap its comparison code so the same combination rules apply.  */
4886 result = combine_comparisons (code, lcode,
4887 swap_tree_comparison (rcode),
4888 truth_type, ll_arg, lr_arg);
4889 if (result)
4890 return result;
/* From here on, work with the non-short-circuit form of the operation.  */
4894 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4895 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4897 /* If the RHS can be evaluated unconditionally and its operands are
4898 simple, it wins to evaluate the RHS unconditionally on machines
4899 with expensive branches. In this case, this isn't a comparison
4900 that can be merged. Avoid doing this if the RHS is a floating-point
4901 comparison since those can trap. */
4903 if (BRANCH_COST >= 2
4904 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4905 && simple_operand_p (rl_arg)
4906 && simple_operand_p (rr_arg))
4908 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4909 if (code == TRUTH_OR_EXPR
4910 && lcode == NE_EXPR && integer_zerop (lr_arg)
4911 && rcode == NE_EXPR && integer_zerop (rr_arg)
4912 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4913 return build2 (NE_EXPR, truth_type,
4914 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4915 ll_arg, rl_arg),
4916 build_int_cst (TREE_TYPE (ll_arg), 0));
4918 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4919 if (code == TRUTH_AND_EXPR
4920 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4921 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4922 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4923 return build2 (EQ_EXPR, truth_type,
4924 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4925 ll_arg, rl_arg),
4926 build_int_cst (TREE_TYPE (ll_arg), 0));
4928 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4929 return build2 (code, truth_type, lhs, rhs);
4932 /* See if the comparisons can be merged. Then get all the parameters for
4933 each side. */
4935 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4936 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4937 return 0;
4939 volatilep = 0;
4940 ll_inner = decode_field_reference (ll_arg,
4941 &ll_bitsize, &ll_bitpos, &ll_mode,
4942 &ll_unsignedp, &volatilep, &ll_mask,
4943 &ll_and_mask);
4944 lr_inner = decode_field_reference (lr_arg,
4945 &lr_bitsize, &lr_bitpos, &lr_mode,
4946 &lr_unsignedp, &volatilep, &lr_mask,
4947 &lr_and_mask);
4948 rl_inner = decode_field_reference (rl_arg,
4949 &rl_bitsize, &rl_bitpos, &rl_mode,
4950 &rl_unsignedp, &volatilep, &rl_mask,
4951 &rl_and_mask);
4952 rr_inner = decode_field_reference (rr_arg,
4953 &rr_bitsize, &rr_bitpos, &rr_mode,
4954 &rr_unsignedp, &volatilep, &rr_mask,
4955 &rr_and_mask);
4957 /* It must be true that the inner operation on the lhs of each
4958 comparison must be the same if we are to be able to do anything.
4959 Then see if we have constants. If not, the same must be true for
4960 the rhs's. */
4961 if (volatilep || ll_inner == 0 || rl_inner == 0
4962 || ! operand_equal_p (ll_inner, rl_inner, 0))
4963 return 0;
4965 if (TREE_CODE (lr_arg) == INTEGER_CST
4966 && TREE_CODE (rr_arg) == INTEGER_CST)
4967 l_const = lr_arg, r_const = rr_arg;
4968 else if (lr_inner == 0 || rr_inner == 0
4969 || ! operand_equal_p (lr_inner, rr_inner, 0))
4970 return 0;
4971 else
4972 l_const = r_const = 0;
4974 /* If either comparison code is not correct for our logical operation,
4975 fail. However, we can convert a one-bit comparison against zero into
4976 the opposite comparison against that bit being set in the field. */
4978 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4979 if (lcode != wanted_code)
4981 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4983 /* Make the left operand unsigned, since we are only interested
4984 in the value of one bit. Otherwise we are doing the wrong
4985 thing below. */
4986 ll_unsignedp = 1;
4987 l_const = ll_mask;
4989 else
4990 return 0;
4993 /* This is analogous to the code for l_const above. */
4994 if (rcode != wanted_code)
4996 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4998 rl_unsignedp = 1;
4999 r_const = rl_mask;
5001 else
5002 return 0;
5005 /* After this point all optimizations will generate bit-field
5006 references, which we might not want. */
5007 if (! lang_hooks.can_use_bit_fields_p ())
5008 return 0;
5010 /* See if we can find a mode that contains both fields being compared on
5011 the left. If we can't, fail. Otherwise, update all constants and masks
5012 to be relative to a field of that size. */
5013 first_bit = MIN (ll_bitpos, rl_bitpos);
5014 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5015 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5016 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5017 volatilep);
5018 if (lnmode == VOIDmode)
5019 return 0;
5021 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the start of the combined field down to a mode-size boundary.  */
5022 lnbitpos = first_bit & ~ (lnbitsize - 1);
5023 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5024 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5026 if (BYTES_BIG_ENDIAN)
5028 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5029 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5032 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5033 size_int (xll_bitpos), 0);
5034 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5035 size_int (xrl_bitpos), 0);
5037 if (l_const)
5039 l_const = fold_convert (lntype, l_const);
5040 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5041 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* If the shifted constant has bits set outside the field's mask, the
   comparison can never vary; warn and fold to a constant.  */
5042 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5043 fold_build1 (BIT_NOT_EXPR,
5044 lntype, ll_mask),
5045 0)))
5047 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5049 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5052 if (r_const)
5054 r_const = fold_convert (lntype, r_const);
5055 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5056 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5057 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5058 fold_build1 (BIT_NOT_EXPR,
5059 lntype, rl_mask),
5060 0)))
5062 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5064 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5068 /* If the right sides are not constant, do the same for it. Also,
5069 disallow this optimization if a size or signedness mismatch occurs
5070 between the left and right sides. */
5071 if (l_const == 0)
5073 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5074 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5075 /* Make sure the two fields on the right
5076 correspond to the left without being swapped. */
5077 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5078 return 0;
5080 first_bit = MIN (lr_bitpos, rr_bitpos);
5081 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5082 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5083 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5084 volatilep);
5085 if (rnmode == VOIDmode)
5086 return 0;
5088 rnbitsize = GET_MODE_BITSIZE (rnmode);
5089 rnbitpos = first_bit & ~ (rnbitsize - 1);
5090 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5091 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5093 if (BYTES_BIG_ENDIAN)
5095 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5096 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5099 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5100 size_int (xlr_bitpos), 0);
5101 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5102 size_int (xrr_bitpos), 0);
5104 /* Make a mask that corresponds to both fields being compared.
5105 Do this for both items being compared. If the operands are the
5106 same size and the bits being compared are in the same position
5107 then we can do this by masking both and comparing the masked
5108 results. */
5109 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5110 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5111 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5113 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5114 ll_unsignedp || rl_unsignedp);
5115 if (! all_ones_mask_p (ll_mask, lnbitsize))
5116 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5118 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5119 lr_unsignedp || rr_unsignedp);
5120 if (! all_ones_mask_p (lr_mask, rnbitsize))
5121 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5123 return build2 (wanted_code, truth_type, lhs, rhs);
5126 /* There is still another way we can do something: If both pairs of
5127 fields being compared are adjacent, we may be able to make a wider
5128 field containing them both.
5130 Note that we still must mask the lhs/rhs expressions. Furthermore,
5131 the mask must be shifted to account for the shift done by
5132 make_bit_field_ref. */
5133 if ((ll_bitsize + ll_bitpos == rl_bitpos
5134 && lr_bitsize + lr_bitpos == rr_bitpos)
5135 || (ll_bitpos == rl_bitpos + rl_bitsize
5136 && lr_bitpos == rr_bitpos + rr_bitsize))
5138 tree type;
5140 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5141 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5142 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5143 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5145 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5146 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5147 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5148 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5150 /* Convert to the smaller type before masking out unwanted bits. */
5151 type = lntype;
5152 if (lntype != rntype)
5154 if (lnbitsize > rnbitsize)
5156 lhs = fold_convert (rntype, lhs);
5157 ll_mask = fold_convert (rntype, ll_mask);
5158 type = rntype;
5160 else if (lnbitsize < rnbitsize)
5162 rhs = fold_convert (lntype, rhs);
5163 lr_mask = fold_convert (lntype, lr_mask);
5164 type = lntype;
5168 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5169 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5171 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5172 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5174 return build2 (wanted_code, truth_type, lhs, rhs);
5177 return 0;
5180 /* Handle the case of comparisons with constants. If there is something in
5181 common between the masks, those bits of the constants must be the same.
5182 If not, the condition is always false. Test for this to avoid generating
5183 incorrect code below. */
5184 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5185 if (! integer_zerop (result)
5186 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5187 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5189 if (wanted_code == NE_EXPR)
5191 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5192 return constant_boolean_node (true, truth_type);
5194 else
5196 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5197 return constant_boolean_node (false, truth_type);
5201 /* Construct the expression we will return. First get the component
5202 reference we will make. Unless the mask is all ones the width of
5203 that field, perform the mask operation. Then compare with the
5204 merged constant. */
5205 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5206 ll_unsignedp || rl_unsignedp);
5208 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5209 if (! all_ones_mask_p (ll_mask, lnbitsize))
5210 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5212 return build2 (wanted_code, truth_type, result,
5213 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  CODE is the comparison code, TYPE the type of the result,
   OP0 the MIN_EXPR/MAX_EXPR (possibly wrapped in sign-preserving
   conversions) and OP1 the constant it is compared against.  Returns
   the folded comparison, or NULL_TREE if no simplification applies.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  /* Look through sign-preserving conversions to find the MIN/MAX node.  */
  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.
     Both constants must be INTEGER_CSTs without overflow for the constant
     comparisons below to be meaningful.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	/* FIXME: We should be able to invert code without building a
	   scratch tree node, but doing so would require us to
	   duplicate a part of invert_truthvalue here.  */
	tree tem = invert_truthvalue (build2 (code, type, op0, op1));
	tem = optimize_minmax_comparison (TREE_CODE (tem),
					  TREE_TYPE (tem),
					  TREE_OPERAND (tem, 0),
					  TREE_OPERAND (tem, 1));
	return invert_truthvalue (tem);
      }

    case GE_EXPR:
      /* a >= b is a == b || a > b; fold each arm recursively.  */
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
5320 /* T is an integer expression that is being multiplied, divided, or taken a
5321 modulus (CODE says which and what kind of divide or modulus) by a
5322 constant C. See if we can eliminate that operation by folding it with
5323 other operations already in T. WIDE_TYPE, if non-null, is a type that
5324 should be used for the computation if wider than our type.
5326 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5327 (X * 2) + (Y * 4). We must, however, be assured that either the original
5328 expression would not overflow or that overflow is undefined for the type
5329 in the language in question.
5331 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5332 the machine has a multiply-accumulate insn or that this is part of an
5333 addressing calculation.
5335 If we return a non-null expression, it is an equivalent form of the
5336 original computation, but need not be in the original type. */
5338 static tree
5339 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5341 /* To avoid exponential search depth, refuse to allow recursion past
5342 three levels. Beyond that (1) it's highly unlikely that we'll find
5343 something interesting and (2) we've probably processed it before
5344 when we built the inner expression. */
5346 static int depth;
5347 tree ret;
5349 if (depth > 3)
5350 return NULL;
5352 depth++;
5353 ret = extract_muldiv_1 (t, c, code, wide_type);
5354 depth--;
5356 return ret;
/* Worker for extract_muldiv; see the comment there for the contract.
   The recursion depth has already been bounded by the caller.  */

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE only when it is strictly wider than T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* Distribute the operation into the inner operand of the unary
	 node and rebuild the unary node on top of the result.  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* Multiplying or dividing by a negative constant flips the
	     sense of MIN/MAX.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
5640 /* Return a node which has the indicated constant VALUE (either 0 or
5641 1), and is of the indicated TYPE. */
5643 tree
5644 constant_boolean_node (int value, tree type)
5646 if (type == integer_type_node)
5647 return value ? integer_one_node : integer_zero_node;
5648 else if (type == boolean_type_node)
5649 return value ? boolean_true_node : boolean_false_node;
5650 else
5651 return build_int_cst (type, value);
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
   in bytes to the base.  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
	 folders present us (int *)&x.a + 4B possibly.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
	{
	  *base = inner_base;
	  /* Accumulate the inner offset (if any) with our addend.  */
	  if (dummy1 == NULL_TREE)
	    *offset = TREE_OPERAND (expr, 1);
	  else
	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
				   dummy1, TREE_OPERAND (expr, 1));
	  return true;
	}
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  /* Offset is index scaled by the element size, in bytes.  */
	  tree idx = TREE_OPERAND (op0, 1);
	  *base = TREE_OPERAND (op0, 0);
	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
				 array_ref_element_size (op0));
	}
      else
	{
	  /* Handle array-to-pointer decay as &a.  */
	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
	    *base = TREE_OPERAND (expr, 0);
	  else
	    *base = expr;
	  *offset = NULL_TREE;
	}
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      /* COND is a comparison; treat it as COND ? true : false.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  /* Build each branch unless the void-typed check above already
     pinned it to the raw COND operand.  */
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
5795 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5797 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5798 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5799 ADDEND is the same as X.
5801 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5802 and finite. The problematic cases are when X is zero, and its mode
5803 has signed zeros. In the case of rounding towards -infinity,
5804 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5805 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5807 static bool
5808 fold_real_zero_addition_p (tree type, tree addend, int negate)
5810 if (!real_zerop (addend))
5811 return false;
5813 /* Don't allow the fold with -fsignaling-nans. */
5814 if (HONOR_SNANS (TYPE_MODE (type)))
5815 return false;
5817 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5818 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5819 return true;
5821 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5822 if (TREE_CODE (addend) == REAL_CST
5823 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5824 negate = !negate;
5826 /* The mode has signed zeros, and we have to honor their sign.
5827 In this situation, there is only one case we can return true for.
5828 X - 0 is the same as X unless rounding towards -infinity is
5829 supported. */
5830 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      /* ARG0 is a call to sqrt; ARG is sqrt's argument.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the bound; sqrt(x) > c becomes x > c*c below.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
		 We need a SAVE_EXPR for ARG, which is unsafe at global
		 binding level or inside a PLACEHOLDER_EXPR context.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, unless we honor signaling NaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST
   (the code below reads its INT_CST words directly; the original
   comment claiming TREE_REAL_CST looks like a copy-paste from the
   Inf-comparison routine).

   The strategy is to compute the range [LO, HI] of X values for which
   X/C1 equals ARG1, then rewrite the comparison as a range check on X.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);
  neg_overflow = false;

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      /* Unsigned case: range is [prod, prod + (arg01 - 1)].  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      /* Signed case, positive divisor: the range direction depends on
	 the sign of the quotient ARG1 (truncating division rounds
	 towards zero).  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Rewrite the comparison as a check of ARG00 against [LO, HI].  An
     overflowed bound means that side of the range is unbounded, so
     the corresponding check collapses or disappears.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6203 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6204 equality/inequality test, then return a simplified form of the test
6205 using a sign testing. Otherwise return NULL. TYPE is the desired
6206 result type. */
6208 static tree
6209 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6210 tree result_type)
6212 /* If this is testing a single bit, we can optimize the test. */
6213 if ((code == NE_EXPR || code == EQ_EXPR)
6214 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6215 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6217 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6218 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6219 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6221 if (arg00 != NULL_TREE
6222 /* This is only a win if casting to a signed type is cheap,
6223 i.e. when arg00's type is not a partial mode. */
6224 && TYPE_PRECISION (TREE_TYPE (arg00))
6225 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6227 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6228 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6229 result_type, fold_convert (stype, arg00),
6230 build_int_cst (stype, 0));
6234 return NULL_TREE;
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      /* Position of the single set bit in the mask.  */
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  This folds the two
	 shifts into one.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      /* Shift the selected bit down into bit position zero.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      /* For an == 0 test, flip the extracted bit so the result is 1
	 when the bit is clear.  */
      if (code == EQ_EXPR)
	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
			     inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6318 /* Check whether we are allowed to reorder operands arg0 and arg1,
6319 such that the evaluation of arg1 occurs before arg0. */
6321 static bool
6322 reorder_operands_p (tree arg0, tree arg1)
6324 if (! flag_evaluation_order)
6325 return true;
6326 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6327 return true;
6328 return ! TREE_SIDE_EFFECTS (arg0)
6329 && ! TREE_SIDE_EFFECTS (arg1);
6332 /* Test whether it is preferable two swap two operands, ARG0 and
6333 ARG1, for example because ARG0 is an integer constant and ARG1
6334 isn't. If REORDER is true, only recommend swapping if we can
6335 evaluate the operands in reverse order. */
6337 bool
6338 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6340 STRIP_SIGN_NOPS (arg0);
6341 STRIP_SIGN_NOPS (arg1);
6343 if (TREE_CODE (arg1) == INTEGER_CST)
6344 return 0;
6345 if (TREE_CODE (arg0) == INTEGER_CST)
6346 return 1;
6348 if (TREE_CODE (arg1) == REAL_CST)
6349 return 0;
6350 if (TREE_CODE (arg0) == REAL_CST)
6351 return 1;
6353 if (TREE_CODE (arg1) == COMPLEX_CST)
6354 return 0;
6355 if (TREE_CODE (arg0) == COMPLEX_CST)
6356 return 1;
6358 if (TREE_CONSTANT (arg1))
6359 return 0;
6360 if (TREE_CONSTANT (arg0))
6361 return 1;
6363 if (optimize_size)
6364 return 0;
6366 if (reorder && flag_evaluation_order
6367 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6368 return 0;
6370 if (DECL_P (arg1))
6371 return 0;
6372 if (DECL_P (arg0))
6373 return 1;
6375 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6376 for commutative and comparison operators. Ensuring a canonical
6377 form allows the optimizers to find additional redundancies without
6378 having to explicitly check for both orderings. */
6379 if (TREE_CODE (arg0) == SSA_NAME
6380 && TREE_CODE (arg1) == SSA_NAME
6381 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6382 return 1;
6384 return 0;
6387 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6388 ARG0 is extended to a wider type. */
6390 static tree
6391 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6393 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6394 tree arg1_unw;
6395 tree shorter_type, outer_type;
6396 tree min, max;
6397 bool above, below;
6399 if (arg0_unw == arg0)
6400 return NULL_TREE;
6401 shorter_type = TREE_TYPE (arg0_unw);
6403 #ifdef HAVE_canonicalize_funcptr_for_compare
6404 /* Disable this optimization if we're casting a function pointer
6405 type on targets that require function pointer canonicalization. */
6406 if (HAVE_canonicalize_funcptr_for_compare
6407 && TREE_CODE (shorter_type) == POINTER_TYPE
6408 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6409 return NULL_TREE;
6410 #endif
6412 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6413 return NULL_TREE;
6415 arg1_unw = get_unwidened (arg1, shorter_type);
6417 /* If possible, express the comparison in the shorter mode. */
6418 if ((code == EQ_EXPR || code == NE_EXPR
6419 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6420 && (TREE_TYPE (arg1_unw) == shorter_type
6421 || (TREE_CODE (arg1_unw) == INTEGER_CST
6422 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6423 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6424 && int_fits_type_p (arg1_unw, shorter_type))))
6425 return fold_build2 (code, type, arg0_unw,
6426 fold_convert (shorter_type, arg1_unw));
6428 if (TREE_CODE (arg1_unw) != INTEGER_CST
6429 || TREE_CODE (shorter_type) != INTEGER_TYPE
6430 || !int_fits_type_p (arg1_unw, shorter_type))
6431 return NULL_TREE;
6433 /* If we are comparing with the integer that does not fit into the range
6434 of the shorter type, the result is known. */
6435 outer_type = TREE_TYPE (arg1_unw);
6436 min = lower_bound_in_type (outer_type, shorter_type);
6437 max = upper_bound_in_type (outer_type, shorter_type);
6439 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6440 max, arg1_unw));
6441 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6442 arg1_unw, min));
6444 switch (code)
6446 case EQ_EXPR:
6447 if (above || below)
6448 return omit_one_operand (type, integer_zero_node, arg0);
6449 break;
6451 case NE_EXPR:
6452 if (above || below)
6453 return omit_one_operand (type, integer_one_node, arg0);
6454 break;
6456 case LT_EXPR:
6457 case LE_EXPR:
6458 if (above)
6459 return omit_one_operand (type, integer_one_node, arg0);
6460 else if (below)
6461 return omit_one_operand (type, integer_zero_node, arg0);
6463 case GT_EXPR:
6464 case GE_EXPR:
6465 if (above)
6466 return omit_one_operand (type, integer_zero_node, arg0);
6467 else if (below)
6468 return omit_one_operand (type, integer_one_node, arg0);
6470 default:
6471 break;
6474 return NULL_TREE;
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  /* ARG0 must be a conversion.  */
  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* The conversion must leave the precision unchanged, i.e. it may
     only change the signedness.  */
  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* ARG1 must be an integer constant or itself a conversion from the
     same inner type.  */
  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
	    || TREE_CODE (arg1) == CONVERT_EXPR)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  /* If the signedness actually differs, only equality comparisons are
     safe to strip the conversion from.  */
  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Re-express the constant in the inner type, preserving the
	 overflow flags of the original constant.  */
      tmp = build_int_cst_wide (inner_type,
				TREE_INT_CST_LOW (arg1),
				TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
			     TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  MULT is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      /* A plain constant offset; the step will be matched against each
	 candidate array's element size below.  */
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Walk down the handled components of REF looking for an ARRAY_REF
     whose element size matches S (or divides DELTA when S is null).  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! itype)
	    continue;

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  /* Copy each intermediate component node so the original tree is left
     unmodified; POS ends up at the copied ARRAY_REF.  */
  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* Fold DELTA into the array index, in the index type.  */
  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
				       fold_convert (itype,
						     TREE_OPERAND (pos, 1)),
				       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
6638 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6639 means A >= Y && A != MAX, but in this case we know that
6640 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6642 static tree
6643 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6645 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6647 if (TREE_CODE (bound) == LT_EXPR)
6648 a = TREE_OPERAND (bound, 0);
6649 else if (TREE_CODE (bound) == GT_EXPR)
6650 a = TREE_OPERAND (bound, 1);
6651 else
6652 return NULL_TREE;
6654 typea = TREE_TYPE (a);
6655 if (!INTEGRAL_TYPE_P (typea)
6656 && !POINTER_TYPE_P (typea))
6657 return NULL_TREE;
6659 if (TREE_CODE (ineq) == LT_EXPR)
6661 a1 = TREE_OPERAND (ineq, 1);
6662 y = TREE_OPERAND (ineq, 0);
6664 else if (TREE_CODE (ineq) == GT_EXPR)
6666 a1 = TREE_OPERAND (ineq, 0);
6667 y = TREE_OPERAND (ineq, 1);
6669 else
6670 return NULL_TREE;
6672 if (TREE_TYPE (a1) != typea)
6673 return NULL_TREE;
6675 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6676 if (!integer_onep (diff))
6677 return NULL_TREE;
6679 return fold_build2 (GE_EXPR, type, a, y);
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  /* Decompose each operand as factor0 * factor1, treating a
     non-multiplication as operand * 1.  */
  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      arg00 = arg0;
      arg01 = fold_convert (type, integer_one_node);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = fold_convert (type, integer_one_node);
    }
  same = NULL_TREE;

  /* Look for a factor shared by both multiplications.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      /* If the smaller constant is a power of two dividing the larger,
	 factor it out: A*C1 + B*C2 == (A*(C1/C2) + B) * C2.  */
      if (exact_log2 (int11) > 0 && int01 % int11 == 0)
	{
	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
			      build_int_cst (TREE_TYPE (arg00),
					     int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  /* Undo the earlier operand swap so ALT0/ALT1 match the
	     original operand order (MAYBE_SAME is reused as a temp).  */
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
			fold_build2 (code, type,
				     fold_convert (type, alt0),
				     fold_convert (type, alt1)),
			fold_convert (type, same));

  return NULL_TREE;
}
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Pull the byte out of the low or high half of the constant's
	 two-HOST_WIDE_INT representation.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      /* Compute where this byte lives in target memory order,
	 honoring both the target's word and byte endianness.  */
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  /* Get the target representation as 32-bit chunks in TMP.  */
  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      /* Compute the target-memory position of this byte, honoring the
	 float word order and byte order.  */
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (FLOAT_WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
6866 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6867 specified by EXPR into the buffer PTR of length LEN bytes.
6868 Return the number of bytes placed in the buffer, or zero
6869 upon failure. */
6871 static int
6872 native_encode_complex (tree expr, unsigned char *ptr, int len)
6874 int rsize, isize;
6875 tree part;
6877 part = TREE_REALPART (expr);
6878 rsize = native_encode_expr (part, ptr, len);
6879 if (rsize == 0)
6880 return 0;
6881 part = TREE_IMAGPART (expr);
6882 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6883 if (isize != rsize)
6884 return 0;
6885 return rsize + isize;
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree elem, elements;

  size = 0;
  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  for (i = 0; i < count; i++)
    {
      /* The element list may be shorter than the vector; missing
	 trailing elements are implicitly zero.  */
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  size = native_encode_expr (elem, ptr+offset, len-offset);
	  if (size == 0)
	    return 0;
	}
      else if (size != 0)
	{
	  /* Zero-fill an omitted element, reusing SIZE from the last
	     encoded element.  */
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      else
	/* An omitted first element leaves the element size unknown.  */
	return 0;
      offset += size;
    }
  return offset;
}
6934 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6935 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6936 buffer PTR of length LEN bytes. Return the number of bytes
6937 placed in the buffer, or zero upon failure. */
6939 static int
6940 native_encode_expr (tree expr, unsigned char *ptr, int len)
6942 switch (TREE_CODE (expr))
6944 case INTEGER_CST:
6945 return native_encode_int (expr, ptr, len);
6947 case REAL_CST:
6948 return native_encode_real (expr, ptr, len);
6950 case COMPLEX_CST:
6951 return native_encode_complex (expr, ptr, len);
6953 case VECTOR_CST:
6954 return native_encode_vector (expr, ptr, len);
6956 default:
6957 return 0;
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  /* Low and high halves of the two-HOST_WIDE_INT constant value.  */
  unsigned int HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  /* Values wider than two HOST_WIDE_INTs cannot be represented.  */
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Locate this byte in target memory order, honoring the target's
	 word and byte endianness (mirror of native_encode_int).  */
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	hi |= (unsigned HOST_WIDE_INT) value
	      << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  /* Canonicalize the result for TYPE's precision and signedness.  */
  return force_fit_type (build_int_cst_wide (type, lo, hi),
			 0, false, false);
}
7011 /* Subroutine of native_interpret_expr. Interpret the contents of
7012 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7013 If the buffer cannot be interpreted, return NULL_TREE. */
7015 static tree
7016 native_interpret_real (tree type, unsigned char *ptr, int len)
7018 enum machine_mode mode = TYPE_MODE (type);
7019 int total_bytes = GET_MODE_SIZE (mode);
7020 int byte, offset, word, words;
7021 unsigned char value;
7022 /* There are always 32 bits in each long, no matter the size of
7023 the hosts long. We handle floating point representations with
7024 up to 192 bits. */
7025 REAL_VALUE_TYPE r;
7026 long tmp[6];
7028 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7029 if (total_bytes > len || total_bytes > 24)
7030 return NULL_TREE;
7031 words = total_bytes / UNITS_PER_WORD;
7033 memset (tmp, 0, sizeof (tmp));
7034 for (byte = 0; byte < total_bytes; byte++)
7036 int bitpos = byte * BITS_PER_UNIT;
7037 if (total_bytes > UNITS_PER_WORD)
7039 word = byte / UNITS_PER_WORD;
7040 if (FLOAT_WORDS_BIG_ENDIAN)
7041 word = (words - 1) - word;
7042 offset = word * UNITS_PER_WORD;
7043 if (BYTES_BIG_ENDIAN)
7044 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7045 else
7046 offset += byte % UNITS_PER_WORD;
7048 else
7049 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7050 value = ptr[offset];
7052 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7055 real_from_target (&r, tmp, mode);
7056 return build_real (type, r);
7060 /* Subroutine of native_interpret_expr. Interpret the contents of
7061 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7062 If the buffer cannot be interpreted, return NULL_TREE. */
7064 static tree
7065 native_interpret_complex (tree type, unsigned char *ptr, int len)
7067 tree etype, rpart, ipart;
7068 int size;
7070 etype = TREE_TYPE (type);
7071 size = GET_MODE_SIZE (TYPE_MODE (etype));
7072 if (size * 2 > len)
7073 return NULL_TREE;
7074 rpart = native_interpret_expr (etype, ptr, size);
7075 if (!rpart)
7076 return NULL_TREE;
7077 ipart = native_interpret_expr (etype, ptr+size, size);
7078 if (!ipart)
7079 return NULL_TREE;
7080 return build_complex (type, rpart, ipart);
7084 /* Subroutine of native_interpret_expr. Interpret the contents of
7085 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7086 If the buffer cannot be interpreted, return NULL_TREE. */
7088 static tree
7089 native_interpret_vector (tree type, unsigned char *ptr, int len)
7091 tree etype, elem, elements;
7092 int i, size, count;
7094 etype = TREE_TYPE (type);
7095 size = GET_MODE_SIZE (TYPE_MODE (etype));
7096 count = TYPE_VECTOR_SUBPARTS (type);
7097 if (size * count > len)
7098 return NULL_TREE;
7100 elements = NULL_TREE;
7101 for (i = count - 1; i >= 0; i--)
7103 elem = native_interpret_expr (etype, ptr+(i*size), size);
7104 if (!elem)
7105 return NULL_TREE;
7106 elements = tree_cons (NULL_TREE, elem, elements);
7108 return build_vector (type, elements);
7112 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7113 the buffer PTR of length LEN as a constant of type TYPE. For
7114 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7115 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7116 return NULL_TREE. */
7118 static tree
7119 native_interpret_expr (tree type, unsigned char *ptr, int len)
7121 switch (TREE_CODE (type))
7123 case INTEGER_TYPE:
7124 case ENUMERAL_TYPE:
7125 case BOOLEAN_TYPE:
7126 return native_interpret_int (type, ptr, len);
7128 case REAL_TYPE:
7129 return native_interpret_real (type, ptr, len);
7131 case COMPLEX_TYPE:
7132 return native_interpret_complex (type, ptr, len);
7134 case VECTOR_TYPE:
7135 return native_interpret_vector (type, ptr, len);
7137 default:
7138 return NULL_TREE;
7143 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7144 TYPE at compile-time. If we're unable to perform the conversion
7145 return NULL_TREE. */
7147 static tree
7148 fold_view_convert_expr (tree type, tree expr)
7150 /* We support up to 512-bit values (for V8DFmode). */
7151 unsigned char buffer[64];
7152 int len;
7154 /* Check that the host and target are sane. */
7155 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7156 return NULL_TREE;
7158 len = native_encode_expr (expr, buffer, sizeof (buffer));
7159 if (len == 0)
7160 return NULL_TREE;
7162 return native_interpret_expr (type, buffer, len);
7166 /* Fold a unary expression of code CODE and type TYPE with operand
7167 OP0. Return the folded expression if folding is successful.
7168 Otherwise, return NULL_TREE. */
7170 tree
7171 fold_unary (enum tree_code code, tree type, tree op0)
7173 tree tem;
7174 tree arg0;
7175 enum tree_code_class kind = TREE_CODE_CLASS (code);
7177 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7178 && TREE_CODE_LENGTH (code) == 1);
7180 arg0 = op0;
7181 if (arg0)
7183 if (code == NOP_EXPR || code == CONVERT_EXPR
7184 || code == FLOAT_EXPR || code == ABS_EXPR)
7186 /* Don't use STRIP_NOPS, because signedness of argument type
7187 matters. */
7188 STRIP_SIGN_NOPS (arg0);
7190 else
7192 /* Strip any conversions that don't change the mode. This
7193 is safe for every expression, except for a comparison
7194 expression because its signedness is derived from its
7195 operands.
7197 Note that this is done as an internal manipulation within
7198 the constant folder, in order to find the simplest
7199 representation of the arguments so that their form can be
7200 studied. In any cases, the appropriate type conversions
7201 should be put back in the tree that will get out of the
7202 constant folder. */
7203 STRIP_NOPS (arg0);
7207 if (TREE_CODE_CLASS (code) == tcc_unary)
7209 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7210 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7211 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7212 else if (TREE_CODE (arg0) == COND_EXPR)
7214 tree arg01 = TREE_OPERAND (arg0, 1);
7215 tree arg02 = TREE_OPERAND (arg0, 2);
7216 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7217 arg01 = fold_build1 (code, type, arg01);
7218 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7219 arg02 = fold_build1 (code, type, arg02);
7220 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7221 arg01, arg02);
7223 /* If this was a conversion, and all we did was to move into
7224 inside the COND_EXPR, bring it back out. But leave it if
7225 it is a conversion from integer to integer and the
7226 result precision is no wider than a word since such a
7227 conversion is cheap and may be optimized away by combine,
7228 while it couldn't if it were outside the COND_EXPR. Then return
7229 so we don't get into an infinite recursion loop taking the
7230 conversion out and then back in. */
7232 if ((code == NOP_EXPR || code == CONVERT_EXPR
7233 || code == NON_LVALUE_EXPR)
7234 && TREE_CODE (tem) == COND_EXPR
7235 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7236 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7237 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7238 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7239 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7240 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7241 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7242 && (INTEGRAL_TYPE_P
7243 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7244 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7245 || flag_syntax_only))
7246 tem = build1 (code, type,
7247 build3 (COND_EXPR,
7248 TREE_TYPE (TREE_OPERAND
7249 (TREE_OPERAND (tem, 1), 0)),
7250 TREE_OPERAND (tem, 0),
7251 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7252 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7253 return tem;
7255 else if (COMPARISON_CLASS_P (arg0))
7257 if (TREE_CODE (type) == BOOLEAN_TYPE)
7259 arg0 = copy_node (arg0);
7260 TREE_TYPE (arg0) = type;
7261 return arg0;
7263 else if (TREE_CODE (type) != INTEGER_TYPE)
7264 return fold_build3 (COND_EXPR, type, arg0,
7265 fold_build1 (code, type,
7266 integer_one_node),
7267 fold_build1 (code, type,
7268 integer_zero_node));
7272 switch (code)
7274 case NOP_EXPR:
7275 case FLOAT_EXPR:
7276 case CONVERT_EXPR:
7277 case FIX_TRUNC_EXPR:
7278 case FIX_CEIL_EXPR:
7279 case FIX_FLOOR_EXPR:
7280 case FIX_ROUND_EXPR:
7281 if (TREE_TYPE (op0) == type)
7282 return op0;
7284 /* If we have (type) (a CMP b) and type is an integral type, return
7285 new expression involving the new type. */
7286 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7287 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7288 TREE_OPERAND (op0, 1));
7290 /* Handle cases of two conversions in a row. */
7291 if (TREE_CODE (op0) == NOP_EXPR
7292 || TREE_CODE (op0) == CONVERT_EXPR)
7294 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7295 tree inter_type = TREE_TYPE (op0);
7296 int inside_int = INTEGRAL_TYPE_P (inside_type);
7297 int inside_ptr = POINTER_TYPE_P (inside_type);
7298 int inside_float = FLOAT_TYPE_P (inside_type);
7299 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7300 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7301 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7302 int inter_int = INTEGRAL_TYPE_P (inter_type);
7303 int inter_ptr = POINTER_TYPE_P (inter_type);
7304 int inter_float = FLOAT_TYPE_P (inter_type);
7305 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7306 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7307 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7308 int final_int = INTEGRAL_TYPE_P (type);
7309 int final_ptr = POINTER_TYPE_P (type);
7310 int final_float = FLOAT_TYPE_P (type);
7311 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7312 unsigned int final_prec = TYPE_PRECISION (type);
7313 int final_unsignedp = TYPE_UNSIGNED (type);
7315 /* In addition to the cases of two conversions in a row
7316 handled below, if we are converting something to its own
7317 type via an object of identical or wider precision, neither
7318 conversion is needed. */
7319 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7320 && (((inter_int || inter_ptr) && final_int)
7321 || (inter_float && final_float))
7322 && inter_prec >= final_prec)
7323 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7325 /* Likewise, if the intermediate and final types are either both
7326 float or both integer, we don't need the middle conversion if
7327 it is wider than the final type and doesn't change the signedness
7328 (for integers). Avoid this if the final type is a pointer
7329 since then we sometimes need the inner conversion. Likewise if
7330 the outer has a precision not equal to the size of its mode. */
7331 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7332 || (inter_float && inside_float)
7333 || (inter_vec && inside_vec))
7334 && inter_prec >= inside_prec
7335 && (inter_float || inter_vec
7336 || inter_unsignedp == inside_unsignedp)
7337 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7338 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7339 && ! final_ptr
7340 && (! final_vec || inter_prec == inside_prec))
7341 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7343 /* If we have a sign-extension of a zero-extended value, we can
7344 replace that by a single zero-extension. */
7345 if (inside_int && inter_int && final_int
7346 && inside_prec < inter_prec && inter_prec < final_prec
7347 && inside_unsignedp && !inter_unsignedp)
7348 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7350 /* Two conversions in a row are not needed unless:
7351 - some conversion is floating-point (overstrict for now), or
7352 - some conversion is a vector (overstrict for now), or
7353 - the intermediate type is narrower than both initial and
7354 final, or
7355 - the intermediate type and innermost type differ in signedness,
7356 and the outermost type is wider than the intermediate, or
7357 - the initial type is a pointer type and the precisions of the
7358 intermediate and final types differ, or
7359 - the final type is a pointer type and the precisions of the
7360 initial and intermediate types differ.
7361 - the final type is a pointer type and the initial type not
7362 - the initial type is a pointer to an array and the final type
7363 not. */
7364 if (! inside_float && ! inter_float && ! final_float
7365 && ! inside_vec && ! inter_vec && ! final_vec
7366 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7367 && ! (inside_int && inter_int
7368 && inter_unsignedp != inside_unsignedp
7369 && inter_prec < final_prec)
7370 && ((inter_unsignedp && inter_prec > inside_prec)
7371 == (final_unsignedp && final_prec > inter_prec))
7372 && ! (inside_ptr && inter_prec != final_prec)
7373 && ! (final_ptr && inside_prec != inter_prec)
7374 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7375 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7376 && final_ptr == inside_ptr
7377 && ! (inside_ptr
7378 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7379 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7380 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7383 /* Handle (T *)&A.B.C for A being of type T and B and C
7384 living at offset zero. This occurs frequently in
7385 C++ upcasting and then accessing the base. */
7386 if (TREE_CODE (op0) == ADDR_EXPR
7387 && POINTER_TYPE_P (type)
7388 && handled_component_p (TREE_OPERAND (op0, 0)))
7390 HOST_WIDE_INT bitsize, bitpos;
7391 tree offset;
7392 enum machine_mode mode;
7393 int unsignedp, volatilep;
7394 tree base = TREE_OPERAND (op0, 0);
7395 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7396 &mode, &unsignedp, &volatilep, false);
7397 /* If the reference was to a (constant) zero offset, we can use
7398 the address of the base if it has the same base type
7399 as the result type. */
7400 if (! offset && bitpos == 0
7401 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7402 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7403 return fold_convert (type, build_fold_addr_expr (base));
7406 if (TREE_CODE (op0) == MODIFY_EXPR
7407 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7408 /* Detect assigning a bitfield. */
7409 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7410 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7412 /* Don't leave an assignment inside a conversion
7413 unless assigning a bitfield. */
7414 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7415 /* First do the assignment, then return converted constant. */
7416 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7417 TREE_NO_WARNING (tem) = 1;
7418 TREE_USED (tem) = 1;
7419 return tem;
7422 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7423 constants (if x has signed type, the sign bit cannot be set
7424 in c). This folds extension into the BIT_AND_EXPR. */
7425 if (INTEGRAL_TYPE_P (type)
7426 && TREE_CODE (type) != BOOLEAN_TYPE
7427 && TREE_CODE (op0) == BIT_AND_EXPR
7428 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7430 tree and = op0;
7431 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7432 int change = 0;
7434 if (TYPE_UNSIGNED (TREE_TYPE (and))
7435 || (TYPE_PRECISION (type)
7436 <= TYPE_PRECISION (TREE_TYPE (and))))
7437 change = 1;
7438 else if (TYPE_PRECISION (TREE_TYPE (and1))
7439 <= HOST_BITS_PER_WIDE_INT
7440 && host_integerp (and1, 1))
7442 unsigned HOST_WIDE_INT cst;
7444 cst = tree_low_cst (and1, 1);
7445 cst &= (HOST_WIDE_INT) -1
7446 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7447 change = (cst == 0);
7448 #ifdef LOAD_EXTEND_OP
7449 if (change
7450 && !flag_syntax_only
7451 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7452 == ZERO_EXTEND))
7454 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7455 and0 = fold_convert (uns, and0);
7456 and1 = fold_convert (uns, and1);
7458 #endif
7460 if (change)
7462 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7463 TREE_INT_CST_HIGH (and1));
7464 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7465 TREE_CONSTANT_OVERFLOW (and1));
7466 return fold_build2 (BIT_AND_EXPR, type,
7467 fold_convert (type, and0), tem);
7471 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7472 T2 being pointers to types of the same size. */
7473 if (POINTER_TYPE_P (type)
7474 && BINARY_CLASS_P (arg0)
7475 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7476 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7478 tree arg00 = TREE_OPERAND (arg0, 0);
7479 tree t0 = type;
7480 tree t1 = TREE_TYPE (arg00);
7481 tree tt0 = TREE_TYPE (t0);
7482 tree tt1 = TREE_TYPE (t1);
7483 tree s0 = TYPE_SIZE (tt0);
7484 tree s1 = TYPE_SIZE (tt1);
7486 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7487 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7488 TREE_OPERAND (arg0, 1));
7491 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7492 of the same precision, and X is a integer type not narrower than
7493 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7494 if (INTEGRAL_TYPE_P (type)
7495 && TREE_CODE (op0) == BIT_NOT_EXPR
7496 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7497 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7498 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7499 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7501 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7502 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7503 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7504 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7507 tem = fold_convert_const (code, type, arg0);
7508 return tem ? tem : NULL_TREE;
7510 case VIEW_CONVERT_EXPR:
7511 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7512 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7513 return fold_view_convert_expr (type, op0);
7515 case NEGATE_EXPR:
7516 if (negate_expr_p (arg0))
7517 return fold_convert (type, negate_expr (arg0));
7518 return NULL_TREE;
7520 case ABS_EXPR:
7521 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7522 return fold_abs_const (arg0, type);
7523 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7524 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7525 /* Convert fabs((double)float) into (double)fabsf(float). */
7526 else if (TREE_CODE (arg0) == NOP_EXPR
7527 && TREE_CODE (type) == REAL_TYPE)
7529 tree targ0 = strip_float_extensions (arg0);
7530 if (targ0 != arg0)
7531 return fold_convert (type, fold_build1 (ABS_EXPR,
7532 TREE_TYPE (targ0),
7533 targ0));
7535 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7536 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7537 return arg0;
7539 /* Strip sign ops from argument. */
7540 if (TREE_CODE (type) == REAL_TYPE)
7542 tem = fold_strip_sign_ops (arg0);
7543 if (tem)
7544 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7546 return NULL_TREE;
7548 case CONJ_EXPR:
7549 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7550 return fold_convert (type, arg0);
7551 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7552 return build2 (COMPLEX_EXPR, type,
7553 TREE_OPERAND (arg0, 0),
7554 negate_expr (TREE_OPERAND (arg0, 1)));
7555 else if (TREE_CODE (arg0) == COMPLEX_CST)
7556 return build_complex (type, TREE_REALPART (arg0),
7557 negate_expr (TREE_IMAGPART (arg0)));
7558 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7559 return fold_build2 (TREE_CODE (arg0), type,
7560 fold_build1 (CONJ_EXPR, type,
7561 TREE_OPERAND (arg0, 0)),
7562 fold_build1 (CONJ_EXPR, type,
7563 TREE_OPERAND (arg0, 1)));
7564 else if (TREE_CODE (arg0) == CONJ_EXPR)
7565 return TREE_OPERAND (arg0, 0);
7566 return NULL_TREE;
7568 case BIT_NOT_EXPR:
7569 if (TREE_CODE (arg0) == INTEGER_CST)
7570 return fold_not_const (arg0, type);
7571 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7572 return TREE_OPERAND (arg0, 0);
7573 /* Convert ~ (-A) to A - 1. */
7574 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7575 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7576 build_int_cst (type, 1));
7577 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7578 else if (INTEGRAL_TYPE_P (type)
7579 && ((TREE_CODE (arg0) == MINUS_EXPR
7580 && integer_onep (TREE_OPERAND (arg0, 1)))
7581 || (TREE_CODE (arg0) == PLUS_EXPR
7582 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7583 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7584 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7585 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7586 && (tem = fold_unary (BIT_NOT_EXPR, type,
7587 fold_convert (type,
7588 TREE_OPERAND (arg0, 0)))))
7589 return fold_build2 (BIT_XOR_EXPR, type, tem,
7590 fold_convert (type, TREE_OPERAND (arg0, 1)));
7591 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7592 && (tem = fold_unary (BIT_NOT_EXPR, type,
7593 fold_convert (type,
7594 TREE_OPERAND (arg0, 1)))))
7595 return fold_build2 (BIT_XOR_EXPR, type,
7596 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7598 return NULL_TREE;
7600 case TRUTH_NOT_EXPR:
7601 /* The argument to invert_truthvalue must have Boolean type. */
7602 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7603 arg0 = fold_convert (boolean_type_node, arg0);
7605 /* Note that the operand of this must be an int
7606 and its values must be 0 or 1.
7607 ("true" is a fixed value perhaps depending on the language,
7608 but we don't handle values other than 1 correctly yet.) */
7609 tem = invert_truthvalue (arg0);
7610 /* Avoid infinite recursion. */
7611 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7612 return NULL_TREE;
7613 return fold_convert (type, tem);
7615 case REALPART_EXPR:
7616 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7617 return NULL_TREE;
7618 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7619 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7620 TREE_OPERAND (arg0, 1));
7621 else if (TREE_CODE (arg0) == COMPLEX_CST)
7622 return TREE_REALPART (arg0);
7623 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7624 return fold_build2 (TREE_CODE (arg0), type,
7625 fold_build1 (REALPART_EXPR, type,
7626 TREE_OPERAND (arg0, 0)),
7627 fold_build1 (REALPART_EXPR, type,
7628 TREE_OPERAND (arg0, 1)));
7629 return NULL_TREE;
7631 case IMAGPART_EXPR:
7632 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7633 return fold_convert (type, integer_zero_node);
7634 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7635 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7636 TREE_OPERAND (arg0, 0));
7637 else if (TREE_CODE (arg0) == COMPLEX_CST)
7638 return TREE_IMAGPART (arg0);
7639 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7640 return fold_build2 (TREE_CODE (arg0), type,
7641 fold_build1 (IMAGPART_EXPR, type,
7642 TREE_OPERAND (arg0, 0)),
7643 fold_build1 (IMAGPART_EXPR, type,
7644 TREE_OPERAND (arg0, 1)));
7645 return NULL_TREE;
7647 default:
7648 return NULL_TREE;
7649 } /* switch (code) */
7652 /* Fold a binary expression of code CODE and type TYPE with operands
7653 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7654 Return the folded expression if folding is successful. Otherwise,
7655 return NULL_TREE. */
7657 static tree
7658 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7660 enum tree_code compl_code;
7662 if (code == MIN_EXPR)
7663 compl_code = MAX_EXPR;
7664 else if (code == MAX_EXPR)
7665 compl_code = MIN_EXPR;
7666 else
7667 gcc_unreachable ();
7669 /* MIN (MAX (a, b), b) == b.  */
7670 if (TREE_CODE (op0) == compl_code
7671 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7672 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7674 /* MIN (MAX (b, a), b) == b.  */
7675 if (TREE_CODE (op0) == compl_code
7676 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7677 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7678 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7680 /* MIN (a, MAX (a, b)) == a.  */
7681 if (TREE_CODE (op1) == compl_code
7682 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7683 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7684 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7686 /* MIN (a, MAX (b, a)) == a.  */
7687 if (TREE_CODE (op1) == compl_code
7688 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7689 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7690 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7692 return NULL_TREE;
7695 /* Subroutine of fold_binary. This routine performs all of the
7696 transformations that are common to the equality/inequality
7697 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7698 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7699 fold_binary should call fold_binary. Fold a comparison with
7700 tree code CODE and type TYPE with operands OP0 and OP1. Return
7701 the folded comparison or NULL_TREE. */
7703 static tree
7704 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7706 tree arg0, arg1, tem;
7708 arg0 = op0;
7709 arg1 = op1;
7711 STRIP_SIGN_NOPS (arg0);
7712 STRIP_SIGN_NOPS (arg1);
7714 tem = fold_relational_const (code, type, arg0, arg1);
7715 if (tem != NULL_TREE)
7716 return tem;
7718 /* If one arg is a real or integer constant, put it last. */
7719 if (tree_swap_operands_p (arg0, arg1, true))
7720 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7722 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7723 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7724 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7725 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7726 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7727 && !(flag_wrapv || flag_trapv))
7728 && (TREE_CODE (arg1) == INTEGER_CST
7729 && !TREE_OVERFLOW (arg1)))
7731 tree const1 = TREE_OPERAND (arg0, 1);
7732 tree const2 = arg1;
7733 tree variable = TREE_OPERAND (arg0, 0);
7734 tree lhs;
7735 int lhs_add;
7736 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7738 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7739 TREE_TYPE (arg1), const2, const1);
7740 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7741 && (TREE_CODE (lhs) != INTEGER_CST
7742 || !TREE_OVERFLOW (lhs)))
7743 return fold_build2 (code, type, variable, lhs);
7746 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7748 tree targ0 = strip_float_extensions (arg0);
7749 tree targ1 = strip_float_extensions (arg1);
7750 tree newtype = TREE_TYPE (targ0);
7752 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7753 newtype = TREE_TYPE (targ1);
7755 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7756 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7757 return fold_build2 (code, type, fold_convert (newtype, targ0),
7758 fold_convert (newtype, targ1));
7760 /* (-a) CMP (-b) -> b CMP a */
7761 if (TREE_CODE (arg0) == NEGATE_EXPR
7762 && TREE_CODE (arg1) == NEGATE_EXPR)
7763 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7764 TREE_OPERAND (arg0, 0));
7766 if (TREE_CODE (arg1) == REAL_CST)
7768 REAL_VALUE_TYPE cst;
7769 cst = TREE_REAL_CST (arg1);
7771 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7772 if (TREE_CODE (arg0) == NEGATE_EXPR)
7773 return fold_build2 (swap_tree_comparison (code), type,
7774 TREE_OPERAND (arg0, 0),
7775 build_real (TREE_TYPE (arg1),
7776 REAL_VALUE_NEGATE (cst)));
7778 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7779 /* a CMP (-0) -> a CMP 0 */
7780 if (REAL_VALUE_MINUS_ZERO (cst))
7781 return fold_build2 (code, type, arg0,
7782 build_real (TREE_TYPE (arg1), dconst0));
7784 /* x != NaN is always true, other ops are always false. */
7785 if (REAL_VALUE_ISNAN (cst)
7786 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7788 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7789 return omit_one_operand (type, tem, arg0);
7792 /* Fold comparisons against infinity. */
7793 if (REAL_VALUE_ISINF (cst))
7795 tem = fold_inf_compare (code, type, arg0, arg1);
7796 if (tem != NULL_TREE)
7797 return tem;
7801 /* If this is a comparison of a real constant with a PLUS_EXPR
7802 or a MINUS_EXPR of a real constant, we can convert it into a
7803 comparison with a revised real constant as long as no overflow
7804 occurs when unsafe_math_optimizations are enabled. */
7805 if (flag_unsafe_math_optimizations
7806 && TREE_CODE (arg1) == REAL_CST
7807 && (TREE_CODE (arg0) == PLUS_EXPR
7808 || TREE_CODE (arg0) == MINUS_EXPR)
7809 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7810 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7811 ? MINUS_EXPR : PLUS_EXPR,
7812 arg1, TREE_OPERAND (arg0, 1), 0))
7813 && ! TREE_CONSTANT_OVERFLOW (tem))
7814 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7816 /* Likewise, we can simplify a comparison of a real constant with
7817 a MINUS_EXPR whose first operand is also a real constant, i.e.
7818 (c1 - x) < c2 becomes x > c1-c2. */
7819 if (flag_unsafe_math_optimizations
7820 && TREE_CODE (arg1) == REAL_CST
7821 && TREE_CODE (arg0) == MINUS_EXPR
7822 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7823 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7824 arg1, 0))
7825 && ! TREE_CONSTANT_OVERFLOW (tem))
7826 return fold_build2 (swap_tree_comparison (code), type,
7827 TREE_OPERAND (arg0, 1), tem);
7829 /* Fold comparisons against built-in math functions. */
7830 if (TREE_CODE (arg1) == REAL_CST
7831 && flag_unsafe_math_optimizations
7832 && ! flag_errno_math)
7834 enum built_in_function fcode = builtin_mathfn_code (arg0);
7836 if (fcode != END_BUILTINS)
7838 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7839 if (tem != NULL_TREE)
7840 return tem;
7845 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7846 if (TREE_CONSTANT (arg1)
7847 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7848 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7849 /* This optimization is invalid for ordered comparisons
7850 if CONST+INCR overflows or if foo+incr might overflow.
7851 This optimization is invalid for floating point due to rounding.
7852 For pointer types we assume overflow doesn't happen. */
7853 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7854 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7855 && (code == EQ_EXPR || code == NE_EXPR))))
7857 tree varop, newconst;
7859 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7861 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7862 arg1, TREE_OPERAND (arg0, 1));
7863 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7864 TREE_OPERAND (arg0, 0),
7865 TREE_OPERAND (arg0, 1));
7867 else
7869 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7870 arg1, TREE_OPERAND (arg0, 1));
7871 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7872 TREE_OPERAND (arg0, 0),
7873 TREE_OPERAND (arg0, 1));
7877 /* If VAROP is a reference to a bitfield, we must mask
7878 the constant by the width of the field. */
7879 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7880 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7881 && host_integerp (DECL_SIZE (TREE_OPERAND
7882 (TREE_OPERAND (varop, 0), 1)), 1))
7884 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7885 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7886 tree folded_compare, shift;
7888 /* First check whether the comparison would come out
7889 always the same. If we don't do that we would
7890 change the meaning with the masking. */
7891 folded_compare = fold_build2 (code, type,
7892 TREE_OPERAND (varop, 0), arg1);
7893 if (TREE_CODE (folded_compare) == INTEGER_CST)
7894 return omit_one_operand (type, folded_compare, varop);
7896 shift = build_int_cst (NULL_TREE,
7897 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7898 shift = fold_convert (TREE_TYPE (varop), shift);
7899 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7900 newconst, shift);
7901 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7902 newconst, shift);
7905 return fold_build2 (code, type, varop, newconst);
7908 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7909 && (TREE_CODE (arg0) == NOP_EXPR
7910 || TREE_CODE (arg0) == CONVERT_EXPR))
7912 /* If we are widening one operand of an integer comparison,
7913 see if the other operand is similarly being widened. Perhaps we
7914 can do the comparison in the narrower type. */
7915 tem = fold_widened_comparison (code, type, arg0, arg1);
7916 if (tem)
7917 return tem;
7919 /* Or if we are changing signedness. */
7920 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
7921 if (tem)
7922 return tem;
7925 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7926 constant, we can simplify it. */
7927 if (TREE_CODE (arg1) == INTEGER_CST
7928 && (TREE_CODE (arg0) == MIN_EXPR
7929 || TREE_CODE (arg0) == MAX_EXPR)
7930 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7932 tem = optimize_minmax_comparison (code, type, op0, op1);
7933 if (tem)
7934 return tem;
7937 /* Simplify comparison of something with itself. (For IEEE
7938 floating-point, we can only do some of these simplifications.) */
7939 if (operand_equal_p (arg0, arg1, 0))
7941 switch (code)
7943 case EQ_EXPR:
7944 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7945 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7946 return constant_boolean_node (1, type);
7947 break;
7949 case GE_EXPR:
7950 case LE_EXPR:
7951 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7952 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7953 return constant_boolean_node (1, type);
7954 return fold_build2 (EQ_EXPR, type, arg0, arg1);
7956 case NE_EXPR:
7957 /* For NE, we can only do this simplification if integer
7958 or we don't honor IEEE floating point NaNs. */
7959 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7960 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7961 break;
7962 /* ... fall through ... */
7963 case GT_EXPR:
7964 case LT_EXPR:
7965 return constant_boolean_node (0, type);
7966 default:
7967 gcc_unreachable ();
7971 /* If we are comparing an expression that just has comparisons
7972 of two integer values, arithmetic expressions of those comparisons,
7973 and constants, we can simplify it. There are only three cases
7974 to check: the two values can either be equal, the first can be
7975 greater, or the second can be greater. Fold the expression for
7976 those three values. Since each value must be 0 or 1, we have
7977 eight possibilities, each of which corresponds to the constant 0
7978 or 1 or one of the six possible comparisons.
7980 This handles common cases like (a > b) == 0 but also handles
7981 expressions like ((x > y) - (y > x)) > 0, which supposedly
7982 occur in macroized code. */
7984 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7986 tree cval1 = 0, cval2 = 0;
7987 int save_p = 0;
7989 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7990 /* Don't handle degenerate cases here; they should already
7991 have been handled anyway. */
7992 && cval1 != 0 && cval2 != 0
7993 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7994 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7995 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7996 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7997 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7998 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7999 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8001 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8002 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8004 /* We can't just pass T to eval_subst in case cval1 or cval2
8005 was the same as ARG1. */
8007 tree high_result
8008 = fold_build2 (code, type,
8009 eval_subst (arg0, cval1, maxval,
8010 cval2, minval),
8011 arg1);
8012 tree equal_result
8013 = fold_build2 (code, type,
8014 eval_subst (arg0, cval1, maxval,
8015 cval2, maxval),
8016 arg1);
8017 tree low_result
8018 = fold_build2 (code, type,
8019 eval_subst (arg0, cval1, minval,
8020 cval2, maxval),
8021 arg1);
8023 /* All three of these results should be 0 or 1. Confirm they are.
8024 Then use those values to select the proper code to use. */
8026 if (TREE_CODE (high_result) == INTEGER_CST
8027 && TREE_CODE (equal_result) == INTEGER_CST
8028 && TREE_CODE (low_result) == INTEGER_CST)
8030 /* Make a 3-bit mask with the high-order bit being the
8031 value for `>', the next for '=', and the low for '<'. */
8032 switch ((integer_onep (high_result) * 4)
8033 + (integer_onep (equal_result) * 2)
8034 + integer_onep (low_result))
8036 case 0:
8037 /* Always false. */
8038 return omit_one_operand (type, integer_zero_node, arg0);
8039 case 1:
8040 code = LT_EXPR;
8041 break;
8042 case 2:
8043 code = EQ_EXPR;
8044 break;
8045 case 3:
8046 code = LE_EXPR;
8047 break;
8048 case 4:
8049 code = GT_EXPR;
8050 break;
8051 case 5:
8052 code = NE_EXPR;
8053 break;
8054 case 6:
8055 code = GE_EXPR;
8056 break;
8057 case 7:
8058 /* Always true. */
8059 return omit_one_operand (type, integer_one_node, arg0);
8062 if (save_p)
8063 return save_expr (build2 (code, type, cval1, cval2));
8064 return fold_build2 (code, type, cval1, cval2);
8069 /* Fold a comparison of the address of COMPONENT_REFs with the same
8070 type and component to a comparison of the address of the base
8071 object. In short, &x->a OP &y->a to x OP y and
8072 &x->a OP &y.a to x OP &y */
8073 if (TREE_CODE (arg0) == ADDR_EXPR
8074 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8075 && TREE_CODE (arg1) == ADDR_EXPR
8076 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8078 tree cref0 = TREE_OPERAND (arg0, 0);
8079 tree cref1 = TREE_OPERAND (arg1, 0);
8080 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8082 tree op0 = TREE_OPERAND (cref0, 0);
8083 tree op1 = TREE_OPERAND (cref1, 0);
8084 return fold_build2 (code, type,
8085 build_fold_addr_expr (op0),
8086 build_fold_addr_expr (op1));
8090 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8091 into a single range test. */
8092 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8093 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8094 && TREE_CODE (arg1) == INTEGER_CST
8095 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8096 && !integer_zerop (TREE_OPERAND (arg0, 1))
8097 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8098 && !TREE_OVERFLOW (arg1))
8100 tem = fold_div_compare (code, type, arg0, arg1);
8101 if (tem != NULL_TREE)
8102 return tem;
8105 return NULL_TREE;
8108 /* Fold a binary expression of code CODE and type TYPE with operands
8109 OP0 and OP1. Return the folded expression if folding is
8110 successful. Otherwise, return NULL_TREE. */
8112 tree
8113 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8115 enum tree_code_class kind = TREE_CODE_CLASS (code);
8116 tree arg0, arg1, tem;
8117 tree t1 = NULL_TREE;
8119 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8120 && TREE_CODE_LENGTH (code) == 2
8121 && op0 != NULL_TREE
8122 && op1 != NULL_TREE);
8124 arg0 = op0;
8125 arg1 = op1;
8127 /* Strip any conversions that don't change the mode. This is
8128 safe for every expression, except for a comparison expression
8129 because its signedness is derived from its operands. So, in
8130 the latter case, only strip conversions that don't change the
8131 signedness.
8133 Note that this is done as an internal manipulation within the
8134 constant folder, in order to find the simplest representation
8135 of the arguments so that their form can be studied. In any
8136 cases, the appropriate type conversions should be put back in
8137 the tree that will get out of the constant folder. */
8139 if (kind == tcc_comparison)
8141 STRIP_SIGN_NOPS (arg0);
8142 STRIP_SIGN_NOPS (arg1);
8144 else
8146 STRIP_NOPS (arg0);
8147 STRIP_NOPS (arg1);
8150 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8151 constant but we can't do arithmetic on them. */
8152 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8153 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8154 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8155 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8157 if (kind == tcc_binary)
8158 tem = const_binop (code, arg0, arg1, 0);
8159 else if (kind == tcc_comparison)
8160 tem = fold_relational_const (code, type, arg0, arg1);
8161 else
8162 tem = NULL_TREE;
8164 if (tem != NULL_TREE)
8166 if (TREE_TYPE (tem) != type)
8167 tem = fold_convert (type, tem);
8168 return tem;
8172 /* If this is a commutative operation, and ARG0 is a constant, move it
8173 to ARG1 to reduce the number of tests below. */
8174 if (commutative_tree_code (code)
8175 && tree_swap_operands_p (arg0, arg1, true))
8176 return fold_build2 (code, type, op1, op0);
8178 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8180 First check for cases where an arithmetic operation is applied to a
8181 compound, conditional, or comparison operation. Push the arithmetic
8182 operation inside the compound or conditional to see if any folding
8183 can then be done. Convert comparison to conditional for this purpose.
 8184      The also optimizes non-constant cases that used to be done in
8185 expand_expr.
8187 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8188 one of the operands is a comparison and the other is a comparison, a
8189 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8190 code below would make the expression more complex. Change it to a
8191 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8192 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8194 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8195 || code == EQ_EXPR || code == NE_EXPR)
8196 && ((truth_value_p (TREE_CODE (arg0))
8197 && (truth_value_p (TREE_CODE (arg1))
8198 || (TREE_CODE (arg1) == BIT_AND_EXPR
8199 && integer_onep (TREE_OPERAND (arg1, 1)))))
8200 || (truth_value_p (TREE_CODE (arg1))
8201 && (truth_value_p (TREE_CODE (arg0))
8202 || (TREE_CODE (arg0) == BIT_AND_EXPR
8203 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8205 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8206 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8207 : TRUTH_XOR_EXPR,
8208 boolean_type_node,
8209 fold_convert (boolean_type_node, arg0),
8210 fold_convert (boolean_type_node, arg1));
8212 if (code == EQ_EXPR)
8213 tem = invert_truthvalue (tem);
8215 return fold_convert (type, tem);
8218 if (TREE_CODE_CLASS (code) == tcc_binary
8219 || TREE_CODE_CLASS (code) == tcc_comparison)
8221 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8222 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8223 fold_build2 (code, type,
8224 TREE_OPERAND (arg0, 1), op1));
8225 if (TREE_CODE (arg1) == COMPOUND_EXPR
8226 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8227 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8228 fold_build2 (code, type,
8229 op0, TREE_OPERAND (arg1, 1)));
8231 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8233 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8234 arg0, arg1,
8235 /*cond_first_p=*/1);
8236 if (tem != NULL_TREE)
8237 return tem;
8240 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8242 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8243 arg1, arg0,
8244 /*cond_first_p=*/0);
8245 if (tem != NULL_TREE)
8246 return tem;
8250 switch (code)
8252 case PLUS_EXPR:
8253 /* A + (-B) -> A - B */
8254 if (TREE_CODE (arg1) == NEGATE_EXPR)
8255 return fold_build2 (MINUS_EXPR, type,
8256 fold_convert (type, arg0),
8257 fold_convert (type, TREE_OPERAND (arg1, 0)));
8258 /* (-A) + B -> B - A */
8259 if (TREE_CODE (arg0) == NEGATE_EXPR
8260 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8261 return fold_build2 (MINUS_EXPR, type,
8262 fold_convert (type, arg1),
8263 fold_convert (type, TREE_OPERAND (arg0, 0)));
8264 /* Convert ~A + 1 to -A. */
8265 if (INTEGRAL_TYPE_P (type)
8266 && TREE_CODE (arg0) == BIT_NOT_EXPR
8267 && integer_onep (arg1))
8268 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8270 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8271 same or one. */
8272 if ((TREE_CODE (arg0) == MULT_EXPR
8273 || TREE_CODE (arg1) == MULT_EXPR)
8274 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8276 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8277 if (tem)
8278 return tem;
8281 if (! FLOAT_TYPE_P (type))
8283 if (integer_zerop (arg1))
8284 return non_lvalue (fold_convert (type, arg0));
8286 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8287 with a constant, and the two constants have no bits in common,
8288 we should treat this as a BIT_IOR_EXPR since this may produce more
8289 simplifications. */
8290 if (TREE_CODE (arg0) == BIT_AND_EXPR
8291 && TREE_CODE (arg1) == BIT_AND_EXPR
8292 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8293 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8294 && integer_zerop (const_binop (BIT_AND_EXPR,
8295 TREE_OPERAND (arg0, 1),
8296 TREE_OPERAND (arg1, 1), 0)))
8298 code = BIT_IOR_EXPR;
8299 goto bit_ior;
8302 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8303 (plus (plus (mult) (mult)) (foo)) so that we can
8304 take advantage of the factoring cases below. */
8305 if (((TREE_CODE (arg0) == PLUS_EXPR
8306 || TREE_CODE (arg0) == MINUS_EXPR)
8307 && TREE_CODE (arg1) == MULT_EXPR)
8308 || ((TREE_CODE (arg1) == PLUS_EXPR
8309 || TREE_CODE (arg1) == MINUS_EXPR)
8310 && TREE_CODE (arg0) == MULT_EXPR))
8312 tree parg0, parg1, parg, marg;
8313 enum tree_code pcode;
8315 if (TREE_CODE (arg1) == MULT_EXPR)
8316 parg = arg0, marg = arg1;
8317 else
8318 parg = arg1, marg = arg0;
8319 pcode = TREE_CODE (parg);
8320 parg0 = TREE_OPERAND (parg, 0);
8321 parg1 = TREE_OPERAND (parg, 1);
8322 STRIP_NOPS (parg0);
8323 STRIP_NOPS (parg1);
8325 if (TREE_CODE (parg0) == MULT_EXPR
8326 && TREE_CODE (parg1) != MULT_EXPR)
8327 return fold_build2 (pcode, type,
8328 fold_build2 (PLUS_EXPR, type,
8329 fold_convert (type, parg0),
8330 fold_convert (type, marg)),
8331 fold_convert (type, parg1));
8332 if (TREE_CODE (parg0) != MULT_EXPR
8333 && TREE_CODE (parg1) == MULT_EXPR)
8334 return fold_build2 (PLUS_EXPR, type,
8335 fold_convert (type, parg0),
8336 fold_build2 (pcode, type,
8337 fold_convert (type, marg),
8338 fold_convert (type,
8339 parg1)));
8342 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8343 of the array. Loop optimizer sometimes produce this type of
8344 expressions. */
8345 if (TREE_CODE (arg0) == ADDR_EXPR)
8347 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8348 if (tem)
8349 return fold_convert (type, tem);
8351 else if (TREE_CODE (arg1) == ADDR_EXPR)
8353 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8354 if (tem)
8355 return fold_convert (type, tem);
8358 else
8360 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8361 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8362 return non_lvalue (fold_convert (type, arg0));
8364 /* Likewise if the operands are reversed. */
8365 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8366 return non_lvalue (fold_convert (type, arg1));
8368 /* Convert X + -C into X - C. */
8369 if (TREE_CODE (arg1) == REAL_CST
8370 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8372 tem = fold_negate_const (arg1, type);
8373 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8374 return fold_build2 (MINUS_EXPR, type,
8375 fold_convert (type, arg0),
8376 fold_convert (type, tem));
8379 if (flag_unsafe_math_optimizations
8380 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8381 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8382 && (tem = distribute_real_division (code, type, arg0, arg1)))
8383 return tem;
8385 /* Convert x+x into x*2.0. */
8386 if (operand_equal_p (arg0, arg1, 0)
8387 && SCALAR_FLOAT_TYPE_P (type))
8388 return fold_build2 (MULT_EXPR, type, arg0,
8389 build_real (type, dconst2));
8391 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8392 if (flag_unsafe_math_optimizations
8393 && TREE_CODE (arg1) == PLUS_EXPR
8394 && TREE_CODE (arg0) != MULT_EXPR)
8396 tree tree10 = TREE_OPERAND (arg1, 0);
8397 tree tree11 = TREE_OPERAND (arg1, 1);
8398 if (TREE_CODE (tree11) == MULT_EXPR
8399 && TREE_CODE (tree10) == MULT_EXPR)
8401 tree tree0;
8402 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8403 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8406 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8407 if (flag_unsafe_math_optimizations
8408 && TREE_CODE (arg0) == PLUS_EXPR
8409 && TREE_CODE (arg1) != MULT_EXPR)
8411 tree tree00 = TREE_OPERAND (arg0, 0);
8412 tree tree01 = TREE_OPERAND (arg0, 1);
8413 if (TREE_CODE (tree01) == MULT_EXPR
8414 && TREE_CODE (tree00) == MULT_EXPR)
8416 tree tree0;
8417 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8418 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8423 bit_rotate:
8424 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8425 is a rotate of A by C1 bits. */
8426 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8427 is a rotate of A by B bits. */
8429 enum tree_code code0, code1;
8430 code0 = TREE_CODE (arg0);
8431 code1 = TREE_CODE (arg1);
8432 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8433 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8434 && operand_equal_p (TREE_OPERAND (arg0, 0),
8435 TREE_OPERAND (arg1, 0), 0)
8436 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8438 tree tree01, tree11;
8439 enum tree_code code01, code11;
8441 tree01 = TREE_OPERAND (arg0, 1);
8442 tree11 = TREE_OPERAND (arg1, 1);
8443 STRIP_NOPS (tree01);
8444 STRIP_NOPS (tree11);
8445 code01 = TREE_CODE (tree01);
8446 code11 = TREE_CODE (tree11);
8447 if (code01 == INTEGER_CST
8448 && code11 == INTEGER_CST
8449 && TREE_INT_CST_HIGH (tree01) == 0
8450 && TREE_INT_CST_HIGH (tree11) == 0
8451 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8452 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8453 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8454 code0 == LSHIFT_EXPR ? tree01 : tree11);
8455 else if (code11 == MINUS_EXPR)
8457 tree tree110, tree111;
8458 tree110 = TREE_OPERAND (tree11, 0);
8459 tree111 = TREE_OPERAND (tree11, 1);
8460 STRIP_NOPS (tree110);
8461 STRIP_NOPS (tree111);
8462 if (TREE_CODE (tree110) == INTEGER_CST
8463 && 0 == compare_tree_int (tree110,
8464 TYPE_PRECISION
8465 (TREE_TYPE (TREE_OPERAND
8466 (arg0, 0))))
8467 && operand_equal_p (tree01, tree111, 0))
8468 return build2 ((code0 == LSHIFT_EXPR
8469 ? LROTATE_EXPR
8470 : RROTATE_EXPR),
8471 type, TREE_OPERAND (arg0, 0), tree01);
8473 else if (code01 == MINUS_EXPR)
8475 tree tree010, tree011;
8476 tree010 = TREE_OPERAND (tree01, 0);
8477 tree011 = TREE_OPERAND (tree01, 1);
8478 STRIP_NOPS (tree010);
8479 STRIP_NOPS (tree011);
8480 if (TREE_CODE (tree010) == INTEGER_CST
8481 && 0 == compare_tree_int (tree010,
8482 TYPE_PRECISION
8483 (TREE_TYPE (TREE_OPERAND
8484 (arg0, 0))))
8485 && operand_equal_p (tree11, tree011, 0))
8486 return build2 ((code0 != LSHIFT_EXPR
8487 ? LROTATE_EXPR
8488 : RROTATE_EXPR),
8489 type, TREE_OPERAND (arg0, 0), tree11);
8494 associate:
8495 /* In most languages, can't associate operations on floats through
8496 parentheses. Rather than remember where the parentheses were, we
8497 don't associate floats at all, unless the user has specified
8498 -funsafe-math-optimizations. */
8500 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8502 tree var0, con0, lit0, minus_lit0;
8503 tree var1, con1, lit1, minus_lit1;
8505 /* Split both trees into variables, constants, and literals. Then
8506 associate each group together, the constants with literals,
8507 then the result with variables. This increases the chances of
8508 literals being recombined later and of generating relocatable
8509 expressions for the sum of a constant and literal. */
8510 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8511 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8512 code == MINUS_EXPR);
8514 /* Only do something if we found more than two objects. Otherwise,
8515 nothing has changed and we risk infinite recursion. */
8516 if (2 < ((var0 != 0) + (var1 != 0)
8517 + (con0 != 0) + (con1 != 0)
8518 + (lit0 != 0) + (lit1 != 0)
8519 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8521 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8522 if (code == MINUS_EXPR)
8523 code = PLUS_EXPR;
8525 var0 = associate_trees (var0, var1, code, type);
8526 con0 = associate_trees (con0, con1, code, type);
8527 lit0 = associate_trees (lit0, lit1, code, type);
8528 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8530 /* Preserve the MINUS_EXPR if the negative part of the literal is
8531 greater than the positive part. Otherwise, the multiplicative
8532 folding code (i.e extract_muldiv) may be fooled in case
8533 unsigned constants are subtracted, like in the following
8534 example: ((X*2 + 4) - 8U)/2. */
8535 if (minus_lit0 && lit0)
8537 if (TREE_CODE (lit0) == INTEGER_CST
8538 && TREE_CODE (minus_lit0) == INTEGER_CST
8539 && tree_int_cst_lt (lit0, minus_lit0))
8541 minus_lit0 = associate_trees (minus_lit0, lit0,
8542 MINUS_EXPR, type);
8543 lit0 = 0;
8545 else
8547 lit0 = associate_trees (lit0, minus_lit0,
8548 MINUS_EXPR, type);
8549 minus_lit0 = 0;
8552 if (minus_lit0)
8554 if (con0 == 0)
8555 return fold_convert (type,
8556 associate_trees (var0, minus_lit0,
8557 MINUS_EXPR, type));
8558 else
8560 con0 = associate_trees (con0, minus_lit0,
8561 MINUS_EXPR, type);
8562 return fold_convert (type,
8563 associate_trees (var0, con0,
8564 PLUS_EXPR, type));
8568 con0 = associate_trees (con0, lit0, code, type);
8569 return fold_convert (type, associate_trees (var0, con0,
8570 code, type));
8574 return NULL_TREE;
8576 case MINUS_EXPR:
8577 /* A - (-B) -> A + B */
8578 if (TREE_CODE (arg1) == NEGATE_EXPR)
8579 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8580 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8581 if (TREE_CODE (arg0) == NEGATE_EXPR
8582 && (FLOAT_TYPE_P (type)
8583 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8584 && negate_expr_p (arg1)
8585 && reorder_operands_p (arg0, arg1))
8586 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8587 TREE_OPERAND (arg0, 0));
8588 /* Convert -A - 1 to ~A. */
8589 if (INTEGRAL_TYPE_P (type)
8590 && TREE_CODE (arg0) == NEGATE_EXPR
8591 && integer_onep (arg1))
8592 return fold_build1 (BIT_NOT_EXPR, type,
8593 fold_convert (type, TREE_OPERAND (arg0, 0)));
8595 /* Convert -1 - A to ~A. */
8596 if (INTEGRAL_TYPE_P (type)
8597 && integer_all_onesp (arg0))
8598 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8600 if (! FLOAT_TYPE_P (type))
8602 if (integer_zerop (arg0))
8603 return negate_expr (fold_convert (type, arg1));
8604 if (integer_zerop (arg1))
8605 return non_lvalue (fold_convert (type, arg0));
8607 /* Fold A - (A & B) into ~B & A. */
8608 if (!TREE_SIDE_EFFECTS (arg0)
8609 && TREE_CODE (arg1) == BIT_AND_EXPR)
8611 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8612 return fold_build2 (BIT_AND_EXPR, type,
8613 fold_build1 (BIT_NOT_EXPR, type,
8614 TREE_OPERAND (arg1, 0)),
8615 arg0);
8616 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8617 return fold_build2 (BIT_AND_EXPR, type,
8618 fold_build1 (BIT_NOT_EXPR, type,
8619 TREE_OPERAND (arg1, 1)),
8620 arg0);
8623 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8624 any power of 2 minus 1. */
8625 if (TREE_CODE (arg0) == BIT_AND_EXPR
8626 && TREE_CODE (arg1) == BIT_AND_EXPR
8627 && operand_equal_p (TREE_OPERAND (arg0, 0),
8628 TREE_OPERAND (arg1, 0), 0))
8630 tree mask0 = TREE_OPERAND (arg0, 1);
8631 tree mask1 = TREE_OPERAND (arg1, 1);
8632 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8634 if (operand_equal_p (tem, mask1, 0))
8636 tem = fold_build2 (BIT_XOR_EXPR, type,
8637 TREE_OPERAND (arg0, 0), mask1);
8638 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8643 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8644 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8645 return non_lvalue (fold_convert (type, arg0));
8647 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8648 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8649 (-ARG1 + ARG0) reduces to -ARG1. */
8650 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8651 return negate_expr (fold_convert (type, arg1));
8653 /* Fold &x - &x. This can happen from &x.foo - &x.
8654 This is unsafe for certain floats even in non-IEEE formats.
8655 In IEEE, it is unsafe because it does wrong for NaNs.
8656 Also note that operand_equal_p is always false if an operand
8657 is volatile. */
8659 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8660 && operand_equal_p (arg0, arg1, 0))
8661 return fold_convert (type, integer_zero_node);
8663 /* A - B -> A + (-B) if B is easily negatable. */
8664 if (negate_expr_p (arg1)
8665 && ((FLOAT_TYPE_P (type)
8666 /* Avoid this transformation if B is a positive REAL_CST. */
8667 && (TREE_CODE (arg1) != REAL_CST
8668 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8669 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8670 return fold_build2 (PLUS_EXPR, type,
8671 fold_convert (type, arg0),
8672 fold_convert (type, negate_expr (arg1)));
8674 /* Try folding difference of addresses. */
8676 HOST_WIDE_INT diff;
8678 if ((TREE_CODE (arg0) == ADDR_EXPR
8679 || TREE_CODE (arg1) == ADDR_EXPR)
8680 && ptr_difference_const (arg0, arg1, &diff))
8681 return build_int_cst_type (type, diff);
8684 /* Fold &a[i] - &a[j] to i-j. */
8685 if (TREE_CODE (arg0) == ADDR_EXPR
8686 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8687 && TREE_CODE (arg1) == ADDR_EXPR
8688 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8690 tree aref0 = TREE_OPERAND (arg0, 0);
8691 tree aref1 = TREE_OPERAND (arg1, 0);
8692 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8693 TREE_OPERAND (aref1, 0), 0))
8695 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8696 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8697 tree esz = array_ref_element_size (aref0);
8698 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8699 return fold_build2 (MULT_EXPR, type, diff,
8700 fold_convert (type, esz));
8705 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
8706 of the array. Loop optimizer sometimes produce this type of
8707 expressions. */
8708 if (TREE_CODE (arg0) == ADDR_EXPR)
8710 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8711 if (tem)
8712 return fold_convert (type, tem);
8715 if (flag_unsafe_math_optimizations
8716 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8717 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8718 && (tem = distribute_real_division (code, type, arg0, arg1)))
8719 return tem;
8721 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8722 same or one. */
8723 if ((TREE_CODE (arg0) == MULT_EXPR
8724 || TREE_CODE (arg1) == MULT_EXPR)
8725 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8727 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8728 if (tem)
8729 return tem;
8732 goto associate;
8734 case MULT_EXPR:
8735 /* (-A) * (-B) -> A * B */
8736 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8737 return fold_build2 (MULT_EXPR, type,
8738 TREE_OPERAND (arg0, 0),
8739 negate_expr (arg1));
8740 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8741 return fold_build2 (MULT_EXPR, type,
8742 negate_expr (arg0),
8743 TREE_OPERAND (arg1, 0));
8745 if (! FLOAT_TYPE_P (type))
8747 if (integer_zerop (arg1))
8748 return omit_one_operand (type, arg1, arg0);
8749 if (integer_onep (arg1))
8750 return non_lvalue (fold_convert (type, arg0));
8751 /* Transform x * -1 into -x. */
8752 if (integer_all_onesp (arg1))
8753 return fold_convert (type, negate_expr (arg0));
8755 /* (a * (1 << b)) is (a << b) */
8756 if (TREE_CODE (arg1) == LSHIFT_EXPR
8757 && integer_onep (TREE_OPERAND (arg1, 0)))
8758 return fold_build2 (LSHIFT_EXPR, type, arg0,
8759 TREE_OPERAND (arg1, 1));
8760 if (TREE_CODE (arg0) == LSHIFT_EXPR
8761 && integer_onep (TREE_OPERAND (arg0, 0)))
8762 return fold_build2 (LSHIFT_EXPR, type, arg1,
8763 TREE_OPERAND (arg0, 1));
8765 if (TREE_CODE (arg1) == INTEGER_CST
8766 && 0 != (tem = extract_muldiv (op0,
8767 fold_convert (type, arg1),
8768 code, NULL_TREE)))
8769 return fold_convert (type, tem);
8772 else
8774 /* Maybe fold x * 0 to 0. The expressions aren't the same
8775 when x is NaN, since x * 0 is also NaN. Nor are they the
8776 same in modes with signed zeros, since multiplying a
8777 negative value by 0 gives -0, not +0. */
8778 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8779 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8780 && real_zerop (arg1))
8781 return omit_one_operand (type, arg1, arg0);
8782 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8783 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8784 && real_onep (arg1))
8785 return non_lvalue (fold_convert (type, arg0));
8787 /* Transform x * -1.0 into -x. */
8788 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8789 && real_minus_onep (arg1))
8790 return fold_convert (type, negate_expr (arg0));
8792 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8793 if (flag_unsafe_math_optimizations
8794 && TREE_CODE (arg0) == RDIV_EXPR
8795 && TREE_CODE (arg1) == REAL_CST
8796 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8798 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8799 arg1, 0);
8800 if (tem)
8801 return fold_build2 (RDIV_EXPR, type, tem,
8802 TREE_OPERAND (arg0, 1));
8805 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8806 if (operand_equal_p (arg0, arg1, 0))
8808 tree tem = fold_strip_sign_ops (arg0);
8809 if (tem != NULL_TREE)
8811 tem = fold_convert (type, tem);
8812 return fold_build2 (MULT_EXPR, type, tem, tem);
8816 if (flag_unsafe_math_optimizations)
8818 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8819 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8821 /* Optimizations of root(...)*root(...). */
8822 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8824 tree rootfn, arg, arglist;
8825 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8826 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8828 /* Optimize sqrt(x)*sqrt(x) as x. */
8829 if (BUILTIN_SQRT_P (fcode0)
8830 && operand_equal_p (arg00, arg10, 0)
8831 && ! HONOR_SNANS (TYPE_MODE (type)))
8832 return arg00;
8834 /* Optimize root(x)*root(y) as root(x*y). */
8835 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8836 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8837 arglist = build_tree_list (NULL_TREE, arg);
8838 return build_function_call_expr (rootfn, arglist);
8841 /* Optimize expN(x)*expN(y) as expN(x+y). */
8842 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8844 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8845 tree arg = fold_build2 (PLUS_EXPR, type,
8846 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8847 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8848 tree arglist = build_tree_list (NULL_TREE, arg);
8849 return build_function_call_expr (expfn, arglist);
8852 /* Optimizations of pow(...)*pow(...). */
8853 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8854 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8855 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8857 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8858 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8859 1)));
8860 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8861 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8862 1)));
8864 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8865 if (operand_equal_p (arg01, arg11, 0))
8867 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8868 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8869 tree arglist = tree_cons (NULL_TREE, arg,
8870 build_tree_list (NULL_TREE,
8871 arg01));
8872 return build_function_call_expr (powfn, arglist);
8875 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8876 if (operand_equal_p (arg00, arg10, 0))
8878 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8879 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8880 tree arglist = tree_cons (NULL_TREE, arg00,
8881 build_tree_list (NULL_TREE,
8882 arg));
8883 return build_function_call_expr (powfn, arglist);
8887 /* Optimize tan(x)*cos(x) as sin(x). */
8888 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8889 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8890 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8891 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8892 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8893 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8894 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8895 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8897 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8899 if (sinfn != NULL_TREE)
8900 return build_function_call_expr (sinfn,
8901 TREE_OPERAND (arg0, 1));
8904 /* Optimize x*pow(x,c) as pow(x,c+1). */
8905 if (fcode1 == BUILT_IN_POW
8906 || fcode1 == BUILT_IN_POWF
8907 || fcode1 == BUILT_IN_POWL)
8909 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8910 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8911 1)));
8912 if (TREE_CODE (arg11) == REAL_CST
8913 && ! TREE_CONSTANT_OVERFLOW (arg11)
8914 && operand_equal_p (arg0, arg10, 0))
8916 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8917 REAL_VALUE_TYPE c;
8918 tree arg, arglist;
8920 c = TREE_REAL_CST (arg11);
8921 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8922 arg = build_real (type, c);
8923 arglist = build_tree_list (NULL_TREE, arg);
8924 arglist = tree_cons (NULL_TREE, arg0, arglist);
8925 return build_function_call_expr (powfn, arglist);
8929 /* Optimize pow(x,c)*x as pow(x,c+1). */
8930 if (fcode0 == BUILT_IN_POW
8931 || fcode0 == BUILT_IN_POWF
8932 || fcode0 == BUILT_IN_POWL)
8934 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8935 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8936 1)));
8937 if (TREE_CODE (arg01) == REAL_CST
8938 && ! TREE_CONSTANT_OVERFLOW (arg01)
8939 && operand_equal_p (arg1, arg00, 0))
8941 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8942 REAL_VALUE_TYPE c;
8943 tree arg, arglist;
8945 c = TREE_REAL_CST (arg01);
8946 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8947 arg = build_real (type, c);
8948 arglist = build_tree_list (NULL_TREE, arg);
8949 arglist = tree_cons (NULL_TREE, arg1, arglist);
8950 return build_function_call_expr (powfn, arglist);
8954 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8955 if (! optimize_size
8956 && operand_equal_p (arg0, arg1, 0))
8958 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8960 if (powfn)
8962 tree arg = build_real (type, dconst2);
8963 tree arglist = build_tree_list (NULL_TREE, arg);
8964 arglist = tree_cons (NULL_TREE, arg0, arglist);
8965 return build_function_call_expr (powfn, arglist);
8970 goto associate;
8972 case BIT_IOR_EXPR:
8973 bit_ior:
8974 if (integer_all_onesp (arg1))
8975 return omit_one_operand (type, arg1, arg0);
8976 if (integer_zerop (arg1))
8977 return non_lvalue (fold_convert (type, arg0));
8978 if (operand_equal_p (arg0, arg1, 0))
8979 return non_lvalue (fold_convert (type, arg0));
8981 /* ~X | X is -1. */
8982 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8983 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8985 t1 = build_int_cst (type, -1);
8986 t1 = force_fit_type (t1, 0, false, false);
8987 return omit_one_operand (type, t1, arg1);
8990 /* X | ~X is -1. */
8991 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8992 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8994 t1 = build_int_cst (type, -1);
8995 t1 = force_fit_type (t1, 0, false, false);
8996 return omit_one_operand (type, t1, arg0);
8999 /* Canonicalize (X & C1) | C2. */
9000 if (TREE_CODE (arg0) == BIT_AND_EXPR
9001 && TREE_CODE (arg1) == INTEGER_CST
9002 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9004 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9005 int width = TYPE_PRECISION (type);
9006 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9007 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9008 hi2 = TREE_INT_CST_HIGH (arg1);
9009 lo2 = TREE_INT_CST_LOW (arg1);
9011 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9012 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9013 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9015 if (width > HOST_BITS_PER_WIDE_INT)
9017 mhi = (unsigned HOST_WIDE_INT) -1
9018 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9019 mlo = -1;
9021 else
9023 mhi = 0;
9024 mlo = (unsigned HOST_WIDE_INT) -1
9025 >> (HOST_BITS_PER_WIDE_INT - width);
9028 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9029 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9030 return fold_build2 (BIT_IOR_EXPR, type,
9031 TREE_OPERAND (arg0, 0), arg1);
9033 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9034 hi1 &= mhi;
9035 lo1 &= mlo;
9036 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9037 return fold_build2 (BIT_IOR_EXPR, type,
9038 fold_build2 (BIT_AND_EXPR, type,
9039 TREE_OPERAND (arg0, 0),
9040 build_int_cst_wide (type,
9041 lo1 & ~lo2,
9042 hi1 & ~hi2)),
9043 arg1);
9046 /* (X & Y) | Y is (X, Y). */
9047 if (TREE_CODE (arg0) == BIT_AND_EXPR
9048 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9049 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9050 /* (X & Y) | X is (Y, X). */
9051 if (TREE_CODE (arg0) == BIT_AND_EXPR
9052 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9053 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9054 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9055 /* X | (X & Y) is (Y, X). */
9056 if (TREE_CODE (arg1) == BIT_AND_EXPR
9057 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9058 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9059 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9060 /* X | (Y & X) is (Y, X). */
9061 if (TREE_CODE (arg1) == BIT_AND_EXPR
9062 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9063 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9064 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9066 t1 = distribute_bit_expr (code, type, arg0, arg1);
9067 if (t1 != NULL_TREE)
9068 return t1;
9070 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9072 This results in more efficient code for machines without a NAND
9073 instruction. Combine will canonicalize to the first form
9074 which will allow use of NAND instructions provided by the
9075 backend if they exist. */
9076 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9077 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9079 return fold_build1 (BIT_NOT_EXPR, type,
9080 build2 (BIT_AND_EXPR, type,
9081 TREE_OPERAND (arg0, 0),
9082 TREE_OPERAND (arg1, 0)));
9085 /* See if this can be simplified into a rotate first. If that
9086 is unsuccessful continue in the association code. */
9087 goto bit_rotate;
9089 case BIT_XOR_EXPR:
9090 if (integer_zerop (arg1))
9091 return non_lvalue (fold_convert (type, arg0));
9092 if (integer_all_onesp (arg1))
9093 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9094 if (operand_equal_p (arg0, arg1, 0))
9095 return omit_one_operand (type, integer_zero_node, arg0);
9097 /* ~X ^ X is -1. */
9098 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9099 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9101 t1 = build_int_cst (type, -1);
9102 t1 = force_fit_type (t1, 0, false, false);
9103 return omit_one_operand (type, t1, arg1);
9106 /* X ^ ~X is -1. */
9107 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9108 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9110 t1 = build_int_cst (type, -1);
9111 t1 = force_fit_type (t1, 0, false, false);
9112 return omit_one_operand (type, t1, arg0);
9115 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9116 with a constant, and the two constants have no bits in common,
9117 we should treat this as a BIT_IOR_EXPR since this may produce more
9118 simplifications. */
9119 if (TREE_CODE (arg0) == BIT_AND_EXPR
9120 && TREE_CODE (arg1) == BIT_AND_EXPR
9121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9122 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9123 && integer_zerop (const_binop (BIT_AND_EXPR,
9124 TREE_OPERAND (arg0, 1),
9125 TREE_OPERAND (arg1, 1), 0)))
9127 code = BIT_IOR_EXPR;
9128 goto bit_ior;
9131 /* (X | Y) ^ X -> Y & ~ X*/
9132 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9135 tree t2 = TREE_OPERAND (arg0, 1);
9136 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9137 arg1);
9138 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9139 fold_convert (type, t1));
9140 return t1;
9143 /* (Y | X) ^ X -> Y & ~ X*/
9144 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9145 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9147 tree t2 = TREE_OPERAND (arg0, 0);
9148 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9149 arg1);
9150 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9151 fold_convert (type, t1));
9152 return t1;
9155 /* X ^ (X | Y) -> Y & ~ X*/
9156 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9157 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9159 tree t2 = TREE_OPERAND (arg1, 1);
9160 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9161 arg0);
9162 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9163 fold_convert (type, t1));
9164 return t1;
9167 /* X ^ (Y | X) -> Y & ~ X*/
9168 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9169 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9171 tree t2 = TREE_OPERAND (arg1, 0);
9172 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9173 arg0);
9174 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9175 fold_convert (type, t1));
9176 return t1;
9179 /* Convert ~X ^ ~Y to X ^ Y. */
9180 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9181 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9182 return fold_build2 (code, type,
9183 fold_convert (type, TREE_OPERAND (arg0, 0)),
9184 fold_convert (type, TREE_OPERAND (arg1, 0)));
9186 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9187 if (TREE_CODE (arg0) == BIT_AND_EXPR
9188 && integer_onep (TREE_OPERAND (arg0, 1))
9189 && integer_onep (arg1))
9190 return fold_build2 (EQ_EXPR, type, arg0,
9191 build_int_cst (TREE_TYPE (arg0), 0));
9193 /* Fold (X & Y) ^ Y as ~X & Y. */
9194 if (TREE_CODE (arg0) == BIT_AND_EXPR
9195 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9197 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9198 return fold_build2 (BIT_AND_EXPR, type,
9199 fold_build1 (BIT_NOT_EXPR, type, tem),
9200 fold_convert (type, arg1));
9202 /* Fold (X & Y) ^ X as ~Y & X. */
9203 if (TREE_CODE (arg0) == BIT_AND_EXPR
9204 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9205 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9207 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9208 return fold_build2 (BIT_AND_EXPR, type,
9209 fold_build1 (BIT_NOT_EXPR, type, tem),
9210 fold_convert (type, arg1));
9212 /* Fold X ^ (X & Y) as X & ~Y. */
9213 if (TREE_CODE (arg1) == BIT_AND_EXPR
9214 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9216 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9217 return fold_build2 (BIT_AND_EXPR, type,
9218 fold_convert (type, arg0),
9219 fold_build1 (BIT_NOT_EXPR, type, tem));
9221 /* Fold X ^ (Y & X) as ~Y & X. */
9222 if (TREE_CODE (arg1) == BIT_AND_EXPR
9223 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9224 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9226 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9227 return fold_build2 (BIT_AND_EXPR, type,
9228 fold_build1 (BIT_NOT_EXPR, type, tem),
9229 fold_convert (type, arg0));
9232 /* See if this can be simplified into a rotate first. If that
9233 is unsuccessful continue in the association code. */
9234 goto bit_rotate;
9236 case BIT_AND_EXPR:
9237 if (integer_all_onesp (arg1))
9238 return non_lvalue (fold_convert (type, arg0));
9239 if (integer_zerop (arg1))
9240 return omit_one_operand (type, arg1, arg0);
9241 if (operand_equal_p (arg0, arg1, 0))
9242 return non_lvalue (fold_convert (type, arg0));
9244 /* ~X & X is always zero. */
9245 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9246 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9247 return omit_one_operand (type, integer_zero_node, arg1);
9249 /* X & ~X is always zero. */
9250 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9251 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9252 return omit_one_operand (type, integer_zero_node, arg0);
9254 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9255 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9256 && TREE_CODE (arg1) == INTEGER_CST
9257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9258 return fold_build2 (BIT_IOR_EXPR, type,
9259 fold_build2 (BIT_AND_EXPR, type,
9260 TREE_OPERAND (arg0, 0), arg1),
9261 fold_build2 (BIT_AND_EXPR, type,
9262 TREE_OPERAND (arg0, 1), arg1));
9264 /* (X | Y) & Y is (X, Y). */
9265 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9266 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9267 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9268 /* (X | Y) & X is (Y, X). */
9269 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9270 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9271 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9272 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9273 /* X & (X | Y) is (Y, X). */
9274 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9275 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9276 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9277 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9278 /* X & (Y | X) is (Y, X). */
9279 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9280 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9281 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9282 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9284 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9285 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9286 && integer_onep (TREE_OPERAND (arg0, 1))
9287 && integer_onep (arg1))
9289 tem = TREE_OPERAND (arg0, 0);
9290 return fold_build2 (EQ_EXPR, type,
9291 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9292 build_int_cst (TREE_TYPE (tem), 1)),
9293 build_int_cst (TREE_TYPE (tem), 0));
9295 /* Fold ~X & 1 as (X & 1) == 0. */
9296 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9297 && integer_onep (arg1))
9299 tem = TREE_OPERAND (arg0, 0);
9300 return fold_build2 (EQ_EXPR, type,
9301 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9302 build_int_cst (TREE_TYPE (tem), 1)),
9303 build_int_cst (TREE_TYPE (tem), 0));
9306 /* Fold (X ^ Y) & Y as ~X & Y. */
9307 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9308 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9310 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9311 return fold_build2 (BIT_AND_EXPR, type,
9312 fold_build1 (BIT_NOT_EXPR, type, tem),
9313 fold_convert (type, arg1));
9315 /* Fold (X ^ Y) & X as ~Y & X. */
9316 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9317 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9318 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9320 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9321 return fold_build2 (BIT_AND_EXPR, type,
9322 fold_build1 (BIT_NOT_EXPR, type, tem),
9323 fold_convert (type, arg1));
9325 /* Fold X & (X ^ Y) as X & ~Y. */
9326 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9327 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9329 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9330 return fold_build2 (BIT_AND_EXPR, type,
9331 fold_convert (type, arg0),
9332 fold_build1 (BIT_NOT_EXPR, type, tem));
9334 /* Fold X & (Y ^ X) as ~Y & X. */
9335 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9336 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9337 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9339 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9340 return fold_build2 (BIT_AND_EXPR, type,
9341 fold_build1 (BIT_NOT_EXPR, type, tem),
9342 fold_convert (type, arg0));
9345 t1 = distribute_bit_expr (code, type, arg0, arg1);
9346 if (t1 != NULL_TREE)
9347 return t1;
9348 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9349 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9350 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9352 unsigned int prec
9353 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9355 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9356 && (~TREE_INT_CST_LOW (arg1)
9357 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9358 return fold_convert (type, TREE_OPERAND (arg0, 0));
9361 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9363 This results in more efficient code for machines without a NOR
9364 instruction. Combine will canonicalize to the first form
9365 which will allow use of NOR instructions provided by the
9366 backend if they exist. */
9367 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9368 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9370 return fold_build1 (BIT_NOT_EXPR, type,
9371 build2 (BIT_IOR_EXPR, type,
9372 TREE_OPERAND (arg0, 0),
9373 TREE_OPERAND (arg1, 0)));
9376 goto associate;
9378 case RDIV_EXPR:
9379 /* Don't touch a floating-point divide by zero unless the mode
9380 of the constant can represent infinity. */
9381 if (TREE_CODE (arg1) == REAL_CST
9382 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9383 && real_zerop (arg1))
9384 return NULL_TREE;
9386 /* Optimize A / A to 1.0 if we don't care about
9387 NaNs or Infinities. Skip the transformation
9388 for non-real operands. */
9389 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9390 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9391 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9392 && operand_equal_p (arg0, arg1, 0))
9394 tree r = build_real (TREE_TYPE (arg0), dconst1);
9396 return omit_two_operands (type, r, arg0, arg1);
9399 /* The complex version of the above A / A optimization. */
9400 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9401 && operand_equal_p (arg0, arg1, 0))
9403 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9404 if (! HONOR_NANS (TYPE_MODE (elem_type))
9405 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9407 tree r = build_real (elem_type, dconst1);
9408 /* omit_two_operands will call fold_convert for us. */
9409 return omit_two_operands (type, r, arg0, arg1);
9413 /* (-A) / (-B) -> A / B */
9414 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9415 return fold_build2 (RDIV_EXPR, type,
9416 TREE_OPERAND (arg0, 0),
9417 negate_expr (arg1));
9418 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9419 return fold_build2 (RDIV_EXPR, type,
9420 negate_expr (arg0),
9421 TREE_OPERAND (arg1, 0));
9423 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9424 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9425 && real_onep (arg1))
9426 return non_lvalue (fold_convert (type, arg0));
9428 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9429 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9430 && real_minus_onep (arg1))
9431 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9433 /* If ARG1 is a constant, we can convert this to a multiply by the
9434 reciprocal. This does not have the same rounding properties,
9435 so only do this if -funsafe-math-optimizations. We can actually
9436 always safely do it if ARG1 is a power of two, but it's hard to
9437 tell if it is or not in a portable manner. */
9438 if (TREE_CODE (arg1) == REAL_CST)
9440 if (flag_unsafe_math_optimizations
9441 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9442 arg1, 0)))
9443 return fold_build2 (MULT_EXPR, type, arg0, tem);
9444 /* Find the reciprocal if optimizing and the result is exact. */
9445 if (optimize)
9447 REAL_VALUE_TYPE r;
9448 r = TREE_REAL_CST (arg1);
9449 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9451 tem = build_real (type, r);
9452 return fold_build2 (MULT_EXPR, type,
9453 fold_convert (type, arg0), tem);
9457 /* Convert A/B/C to A/(B*C). */
9458 if (flag_unsafe_math_optimizations
9459 && TREE_CODE (arg0) == RDIV_EXPR)
9460 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9461 fold_build2 (MULT_EXPR, type,
9462 TREE_OPERAND (arg0, 1), arg1));
9464 /* Convert A/(B/C) to (A/B)*C. */
9465 if (flag_unsafe_math_optimizations
9466 && TREE_CODE (arg1) == RDIV_EXPR)
9467 return fold_build2 (MULT_EXPR, type,
9468 fold_build2 (RDIV_EXPR, type, arg0,
9469 TREE_OPERAND (arg1, 0)),
9470 TREE_OPERAND (arg1, 1));
9472 /* Convert C1/(X*C2) into (C1/C2)/X. */
9473 if (flag_unsafe_math_optimizations
9474 && TREE_CODE (arg1) == MULT_EXPR
9475 && TREE_CODE (arg0) == REAL_CST
9476 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9478 tree tem = const_binop (RDIV_EXPR, arg0,
9479 TREE_OPERAND (arg1, 1), 0);
9480 if (tem)
9481 return fold_build2 (RDIV_EXPR, type, tem,
9482 TREE_OPERAND (arg1, 0));
9485 if (flag_unsafe_math_optimizations)
9487 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9488 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9490 /* Optimize sin(x)/cos(x) as tan(x). */
9491 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9492 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9493 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9494 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9495 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9497 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9499 if (tanfn != NULL_TREE)
9500 return build_function_call_expr (tanfn,
9501 TREE_OPERAND (arg0, 1));
9504 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9505 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9506 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9507 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9508 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9509 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9511 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9513 if (tanfn != NULL_TREE)
9515 tree tmp = TREE_OPERAND (arg0, 1);
9516 tmp = build_function_call_expr (tanfn, tmp);
9517 return fold_build2 (RDIV_EXPR, type,
9518 build_real (type, dconst1), tmp);
9522 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9523 NaNs or Infinities. */
9524 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9525 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9526 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9528 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9529 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9531 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9532 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9533 && operand_equal_p (arg00, arg01, 0))
9535 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9537 if (cosfn != NULL_TREE)
9538 return build_function_call_expr (cosfn,
9539 TREE_OPERAND (arg0, 1));
9543 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9544 NaNs or Infinities. */
9545 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9546 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9547 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9549 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9550 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9552 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9553 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9554 && operand_equal_p (arg00, arg01, 0))
9556 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9558 if (cosfn != NULL_TREE)
9560 tree tmp = TREE_OPERAND (arg0, 1);
9561 tmp = build_function_call_expr (cosfn, tmp);
9562 return fold_build2 (RDIV_EXPR, type,
9563 build_real (type, dconst1),
9564 tmp);
9569 /* Optimize pow(x,c)/x as pow(x,c-1). */
9570 if (fcode0 == BUILT_IN_POW
9571 || fcode0 == BUILT_IN_POWF
9572 || fcode0 == BUILT_IN_POWL)
9574 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9575 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9576 if (TREE_CODE (arg01) == REAL_CST
9577 && ! TREE_CONSTANT_OVERFLOW (arg01)
9578 && operand_equal_p (arg1, arg00, 0))
9580 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9581 REAL_VALUE_TYPE c;
9582 tree arg, arglist;
9584 c = TREE_REAL_CST (arg01);
9585 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9586 arg = build_real (type, c);
9587 arglist = build_tree_list (NULL_TREE, arg);
9588 arglist = tree_cons (NULL_TREE, arg1, arglist);
9589 return build_function_call_expr (powfn, arglist);
9593 /* Optimize x/expN(y) into x*expN(-y). */
9594 if (BUILTIN_EXPONENT_P (fcode1))
9596 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9597 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9598 tree arglist = build_tree_list (NULL_TREE,
9599 fold_convert (type, arg));
9600 arg1 = build_function_call_expr (expfn, arglist);
9601 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9604 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9605 if (fcode1 == BUILT_IN_POW
9606 || fcode1 == BUILT_IN_POWF
9607 || fcode1 == BUILT_IN_POWL)
9609 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9610 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9611 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9612 tree neg11 = fold_convert (type, negate_expr (arg11));
9613 tree arglist = tree_cons(NULL_TREE, arg10,
9614 build_tree_list (NULL_TREE, neg11));
9615 arg1 = build_function_call_expr (powfn, arglist);
9616 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9619 return NULL_TREE;
9621 case TRUNC_DIV_EXPR:
9622 case FLOOR_DIV_EXPR:
9623 /* Simplify A / (B << N) where A and B are positive and B is
9624 a power of 2, to A >> (N + log2(B)). */
9625 if (TREE_CODE (arg1) == LSHIFT_EXPR
9626 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9628 tree sval = TREE_OPERAND (arg1, 0);
9629 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9631 tree sh_cnt = TREE_OPERAND (arg1, 1);
9632 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9634 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9635 sh_cnt, build_int_cst (NULL_TREE, pow2));
9636 return fold_build2 (RSHIFT_EXPR, type,
9637 fold_convert (type, arg0), sh_cnt);
9640 /* Fall thru */
9642 case ROUND_DIV_EXPR:
9643 case CEIL_DIV_EXPR:
9644 case EXACT_DIV_EXPR:
9645 if (integer_onep (arg1))
9646 return non_lvalue (fold_convert (type, arg0));
9647 if (integer_zerop (arg1))
9648 return NULL_TREE;
9649 /* X / -1 is -X. */
9650 if (!TYPE_UNSIGNED (type)
9651 && TREE_CODE (arg1) == INTEGER_CST
9652 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9653 && TREE_INT_CST_HIGH (arg1) == -1)
9654 return fold_convert (type, negate_expr (arg0));
9656 /* Convert -A / -B to A / B when the type is signed and overflow is
9657 undefined. */
9658 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9659 && TREE_CODE (arg0) == NEGATE_EXPR
9660 && negate_expr_p (arg1))
9661 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9662 negate_expr (arg1));
9663 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9664 && TREE_CODE (arg1) == NEGATE_EXPR
9665 && negate_expr_p (arg0))
9666 return fold_build2 (code, type, negate_expr (arg0),
9667 TREE_OPERAND (arg1, 0));
9669 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9670 operation, EXACT_DIV_EXPR.
9672 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9673 At one time others generated faster code, it's not clear if they do
9674 after the last round of changes to the DIV code in expmed.c. */
9675 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9676 && multiple_of_p (type, arg0, arg1))
9677 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9679 if (TREE_CODE (arg1) == INTEGER_CST
9680 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9681 return fold_convert (type, tem);
9683 return NULL_TREE;
9685 case CEIL_MOD_EXPR:
9686 case FLOOR_MOD_EXPR:
9687 case ROUND_MOD_EXPR:
9688 case TRUNC_MOD_EXPR:
9689 /* X % 1 is always zero, but be sure to preserve any side
9690 effects in X. */
9691 if (integer_onep (arg1))
9692 return omit_one_operand (type, integer_zero_node, arg0);
9694 /* X % 0, return X % 0 unchanged so that we can get the
9695 proper warnings and errors. */
9696 if (integer_zerop (arg1))
9697 return NULL_TREE;
9699 /* 0 % X is always zero, but be sure to preserve any side
9700 effects in X. Place this after checking for X == 0. */
9701 if (integer_zerop (arg0))
9702 return omit_one_operand (type, integer_zero_node, arg1);
9704 /* X % -1 is zero. */
9705 if (!TYPE_UNSIGNED (type)
9706 && TREE_CODE (arg1) == INTEGER_CST
9707 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9708 && TREE_INT_CST_HIGH (arg1) == -1)
9709 return omit_one_operand (type, integer_zero_node, arg0);
9711 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9712 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9713 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9714 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9716 tree c = arg1;
9717 /* Also optimize A % (C << N) where C is a power of 2,
9718 to A & ((C << N) - 1). */
9719 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9720 c = TREE_OPERAND (arg1, 0);
9722 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9724 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9725 arg1, integer_one_node);
9726 return fold_build2 (BIT_AND_EXPR, type,
9727 fold_convert (type, arg0),
9728 fold_convert (type, mask));
9732 /* X % -C is the same as X % C. */
9733 if (code == TRUNC_MOD_EXPR
9734 && !TYPE_UNSIGNED (type)
9735 && TREE_CODE (arg1) == INTEGER_CST
9736 && !TREE_CONSTANT_OVERFLOW (arg1)
9737 && TREE_INT_CST_HIGH (arg1) < 0
9738 && !flag_trapv
9739 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9740 && !sign_bit_p (arg1, arg1))
9741 return fold_build2 (code, type, fold_convert (type, arg0),
9742 fold_convert (type, negate_expr (arg1)));
9744 /* X % -Y is the same as X % Y. */
9745 if (code == TRUNC_MOD_EXPR
9746 && !TYPE_UNSIGNED (type)
9747 && TREE_CODE (arg1) == NEGATE_EXPR
9748 && !flag_trapv)
9749 return fold_build2 (code, type, fold_convert (type, arg0),
9750 fold_convert (type, TREE_OPERAND (arg1, 0)));
9752 if (TREE_CODE (arg1) == INTEGER_CST
9753 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9754 return fold_convert (type, tem);
9756 return NULL_TREE;
9758 case LROTATE_EXPR:
9759 case RROTATE_EXPR:
9760 if (integer_all_onesp (arg0))
9761 return omit_one_operand (type, arg0, arg1);
9762 goto shift;
9764 case RSHIFT_EXPR:
9765 /* Optimize -1 >> x for arithmetic right shifts. */
9766 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9767 return omit_one_operand (type, arg0, arg1);
9768 /* ... fall through ... */
9770 case LSHIFT_EXPR:
9771 shift:
9772 if (integer_zerop (arg1))
9773 return non_lvalue (fold_convert (type, arg0));
9774 if (integer_zerop (arg0))
9775 return omit_one_operand (type, arg0, arg1);
9777 /* Since negative shift count is not well-defined,
9778 don't try to compute it in the compiler. */
9779 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9780 return NULL_TREE;
9782 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9783 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
9784 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9785 && host_integerp (TREE_OPERAND (arg0, 1), false)
9786 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9788 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9789 + TREE_INT_CST_LOW (arg1));
9791 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9792 being well defined. */
9793 if (low >= TYPE_PRECISION (type))
9795 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9796 low = low % TYPE_PRECISION (type);
9797 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9798 return build_int_cst (type, 0);
9799 else
9800 low = TYPE_PRECISION (type) - 1;
9803 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9804 build_int_cst (type, low));
9807 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9808 into x & ((unsigned)-1 >> c) for unsigned types. */
9809 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9810 || (TYPE_UNSIGNED (type)
9811 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9812 && host_integerp (arg1, false)
9813 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9814 && host_integerp (TREE_OPERAND (arg0, 1), false)
9815 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9817 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9818 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9819 tree lshift;
9820 tree arg00;
9822 if (low0 == low1)
9824 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9826 lshift = build_int_cst (type, -1);
9827 lshift = int_const_binop (code, lshift, arg1, 0);
9829 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9833 /* Rewrite an LROTATE_EXPR by a constant into an
9834 RROTATE_EXPR by a new constant. */
9835 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9837 tree tem = build_int_cst (NULL_TREE,
9838 GET_MODE_BITSIZE (TYPE_MODE (type)));
9839 tem = fold_convert (TREE_TYPE (arg1), tem);
9840 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9841 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
9844 /* If we have a rotate of a bit operation with the rotate count and
9845 the second operand of the bit operation both constant,
9846 permute the two operations. */
9847 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9848 && (TREE_CODE (arg0) == BIT_AND_EXPR
9849 || TREE_CODE (arg0) == BIT_IOR_EXPR
9850 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9851 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9852 return fold_build2 (TREE_CODE (arg0), type,
9853 fold_build2 (code, type,
9854 TREE_OPERAND (arg0, 0), arg1),
9855 fold_build2 (code, type,
9856 TREE_OPERAND (arg0, 1), arg1));
9858 /* Two consecutive rotates adding up to the width of the mode can
9859 be ignored. */
9860 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9861 && TREE_CODE (arg0) == RROTATE_EXPR
9862 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9863 && TREE_INT_CST_HIGH (arg1) == 0
9864 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
9865 && ((TREE_INT_CST_LOW (arg1)
9866 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
9867 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
9868 return TREE_OPERAND (arg0, 0);
9870 return NULL_TREE;
9872 case MIN_EXPR:
9873 if (operand_equal_p (arg0, arg1, 0))
9874 return omit_one_operand (type, arg0, arg1);
9875 if (INTEGRAL_TYPE_P (type)
9876 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9877 return omit_one_operand (type, arg1, arg0);
9878 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
9879 if (tem)
9880 return tem;
9881 goto associate;
9883 case MAX_EXPR:
9884 if (operand_equal_p (arg0, arg1, 0))
9885 return omit_one_operand (type, arg0, arg1);
9886 if (INTEGRAL_TYPE_P (type)
9887 && TYPE_MAX_VALUE (type)
9888 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9889 return omit_one_operand (type, arg1, arg0);
9890 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
9891 if (tem)
9892 return tem;
9893 goto associate;
9895 case TRUTH_ANDIF_EXPR:
9896 /* Note that the operands of this must be ints
9897 and their values must be 0 or 1.
9898 ("true" is a fixed value perhaps depending on the language.) */
9899 /* If first arg is constant zero, return it. */
9900 if (integer_zerop (arg0))
9901 return fold_convert (type, arg0);
9902 case TRUTH_AND_EXPR:
9903 /* If either arg is constant true, drop it. */
9904 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9905 return non_lvalue (fold_convert (type, arg1));
9906 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
9907 /* Preserve sequence points. */
9908 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9909 return non_lvalue (fold_convert (type, arg0));
9910 /* If second arg is constant zero, result is zero, but first arg
9911 must be evaluated. */
9912 if (integer_zerop (arg1))
9913 return omit_one_operand (type, arg1, arg0);
9914 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9915 case will be handled here. */
9916 if (integer_zerop (arg0))
9917 return omit_one_operand (type, arg0, arg1);
9919 /* !X && X is always false. */
9920 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9921 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9922 return omit_one_operand (type, integer_zero_node, arg1);
9923 /* X && !X is always false. */
9924 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9925 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9926 return omit_one_operand (type, integer_zero_node, arg0);
9928 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
9929 means A >= Y && A != MAX, but in this case we know that
9930 A < X <= MAX. */
9932 if (!TREE_SIDE_EFFECTS (arg0)
9933 && !TREE_SIDE_EFFECTS (arg1))
9935 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
9936 if (tem && !operand_equal_p (tem, arg0, 0))
9937 return fold_build2 (code, type, tem, arg1);
9939 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
9940 if (tem && !operand_equal_p (tem, arg1, 0))
9941 return fold_build2 (code, type, arg0, tem);
9944 truth_andor:
9945 /* We only do these simplifications if we are optimizing. */
9946 if (!optimize)
9947 return NULL_TREE;
9949 /* Check for things like (A || B) && (A || C). We can convert this
9950 to A || (B && C). Note that either operator can be any of the four
9951 truth and/or operations and the transformation will still be
9952 valid. Also note that we only care about order for the
9953 ANDIF and ORIF operators. If B contains side effects, this
9954 might change the truth-value of A. */
9955 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9956 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9957 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9958 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9959 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9960 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9962 tree a00 = TREE_OPERAND (arg0, 0);
9963 tree a01 = TREE_OPERAND (arg0, 1);
9964 tree a10 = TREE_OPERAND (arg1, 0);
9965 tree a11 = TREE_OPERAND (arg1, 1);
9966 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9967 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9968 && (code == TRUTH_AND_EXPR
9969 || code == TRUTH_OR_EXPR));
9971 if (operand_equal_p (a00, a10, 0))
9972 return fold_build2 (TREE_CODE (arg0), type, a00,
9973 fold_build2 (code, type, a01, a11));
9974 else if (commutative && operand_equal_p (a00, a11, 0))
9975 return fold_build2 (TREE_CODE (arg0), type, a00,
9976 fold_build2 (code, type, a01, a10));
9977 else if (commutative && operand_equal_p (a01, a10, 0))
9978 return fold_build2 (TREE_CODE (arg0), type, a01,
9979 fold_build2 (code, type, a00, a11));
9981 /* This case is tricky because we must either have commutative
9982 operators or else A10 must not have side-effects. */
9984 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9985 && operand_equal_p (a01, a11, 0))
9986 return fold_build2 (TREE_CODE (arg0), type,
9987 fold_build2 (code, type, a00, a10),
9988 a01);
9991 /* See if we can build a range comparison. */
9992 if (0 != (tem = fold_range_test (code, type, op0, op1)))
9993 return tem;
9995 /* Check for the possibility of merging component references. If our
9996 lhs is another similar operation, try to merge its rhs with our
9997 rhs. Then try to merge our lhs and rhs. */
9998 if (TREE_CODE (arg0) == code
9999 && 0 != (tem = fold_truthop (code, type,
10000 TREE_OPERAND (arg0, 1), arg1)))
10001 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10003 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10004 return tem;
10006 return NULL_TREE;
10008 case TRUTH_ORIF_EXPR:
10009 /* Note that the operands of this must be ints
10010 and their values must be 0 or true.
10011 ("true" is a fixed value perhaps depending on the language.) */
10012 /* If first arg is constant true, return it. */
10013 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10014 return fold_convert (type, arg0);
10015 case TRUTH_OR_EXPR:
10016 /* If either arg is constant zero, drop it. */
10017 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10018 return non_lvalue (fold_convert (type, arg1));
10019 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10020 /* Preserve sequence points. */
10021 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10022 return non_lvalue (fold_convert (type, arg0));
10023 /* If second arg is constant true, result is true, but we must
10024 evaluate first arg. */
10025 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10026 return omit_one_operand (type, arg1, arg0);
10027 /* Likewise for first arg, but note this only occurs here for
10028 TRUTH_OR_EXPR. */
10029 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10030 return omit_one_operand (type, arg0, arg1);
10032 /* !X || X is always true. */
10033 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10034 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10035 return omit_one_operand (type, integer_one_node, arg1);
10036 /* X || !X is always true. */
10037 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10038 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10039 return omit_one_operand (type, integer_one_node, arg0);
10041 goto truth_andor;
10043 case TRUTH_XOR_EXPR:
10044 /* If the second arg is constant zero, drop it. */
10045 if (integer_zerop (arg1))
10046 return non_lvalue (fold_convert (type, arg0));
10047 /* If the second arg is constant true, this is a logical inversion. */
10048 if (integer_onep (arg1))
10050 /* Only call invert_truthvalue if operand is a truth value. */
10051 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10052 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10053 else
10054 tem = invert_truthvalue (arg0);
10055 return non_lvalue (fold_convert (type, tem));
10057 /* Identical arguments cancel to zero. */
10058 if (operand_equal_p (arg0, arg1, 0))
10059 return omit_one_operand (type, integer_zero_node, arg0);
10061 /* !X ^ X is always true. */
10062 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10063 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10064 return omit_one_operand (type, integer_one_node, arg1);
10066 /* X ^ !X is always true. */
10067 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10068 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10069 return omit_one_operand (type, integer_one_node, arg0);
10071 return NULL_TREE;
10073 case EQ_EXPR:
10074 case NE_EXPR:
10075 tem = fold_comparison (code, type, op0, op1);
10076 if (tem != NULL_TREE)
10077 return tem;
10079 /* bool_var != 0 becomes bool_var. */
10080 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10081 && code == NE_EXPR)
10082 return non_lvalue (fold_convert (type, arg0));
10084 /* bool_var == 1 becomes bool_var. */
10085 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10086 && code == EQ_EXPR)
10087 return non_lvalue (fold_convert (type, arg0));
10089 /* bool_var != 1 becomes !bool_var. */
10090 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10091 && code == NE_EXPR)
10092 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10094 /* bool_var == 0 becomes !bool_var. */
10095 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10096 && code == EQ_EXPR)
10097 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10099 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10100 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10101 && TREE_CODE (arg1) == INTEGER_CST)
10102 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10103 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10104 arg1));
10106 /* If this is an equality comparison of the address of a non-weak
10107 object against zero, then we know the result. */
10108 if (TREE_CODE (arg0) == ADDR_EXPR
10109 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10110 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10111 && integer_zerop (arg1))
10112 return constant_boolean_node (code != EQ_EXPR, type);
10114 /* If this is an equality comparison of the address of two non-weak,
10115 unaliased symbols neither of which are extern (since we do not
10116 have access to attributes for externs), then we know the result. */
10117 if (TREE_CODE (arg0) == ADDR_EXPR
10118 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10119 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10120 && ! lookup_attribute ("alias",
10121 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10122 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10123 && TREE_CODE (arg1) == ADDR_EXPR
10124 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10125 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10126 && ! lookup_attribute ("alias",
10127 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10128 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10130 /* We know that we're looking at the address of two
10131 non-weak, unaliased, static _DECL nodes.
10133 It is both wasteful and incorrect to call operand_equal_p
10134 to compare the two ADDR_EXPR nodes. It is wasteful in that
10135 all we need to do is test pointer equality for the arguments
10136 to the two ADDR_EXPR nodes. It is incorrect to use
10137 operand_equal_p as that function is NOT equivalent to a
10138 C equality test. It can in fact return false for two
10139 objects which would test as equal using the C equality
10140 operator. */
10141 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10142 return constant_boolean_node (equal
10143 ? code == EQ_EXPR : code != EQ_EXPR,
10144 type);
10147 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10148 a MINUS_EXPR of a constant, we can convert it into a comparison with
10149 a revised constant as long as no overflow occurs. */
10150 if (TREE_CODE (arg1) == INTEGER_CST
10151 && (TREE_CODE (arg0) == PLUS_EXPR
10152 || TREE_CODE (arg0) == MINUS_EXPR)
10153 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10154 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10155 ? MINUS_EXPR : PLUS_EXPR,
10156 arg1, TREE_OPERAND (arg0, 1), 0))
10157 && ! TREE_CONSTANT_OVERFLOW (tem))
10158 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10160 /* Similarly for a NEGATE_EXPR. */
10161 if (TREE_CODE (arg0) == NEGATE_EXPR
10162 && TREE_CODE (arg1) == INTEGER_CST
10163 && 0 != (tem = negate_expr (arg1))
10164 && TREE_CODE (tem) == INTEGER_CST
10165 && ! TREE_CONSTANT_OVERFLOW (tem))
10166 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10168 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10169 for !=. Don't do this for ordered comparisons due to overflow. */
10170 if (TREE_CODE (arg0) == MINUS_EXPR
10171 && integer_zerop (arg1))
10172 return fold_build2 (code, type,
10173 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10175 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10176 if (TREE_CODE (arg0) == ABS_EXPR
10177 && (integer_zerop (arg1) || real_zerop (arg1)))
10178 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10180 /* If this is an EQ or NE comparison with zero and ARG0 is
10181 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10182 two operations, but the latter can be done in one less insn
10183 on machines that have only two-operand insns or on which a
10184 constant cannot be the first operand. */
10185 if (TREE_CODE (arg0) == BIT_AND_EXPR
10186 && integer_zerop (arg1))
10188 tree arg00 = TREE_OPERAND (arg0, 0);
10189 tree arg01 = TREE_OPERAND (arg0, 1);
10190 if (TREE_CODE (arg00) == LSHIFT_EXPR
10191 && integer_onep (TREE_OPERAND (arg00, 0)))
10192 return
10193 fold_build2 (code, type,
10194 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10195 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10196 arg01, TREE_OPERAND (arg00, 1)),
10197 fold_convert (TREE_TYPE (arg0),
10198 integer_one_node)),
10199 arg1);
10200 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10201 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10202 return
10203 fold_build2 (code, type,
10204 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10205 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10206 arg00, TREE_OPERAND (arg01, 1)),
10207 fold_convert (TREE_TYPE (arg0),
10208 integer_one_node)),
10209 arg1);
10212 /* If this is an NE or EQ comparison of zero against the result of a
10213 signed MOD operation whose second operand is a power of 2, make
10214 the MOD operation unsigned since it is simpler and equivalent. */
10215 if (integer_zerop (arg1)
10216 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10217 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10218 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10219 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10220 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10221 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10223 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10224 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10225 fold_convert (newtype,
10226 TREE_OPERAND (arg0, 0)),
10227 fold_convert (newtype,
10228 TREE_OPERAND (arg0, 1)));
10230 return fold_build2 (code, type, newmod,
10231 fold_convert (newtype, arg1));
10234 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10235 C1 is a valid shift constant, and C2 is a power of two, i.e.
10236 a single bit. */
10237 if (TREE_CODE (arg0) == BIT_AND_EXPR
10238 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10239 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10240 == INTEGER_CST
10241 && integer_pow2p (TREE_OPERAND (arg0, 1))
10242 && integer_zerop (arg1))
10244 tree itype = TREE_TYPE (arg0);
10245 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10246 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10248 /* Check for a valid shift count. */
10249 if (TREE_INT_CST_HIGH (arg001) == 0
10250 && TREE_INT_CST_LOW (arg001) < prec)
10252 tree arg01 = TREE_OPERAND (arg0, 1);
10253 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10254 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10255 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10256 can be rewritten as (X & (C2 << C1)) != 0. */
10257 if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
10259 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10260 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10261 return fold_build2 (code, type, tem, arg1);
10263 /* Otherwise, for signed (arithmetic) shifts,
10264 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10265 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10266 else if (!TYPE_UNSIGNED (itype))
10267 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10268 arg000, build_int_cst (itype, 0));
10269 /* Otherwise, of unsigned (logical) shifts,
10270 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10271 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10272 else
10273 return omit_one_operand (type,
10274 code == EQ_EXPR ? integer_one_node
10275 : integer_zero_node,
10276 arg000);
10280 /* If this is an NE comparison of zero with an AND of one, remove the
10281 comparison since the AND will give the correct value. */
10282 if (code == NE_EXPR
10283 && integer_zerop (arg1)
10284 && TREE_CODE (arg0) == BIT_AND_EXPR
10285 && integer_onep (TREE_OPERAND (arg0, 1)))
10286 return fold_convert (type, arg0);
10288 /* If we have (A & C) == C where C is a power of 2, convert this into
10289 (A & C) != 0. Similarly for NE_EXPR. */
10290 if (TREE_CODE (arg0) == BIT_AND_EXPR
10291 && integer_pow2p (TREE_OPERAND (arg0, 1))
10292 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10293 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10294 arg0, fold_convert (TREE_TYPE (arg0),
10295 integer_zero_node));
10297 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10298 bit, then fold the expression into A < 0 or A >= 0. */
10299 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10300 if (tem)
10301 return tem;
10303 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10304 Similarly for NE_EXPR. */
10305 if (TREE_CODE (arg0) == BIT_AND_EXPR
10306 && TREE_CODE (arg1) == INTEGER_CST
10307 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10309 tree notc = fold_build1 (BIT_NOT_EXPR,
10310 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10311 TREE_OPERAND (arg0, 1));
10312 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10313 arg1, notc);
10314 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10315 if (integer_nonzerop (dandnotc))
10316 return omit_one_operand (type, rslt, arg0);
10319 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10320 Similarly for NE_EXPR. */
10321 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10322 && TREE_CODE (arg1) == INTEGER_CST
10323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10325 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10326 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10327 TREE_OPERAND (arg0, 1), notd);
10328 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10329 if (integer_nonzerop (candnotd))
10330 return omit_one_operand (type, rslt, arg0);
10333 /* If this is a comparison of a field, we may be able to simplify it. */
10334 if (((TREE_CODE (arg0) == COMPONENT_REF
10335 && lang_hooks.can_use_bit_fields_p ())
10336 || TREE_CODE (arg0) == BIT_FIELD_REF)
10337 /* Handle the constant case even without -O
10338 to make sure the warnings are given. */
10339 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10341 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10342 if (t1)
10343 return t1;
10346 /* Optimize comparisons of strlen vs zero to a compare of the
10347 first character of the string vs zero. To wit,
10348 strlen(ptr) == 0 => *ptr == 0
10349 strlen(ptr) != 0 => *ptr != 0
10350 Other cases should reduce to one of these two (or a constant)
10351 due to the return value of strlen being unsigned. */
10352 if (TREE_CODE (arg0) == CALL_EXPR
10353 && integer_zerop (arg1))
10355 tree fndecl = get_callee_fndecl (arg0);
10356 tree arglist;
10358 if (fndecl
10359 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10360 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10361 && (arglist = TREE_OPERAND (arg0, 1))
10362 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10363 && ! TREE_CHAIN (arglist))
10365 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10366 return fold_build2 (code, type, iref,
10367 build_int_cst (TREE_TYPE (iref), 0));
10371 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10372 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10373 if (TREE_CODE (arg0) == RSHIFT_EXPR
10374 && integer_zerop (arg1)
10375 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10377 tree arg00 = TREE_OPERAND (arg0, 0);
10378 tree arg01 = TREE_OPERAND (arg0, 1);
10379 tree itype = TREE_TYPE (arg00);
10380 if (TREE_INT_CST_HIGH (arg01) == 0
10381 && TREE_INT_CST_LOW (arg01)
10382 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10384 if (TYPE_UNSIGNED (itype))
10386 itype = lang_hooks.types.signed_type (itype);
10387 arg00 = fold_convert (itype, arg00);
10389 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10390 type, arg00, build_int_cst (itype, 0));
10394 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10395 if (integer_zerop (arg1)
10396 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10397 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10398 TREE_OPERAND (arg0, 1));
10400 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10401 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10402 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10403 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10404 build_int_cst (TREE_TYPE (arg1), 0));
10405 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10406 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10407 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10408 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10409 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10410 build_int_cst (TREE_TYPE (arg1), 0));
10412 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10413 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10414 && TREE_CODE (arg1) == INTEGER_CST
10415 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10416 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10417 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10418 TREE_OPERAND (arg0, 1), arg1));
10420 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10421 (X & C) == 0 when C is a single bit. */
10422 if (TREE_CODE (arg0) == BIT_AND_EXPR
10423 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10424 && integer_zerop (arg1)
10425 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10427 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10428 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10429 TREE_OPERAND (arg0, 1));
10430 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10431 type, tem, arg1);
10434 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10435 constant C is a power of two, i.e. a single bit. */
10436 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10437 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10438 && integer_zerop (arg1)
10439 && integer_pow2p (TREE_OPERAND (arg0, 1))
10440 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10441 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10443 tree arg00 = TREE_OPERAND (arg0, 0);
10444 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10445 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10448 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10449 when is C is a power of two, i.e. a single bit. */
10450 if (TREE_CODE (arg0) == BIT_AND_EXPR
10451 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10452 && integer_zerop (arg1)
10453 && integer_pow2p (TREE_OPERAND (arg0, 1))
10454 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10455 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10457 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10458 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10459 arg000, TREE_OPERAND (arg0, 1));
10460 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10461 tem, build_int_cst (TREE_TYPE (tem), 0));
10464 /* If this is a comparison of two exprs that look like an
10465 ARRAY_REF of the same object, then we can fold this to a
10466 comparison of the two offsets. This is only safe for
10467 EQ_EXPR and NE_EXPR because of overflow issues. */
10469 tree base0, offset0, base1, offset1;
10471 if (extract_array_ref (arg0, &base0, &offset0)
10472 && extract_array_ref (arg1, &base1, &offset1)
10473 && operand_equal_p (base0, base1, 0))
10475 /* Handle no offsets on both sides specially. */
10476 if (offset0 == NULL_TREE && offset1 == NULL_TREE)
10477 return fold_build2 (code, type, integer_zero_node,
10478 integer_zero_node);
10480 if (!offset0 || !offset1
10481 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
10483 if (offset0 == NULL_TREE)
10484 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
10485 if (offset1 == NULL_TREE)
10486 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
10487 return fold_build2 (code, type, offset0, offset1);
10492 if (integer_zerop (arg1)
10493 && tree_expr_nonzero_p (arg0))
10495 tree res = constant_boolean_node (code==NE_EXPR, type);
10496 return omit_one_operand (type, res, arg0);
10498 return NULL_TREE;
10500 case LT_EXPR:
10501 case GT_EXPR:
10502 case LE_EXPR:
10503 case GE_EXPR:
10504 tem = fold_comparison (code, type, op0, op1);
10505 if (tem != NULL_TREE)
10506 return tem;
10508 /* Transform comparisons of the form X +- C CMP X. */
10509 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10510 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10511 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10512 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10513 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10514 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10515 && !(flag_wrapv || flag_trapv))))
10517 tree arg01 = TREE_OPERAND (arg0, 1);
10518 enum tree_code code0 = TREE_CODE (arg0);
10519 int is_positive;
10521 if (TREE_CODE (arg01) == REAL_CST)
10522 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10523 else
10524 is_positive = tree_int_cst_sgn (arg01);
10526 /* (X - c) > X becomes false. */
10527 if (code == GT_EXPR
10528 && ((code0 == MINUS_EXPR && is_positive >= 0)
10529 || (code0 == PLUS_EXPR && is_positive <= 0)))
10530 return constant_boolean_node (0, type);
10532 /* Likewise (X + c) < X becomes false. */
10533 if (code == LT_EXPR
10534 && ((code0 == PLUS_EXPR && is_positive >= 0)
10535 || (code0 == MINUS_EXPR && is_positive <= 0)))
10536 return constant_boolean_node (0, type);
10538 /* Convert (X - c) <= X to true. */
10539 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10540 && code == LE_EXPR
10541 && ((code0 == MINUS_EXPR && is_positive >= 0)
10542 || (code0 == PLUS_EXPR && is_positive <= 0)))
10543 return constant_boolean_node (1, type);
10545 /* Convert (X + c) >= X to true. */
10546 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10547 && code == GE_EXPR
10548 && ((code0 == PLUS_EXPR && is_positive >= 0)
10549 || (code0 == MINUS_EXPR && is_positive <= 0)))
10550 return constant_boolean_node (1, type);
10552 if (TREE_CODE (arg01) == INTEGER_CST)
10554 /* Convert X + c > X and X - c < X to true for integers. */
10555 if (code == GT_EXPR
10556 && ((code0 == PLUS_EXPR && is_positive > 0)
10557 || (code0 == MINUS_EXPR && is_positive < 0)))
10558 return constant_boolean_node (1, type);
10560 if (code == LT_EXPR
10561 && ((code0 == MINUS_EXPR && is_positive > 0)
10562 || (code0 == PLUS_EXPR && is_positive < 0)))
10563 return constant_boolean_node (1, type);
10565 /* Convert X + c <= X and X - c >= X to false for integers. */
10566 if (code == LE_EXPR
10567 && ((code0 == PLUS_EXPR && is_positive > 0)
10568 || (code0 == MINUS_EXPR && is_positive < 0)))
10569 return constant_boolean_node (0, type);
10571 if (code == GE_EXPR
10572 && ((code0 == MINUS_EXPR && is_positive > 0)
10573 || (code0 == PLUS_EXPR && is_positive < 0)))
10574 return constant_boolean_node (0, type);
10578 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10579 This transformation affects the cases which are handled in later
10580 optimizations involving comparisons with non-negative constants. */
10581 if (TREE_CODE (arg1) == INTEGER_CST
10582 && TREE_CODE (arg0) != INTEGER_CST
10583 && tree_int_cst_sgn (arg1) > 0)
10585 if (code == GE_EXPR)
10587 arg1 = const_binop (MINUS_EXPR, arg1,
10588 build_int_cst (TREE_TYPE (arg1), 1), 0);
10589 return fold_build2 (GT_EXPR, type, arg0,
10590 fold_convert (TREE_TYPE (arg0), arg1));
10592 if (code == LT_EXPR)
10594 arg1 = const_binop (MINUS_EXPR, arg1,
10595 build_int_cst (TREE_TYPE (arg1), 1), 0);
10596 return fold_build2 (LE_EXPR, type, arg0,
10597 fold_convert (TREE_TYPE (arg0), arg1));
10601 /* Comparisons with the highest or lowest possible integer of
10602 the specified size will have known values. */
10604 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10606 if (TREE_CODE (arg1) == INTEGER_CST
10607 && ! TREE_CONSTANT_OVERFLOW (arg1)
10608 && width <= 2 * HOST_BITS_PER_WIDE_INT
10609 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10610 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10612 HOST_WIDE_INT signed_max_hi;
10613 unsigned HOST_WIDE_INT signed_max_lo;
10614 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10616 if (width <= HOST_BITS_PER_WIDE_INT)
10618 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10619 - 1;
10620 signed_max_hi = 0;
10621 max_hi = 0;
10623 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10625 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10626 min_lo = 0;
10627 min_hi = 0;
10629 else
10631 max_lo = signed_max_lo;
10632 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10633 min_hi = -1;
10636 else
10638 width -= HOST_BITS_PER_WIDE_INT;
10639 signed_max_lo = -1;
10640 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10641 - 1;
10642 max_lo = -1;
10643 min_lo = 0;
10645 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10647 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10648 min_hi = 0;
10650 else
10652 max_hi = signed_max_hi;
10653 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10657 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10658 && TREE_INT_CST_LOW (arg1) == max_lo)
10659 switch (code)
10661 case GT_EXPR:
10662 return omit_one_operand (type, integer_zero_node, arg0);
10664 case GE_EXPR:
10665 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10667 case LE_EXPR:
10668 return omit_one_operand (type, integer_one_node, arg0);
10670 case LT_EXPR:
10671 return fold_build2 (NE_EXPR, type, arg0, arg1);
10673 /* The GE_EXPR and LT_EXPR cases above are not normally
10674 reached because of previous transformations. */
10676 default:
10677 break;
10679 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10680 == max_hi
10681 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10682 switch (code)
10684 case GT_EXPR:
10685 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10686 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10687 case LE_EXPR:
10688 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10689 return fold_build2 (NE_EXPR, type, arg0, arg1);
10690 default:
10691 break;
10693 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10694 == min_hi
10695 && TREE_INT_CST_LOW (arg1) == min_lo)
10696 switch (code)
10698 case LT_EXPR:
10699 return omit_one_operand (type, integer_zero_node, arg0);
10701 case LE_EXPR:
10702 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10704 case GE_EXPR:
10705 return omit_one_operand (type, integer_one_node, arg0);
10707 case GT_EXPR:
10708 return fold_build2 (NE_EXPR, type, op0, op1);
10710 default:
10711 break;
10713 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10714 == min_hi
10715 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10716 switch (code)
10718 case GE_EXPR:
10719 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10720 return fold_build2 (NE_EXPR, type, arg0, arg1);
10721 case LT_EXPR:
10722 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10723 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10724 default:
10725 break;
10728 else if (!in_gimple_form
10729 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10730 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10731 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10732 /* signed_type does not work on pointer types. */
10733 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10735 /* The following case also applies to X < signed_max+1
10736 and X >= signed_max+1 because previous transformations. */
10737 if (code == LE_EXPR || code == GT_EXPR)
10739 tree st0, st1;
10740 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10741 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10742 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10743 type, fold_convert (st0, arg0),
10744 build_int_cst (st1, 0));
10750 /* If we are comparing an ABS_EXPR with a constant, we can
10751 convert all the cases into explicit comparisons, but they may
10752 well not be faster than doing the ABS and one comparison.
10753 But ABS (X) <= C is a range comparison, which becomes a subtraction
10754 and a comparison, and is probably faster. */
10755 if (code == LE_EXPR
10756 && TREE_CODE (arg1) == INTEGER_CST
10757 && TREE_CODE (arg0) == ABS_EXPR
10758 && ! TREE_SIDE_EFFECTS (arg0)
10759 && (0 != (tem = negate_expr (arg1)))
10760 && TREE_CODE (tem) == INTEGER_CST
10761 && ! TREE_CONSTANT_OVERFLOW (tem))
10762 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10763 build2 (GE_EXPR, type,
10764 TREE_OPERAND (arg0, 0), tem),
10765 build2 (LE_EXPR, type,
10766 TREE_OPERAND (arg0, 0), arg1));
10768 /* Convert ABS_EXPR<x> >= 0 to true. */
10769 if (code == GE_EXPR
10770 && tree_expr_nonnegative_p (arg0)
10771 && (integer_zerop (arg1)
10772 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10773 && real_zerop (arg1))))
10774 return omit_one_operand (type, integer_one_node, arg0);
10776 /* Convert ABS_EXPR<x> < 0 to false. */
10777 if (code == LT_EXPR
10778 && tree_expr_nonnegative_p (arg0)
10779 && (integer_zerop (arg1) || real_zerop (arg1)))
10780 return omit_one_operand (type, integer_zero_node, arg0);
10782 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10783 and similarly for >= into !=. */
10784 if ((code == LT_EXPR || code == GE_EXPR)
10785 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10786 && TREE_CODE (arg1) == LSHIFT_EXPR
10787 && integer_onep (TREE_OPERAND (arg1, 0)))
10788 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10789 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10790 TREE_OPERAND (arg1, 1)),
10791 build_int_cst (TREE_TYPE (arg0), 0));
10793 if ((code == LT_EXPR || code == GE_EXPR)
10794 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10795 && (TREE_CODE (arg1) == NOP_EXPR
10796 || TREE_CODE (arg1) == CONVERT_EXPR)
10797 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10798 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10799 return
10800 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10801 fold_convert (TREE_TYPE (arg0),
10802 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10803 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10804 1))),
10805 build_int_cst (TREE_TYPE (arg0), 0));
10807 return NULL_TREE;
10809 case UNORDERED_EXPR:
10810 case ORDERED_EXPR:
10811 case UNLT_EXPR:
10812 case UNLE_EXPR:
10813 case UNGT_EXPR:
10814 case UNGE_EXPR:
10815 case UNEQ_EXPR:
10816 case LTGT_EXPR:
10817 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10819 t1 = fold_relational_const (code, type, arg0, arg1);
10820 if (t1 != NULL_TREE)
10821 return t1;
10824 /* If the first operand is NaN, the result is constant. */
10825 if (TREE_CODE (arg0) == REAL_CST
10826 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10827 && (code != LTGT_EXPR || ! flag_trapping_math))
10829 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10830 ? integer_zero_node
10831 : integer_one_node;
10832 return omit_one_operand (type, t1, arg1);
10835 /* If the second operand is NaN, the result is constant. */
10836 if (TREE_CODE (arg1) == REAL_CST
10837 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10838 && (code != LTGT_EXPR || ! flag_trapping_math))
10840 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10841 ? integer_zero_node
10842 : integer_one_node;
10843 return omit_one_operand (type, t1, arg0);
10846 /* Simplify unordered comparison of something with itself. */
10847 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10848 && operand_equal_p (arg0, arg1, 0))
10849 return constant_boolean_node (1, type);
10851 if (code == LTGT_EXPR
10852 && !flag_trapping_math
10853 && operand_equal_p (arg0, arg1, 0))
10854 return constant_boolean_node (0, type);
10856 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10858 tree targ0 = strip_float_extensions (arg0);
10859 tree targ1 = strip_float_extensions (arg1);
10860 tree newtype = TREE_TYPE (targ0);
10862 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10863 newtype = TREE_TYPE (targ1);
10865 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10866 return fold_build2 (code, type, fold_convert (newtype, targ0),
10867 fold_convert (newtype, targ1));
10870 return NULL_TREE;
10872 case COMPOUND_EXPR:
10873 /* When pedantic, a compound expression can be neither an lvalue
10874 nor an integer constant expression. */
10875 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10876 return NULL_TREE;
10877 /* Don't let (0, 0) be null pointer constant. */
10878 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10879 : fold_convert (type, arg1);
10880 return pedantic_non_lvalue (tem);
10882 case COMPLEX_EXPR:
10883 if ((TREE_CODE (arg0) == REAL_CST
10884 && TREE_CODE (arg1) == REAL_CST)
10885 || (TREE_CODE (arg0) == INTEGER_CST
10886 && TREE_CODE (arg1) == INTEGER_CST))
10887 return build_complex (type, arg0, arg1);
10888 return NULL_TREE;
10890 case ASSERT_EXPR:
10891 /* An ASSERT_EXPR should never be passed to fold_binary. */
10892 gcc_unreachable ();
10894 default:
10895 return NULL_TREE;
10896 } /* switch (code) */
10899 /* Callback for walk_tree, looking for LABEL_EXPR.
10900 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
10901 Do not check the sub-tree of GOTO_EXPR. */
10903 static tree
10904 contains_label_1 (tree *tp,
10905 int *walk_subtrees,
10906 void *data ATTRIBUTE_UNUSED)
10908 switch (TREE_CODE (*tp))
10910 case LABEL_EXPR:
10911 return *tp;
10912 case GOTO_EXPR:
10913 *walk_subtrees = 0;
10914 /* no break */
10915 default:
10916 return NULL_TREE;
10920 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10921 accessible from outside the sub-tree. Returns NULL_TREE if no
10922 addressable label is found. */
10924 static bool
10925 contains_label_p (tree st)
10927 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.

   The transformations below are order-sensitive: several later cases
   assume earlier, more specific cases did not match.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      /* Fold a field access on a constant CONSTRUCTOR to the matching
	 initializer value, unless placeholders make sizes context
	 dependent.  */
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}
      /* A ? X : X simplifies to X (arg0 still evaluated for effects).  */
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = invert_truthvalue (arg0);
	  if (COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	return fold_convert (type,
			     fold_build2 (BIT_AND_EXPR,
					  TREE_TYPE (tem), tem,
					  fold_convert (TREE_TYPE (tem), arg1)));

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
      return NULL_TREE;

    case BIT_FIELD_REF:
      /* Extracting one element of a constant vector: walk the element
	 chain to position IDX; past-the-end positions read as zero.  */
      if (TREE_CODE (arg0) == VECTOR_CST
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
/* Under fold checking, rename this definition to fold_1; the checking
   wrapper below (also named fold) checksums EXPR around a call to it.  */
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* Dispatch to the arity-specific worker; a NULL result means
     "nothing simplified", in which case EXPR is returned unchanged.  */
  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      /* A CONST_DECL folds to (the folded form of) its initializer.  */
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  /* The hash table records trees already visited, so shared sub-trees
     and cycles are checksummed only once.  */
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  /* Any difference means fold_1 mutated its input in place.  */
  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
11288 void
11289 print_fold_checksum (tree expr)
11291 struct md5_ctx ctx;
11292 unsigned char checksum[16], cnt;
11293 htab_t ht;
11295 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11296 md5_init_ctx (&ctx);
11297 fold_checksum_tree (expr, &ctx, ht);
11298 md5_finish_ctx (&ctx, checksum);
11299 htab_delete (ht);
11300 for (cnt = 0; cnt < 16; ++cnt)
11301 fprintf (stderr, "%02x", checksum[cnt]);
11302 putc ('\n', stderr);
/* Called when the before/after digests of a fold argument differ,
   i.e. fold mutated a tree it was given.  This is always an internal
   compiler error; the parameters exist only for inspection from a
   debugger.  */

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
/* Feed a deep, deterministic fingerprint of EXPR into the md5 context
   CTX.  HT maps already-visited nodes to themselves so shared sub-trees
   (and cycles) are processed exactly once.  Fields that fold is allowed
   to change as a side effect (DECL_ASSEMBLER_NAME, type caches and
   back-pointers) are masked out by checksumming a scrubbed stack copy
   of the node instead of the node itself.  */

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  /* BUF must be large enough to hold a scrubbed copy of any node we
     may need to copy below (expressions and types).  */
  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  /* Hash the raw bytes of the (possibly scrubbed) node, then recurse
     into the pointed-to trees.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  /* Iterate down the chain instead of recursing, to keep stack
	     usage bounded on long lists.  */
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
11457 /* Fold a unary tree expression with code CODE of type TYPE with an
11458 operand OP0. Return a folded expression if successful. Otherwise,
11459 return a tree expression with code CODE of type TYPE with an
11460 operand OP0. */
11462 tree
11463 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11465 tree tem;
11466 #ifdef ENABLE_FOLD_CHECKING
11467 unsigned char checksum_before[16], checksum_after[16];
11468 struct md5_ctx ctx;
11469 htab_t ht;
11471 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11472 md5_init_ctx (&ctx);
11473 fold_checksum_tree (op0, &ctx, ht);
11474 md5_finish_ctx (&ctx, checksum_before);
11475 htab_empty (ht);
11476 #endif
11478 tem = fold_unary (code, type, op0);
11479 if (!tem)
11480 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11482 #ifdef ENABLE_FOLD_CHECKING
11483 md5_init_ctx (&ctx);
11484 fold_checksum_tree (op0, &ctx, ht);
11485 md5_finish_ctx (&ctx, checksum_after);
11486 htab_delete (ht);
11488 if (memcmp (checksum_before, checksum_after, 16))
11489 fold_check_failed (op0, tem);
11490 #endif
11491 return tem;
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.

   Under ENABLE_FOLD_CHECKING each operand is md5-digested before and
   after the fold attempt; a change aborts via fold_check_failed.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  /* A NULL result from fold_binary means no simplification applied;
     fall back to constructing the expression verbatim.  */
  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.

   Under ENABLE_FOLD_CHECKING each operand is md5-digested before and
   after the fold attempt; a change aborts via fold_check_failed.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  /* A NULL result from fold_ternary means no simplification applied;
     fall back to constructing the expression verbatim.  */
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.

   START_FOLD_INIT saves and clears the flags that make fold
   conservative about FP traps and signed overflow; END_FOLD_INIT
   restores them.  The macros declare locals, so START_FOLD_INIT must
   come right after the last declaration in each wrapper.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv

/* As fold_build1, but with trap/overflow flags suppressed.  */

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

/* As fold_build2, but with trap/overflow flags suppressed.  */

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

/* As fold_build3, but with trap/overflow flags suppressed.  */

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  /* Anything is trivially a multiple of itself.  */
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      /* A product is a multiple if either factor is.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A sum/difference is a multiple only if both operands are.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      /* X << C is a multiple of BOTTOM when (1 << C) is, provided the
	 shift itself does not overflow.  */
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ... */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      /* For two constants, just try the division -- but refuse negative
	 constants in unsigned types, where the modulo would be computed
	 on the wrapped-around value.  */
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
11790 /* Return true if `t' is known to be non-negative. */
11793 tree_expr_nonnegative_p (tree t)
11795 if (t == error_mark_node)
11796 return 0;
11798 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11799 return 1;
11801 switch (TREE_CODE (t))
11803 case SSA_NAME:
11804 /* Query VRP to see if it has recorded any information about
11805 the range of this object. */
11806 return ssa_name_nonnegative_p (t);
11808 case ABS_EXPR:
11809 /* We can't return 1 if flag_wrapv is set because
11810 ABS_EXPR<INT_MIN> = INT_MIN. */
11811 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11812 return 1;
11813 break;
11815 case INTEGER_CST:
11816 return tree_int_cst_sgn (t) >= 0;
11818 case REAL_CST:
11819 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11821 case PLUS_EXPR:
11822 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11823 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11824 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11826 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11827 both unsigned and at least 2 bits shorter than the result. */
11828 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11829 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11830 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11832 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11833 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11834 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11835 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11837 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11838 TYPE_PRECISION (inner2)) + 1;
11839 return prec < TYPE_PRECISION (TREE_TYPE (t));
11842 break;
11844 case MULT_EXPR:
11845 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11847 /* x * x for floating point x is always non-negative. */
11848 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11849 return 1;
11850 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11851 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11854 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11855 both unsigned and their total bits is shorter than the result. */
11856 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11857 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11858 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11860 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11861 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11862 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11863 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11864 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11865 < TYPE_PRECISION (TREE_TYPE (t));
11867 return 0;
11869 case BIT_AND_EXPR:
11870 case MAX_EXPR:
11871 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11872 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11874 case BIT_IOR_EXPR:
11875 case BIT_XOR_EXPR:
11876 case MIN_EXPR:
11877 case RDIV_EXPR:
11878 case TRUNC_DIV_EXPR:
11879 case CEIL_DIV_EXPR:
11880 case FLOOR_DIV_EXPR:
11881 case ROUND_DIV_EXPR:
11882 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11883 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11885 case TRUNC_MOD_EXPR:
11886 case CEIL_MOD_EXPR:
11887 case FLOOR_MOD_EXPR:
11888 case ROUND_MOD_EXPR:
11889 case SAVE_EXPR:
11890 case NON_LVALUE_EXPR:
11891 case FLOAT_EXPR:
11892 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11894 case COMPOUND_EXPR:
11895 case MODIFY_EXPR:
11896 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11898 case BIND_EXPR:
11899 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
11901 case COND_EXPR:
11902 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
11903 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
11905 case NOP_EXPR:
11907 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11908 tree outer_type = TREE_TYPE (t);
11910 if (TREE_CODE (outer_type) == REAL_TYPE)
11912 if (TREE_CODE (inner_type) == REAL_TYPE)
11913 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11914 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11916 if (TYPE_UNSIGNED (inner_type))
11917 return 1;
11918 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11921 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11923 if (TREE_CODE (inner_type) == REAL_TYPE)
11924 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11925 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11926 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11927 && TYPE_UNSIGNED (inner_type);
11930 break;
11932 case TARGET_EXPR:
11934 tree temp = TARGET_EXPR_SLOT (t);
11935 t = TARGET_EXPR_INITIAL (t);
11937 /* If the initializer is non-void, then it's a normal expression
11938 that will be assigned to the slot. */
11939 if (!VOID_TYPE_P (t))
11940 return tree_expr_nonnegative_p (t);
11942 /* Otherwise, the initializer sets the slot in some way. One common
11943 way is an assignment statement at the end of the initializer. */
11944 while (1)
11946 if (TREE_CODE (t) == BIND_EXPR)
11947 t = expr_last (BIND_EXPR_BODY (t));
11948 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11949 || TREE_CODE (t) == TRY_CATCH_EXPR)
11950 t = expr_last (TREE_OPERAND (t, 0));
11951 else if (TREE_CODE (t) == STATEMENT_LIST)
11952 t = expr_last (t);
11953 else
11954 break;
11956 if (TREE_CODE (t) == MODIFY_EXPR
11957 && TREE_OPERAND (t, 0) == temp)
11958 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11960 return 0;
11963 case CALL_EXPR:
11965 tree fndecl = get_callee_fndecl (t);
11966 tree arglist = TREE_OPERAND (t, 1);
11967 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11968 switch (DECL_FUNCTION_CODE (fndecl))
11970 CASE_FLT_FN (BUILT_IN_ACOS):
11971 CASE_FLT_FN (BUILT_IN_ACOSH):
11972 CASE_FLT_FN (BUILT_IN_CABS):
11973 CASE_FLT_FN (BUILT_IN_COSH):
11974 CASE_FLT_FN (BUILT_IN_ERFC):
11975 CASE_FLT_FN (BUILT_IN_EXP):
11976 CASE_FLT_FN (BUILT_IN_EXP10):
11977 CASE_FLT_FN (BUILT_IN_EXP2):
11978 CASE_FLT_FN (BUILT_IN_FABS):
11979 CASE_FLT_FN (BUILT_IN_FDIM):
11980 CASE_FLT_FN (BUILT_IN_HYPOT):
11981 CASE_FLT_FN (BUILT_IN_POW10):
11982 CASE_INT_FN (BUILT_IN_FFS):
11983 CASE_INT_FN (BUILT_IN_PARITY):
11984 CASE_INT_FN (BUILT_IN_POPCOUNT):
11985 /* Always true. */
11986 return 1;
11988 CASE_FLT_FN (BUILT_IN_SQRT):
11989 /* sqrt(-0.0) is -0.0. */
11990 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11991 return 1;
11992 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11994 CASE_FLT_FN (BUILT_IN_ASINH):
11995 CASE_FLT_FN (BUILT_IN_ATAN):
11996 CASE_FLT_FN (BUILT_IN_ATANH):
11997 CASE_FLT_FN (BUILT_IN_CBRT):
11998 CASE_FLT_FN (BUILT_IN_CEIL):
11999 CASE_FLT_FN (BUILT_IN_ERF):
12000 CASE_FLT_FN (BUILT_IN_EXPM1):
12001 CASE_FLT_FN (BUILT_IN_FLOOR):
12002 CASE_FLT_FN (BUILT_IN_FMOD):
12003 CASE_FLT_FN (BUILT_IN_FREXP):
12004 CASE_FLT_FN (BUILT_IN_LCEIL):
12005 CASE_FLT_FN (BUILT_IN_LDEXP):
12006 CASE_FLT_FN (BUILT_IN_LFLOOR):
12007 CASE_FLT_FN (BUILT_IN_LLCEIL):
12008 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12009 CASE_FLT_FN (BUILT_IN_LLRINT):
12010 CASE_FLT_FN (BUILT_IN_LLROUND):
12011 CASE_FLT_FN (BUILT_IN_LRINT):
12012 CASE_FLT_FN (BUILT_IN_LROUND):
12013 CASE_FLT_FN (BUILT_IN_MODF):
12014 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12015 CASE_FLT_FN (BUILT_IN_POW):
12016 CASE_FLT_FN (BUILT_IN_RINT):
12017 CASE_FLT_FN (BUILT_IN_ROUND):
12018 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12019 CASE_FLT_FN (BUILT_IN_SINH):
12020 CASE_FLT_FN (BUILT_IN_TANH):
12021 CASE_FLT_FN (BUILT_IN_TRUNC):
12022 /* True if the 1st argument is nonnegative. */
12023 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12025 CASE_FLT_FN (BUILT_IN_FMAX):
12026 /* True if the 1st OR 2nd arguments are nonnegative. */
12027 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12028 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12030 CASE_FLT_FN (BUILT_IN_FMIN):
12031 /* True if the 1st AND 2nd arguments are nonnegative. */
12032 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12033 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12035 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12036 /* True if the 2nd argument is nonnegative. */
12037 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12039 default:
12040 break;
12044 /* ... fall through ... */
12046 default:
12047 if (truth_value_p (TREE_CODE (t)))
12048 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12049 return 1;
12052 /* We don't know sign of `t', so be conservative and return false. */
12053 return 0;
12056 /* Return true when T is an address and is known to be nonzero.
12057 For floating point we further ensure that T is not denormal.
12058 Similar logic is present in nonzero_address in rtlanal.h. */
12060 bool
12061 tree_expr_nonzero_p (tree t)
12063 tree type = TREE_TYPE (t);
12065 /* Doing something useful for floating point would need more work. */
12066 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12067 return false;
12069 switch (TREE_CODE (t))
12071 case SSA_NAME:
12072 /* Query VRP to see if it has recorded any information about
12073 the range of this object. */
12074 return ssa_name_nonzero_p (t);
12076 case ABS_EXPR:
12077 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12079 case INTEGER_CST:
12080 /* We used to test for !integer_zerop here. This does not work correctly
12081 if TREE_CONSTANT_OVERFLOW (t). */
12082 return (TREE_INT_CST_LOW (t) != 0
12083 || TREE_INT_CST_HIGH (t) != 0);
12085 case PLUS_EXPR:
12086 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12088 /* With the presence of negative values it is hard
12089 to say something. */
12090 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12091 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12092 return false;
12093 /* One of operands must be positive and the other non-negative. */
12094 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12095 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12097 break;
12099 case MULT_EXPR:
12100 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12102 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12103 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12105 break;
12107 case NOP_EXPR:
12109 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12110 tree outer_type = TREE_TYPE (t);
12112 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12113 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12115 break;
12117 case ADDR_EXPR:
12119 tree base = get_base_address (TREE_OPERAND (t, 0));
12121 if (!base)
12122 return false;
12124 /* Weak declarations may link to NULL. */
12125 if (VAR_OR_FUNCTION_DECL_P (base))
12126 return !DECL_WEAK (base);
12128 /* Constants are never weak. */
12129 if (CONSTANT_CLASS_P (base))
12130 return true;
12132 return false;
12135 case COND_EXPR:
12136 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12137 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12139 case MIN_EXPR:
12140 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12141 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12143 case MAX_EXPR:
12144 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12146 /* When both operands are nonzero, then MAX must be too. */
12147 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12148 return true;
12150 /* MAX where operand 0 is positive is positive. */
12151 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12153 /* MAX where operand 1 is positive is positive. */
12154 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12155 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12156 return true;
12157 break;
12159 case COMPOUND_EXPR:
12160 case MODIFY_EXPR:
12161 case BIND_EXPR:
12162 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12164 case SAVE_EXPR:
12165 case NON_LVALUE_EXPR:
12166 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12168 case BIT_IOR_EXPR:
12169 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12170 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12172 case CALL_EXPR:
12173 return alloca_call_p (t);
12175 default:
12176 break;
12178 return false;
12181 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12182 attempt to fold the expression to a constant without modifying TYPE,
12183 OP0 or OP1.
12185 If the expression could be simplified to a constant, then return
12186 the constant. If the expression would not be simplified to a
12187 constant, then return NULL_TREE. */
12189 tree
12190 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12192 tree tem = fold_binary (code, type, op0, op1);
12193 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12196 /* Given the components of a unary expression CODE, TYPE and OP0,
12197 attempt to fold the expression to a constant without modifying
12198 TYPE or OP0.
12200 If the expression could be simplified to a constant, then return
12201 the constant. If the expression would not be simplified to a
12202 constant, then return NULL_TREE. */
12204 tree
12205 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12207 tree tem = fold_unary (code, type, op0);
12208 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12211 /* If EXP represents referencing an element in a constant string
12212 (either via pointer arithmetic or array indexing), return the
12213 tree representing the value accessed, otherwise return NULL. */
12215 tree
12216 fold_read_from_constant_string (tree exp)
12218 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12220 tree exp1 = TREE_OPERAND (exp, 0);
12221 tree index;
12222 tree string;
12224 if (TREE_CODE (exp) == INDIRECT_REF)
12225 string = string_constant (exp1, &index);
12226 else
12228 tree low_bound = array_ref_low_bound (exp);
12229 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12231 /* Optimize the special-case of a zero lower bound.
12233 We convert the low_bound to sizetype to avoid some problems
12234 with constant folding. (E.g. suppose the lower bound is 1,
12235 and its mode is QI. Without the conversion,l (ARRAY
12236 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12237 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
12238 if (! integer_zerop (low_bound))
12239 index = size_diffop (index, fold_convert (sizetype, low_bound));
12241 string = exp1;
12244 if (string
12245 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12246 && TREE_CODE (string) == STRING_CST
12247 && TREE_CODE (index) == INTEGER_CST
12248 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12249 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12250 == MODE_INT)
12251 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12252 return fold_convert (TREE_TYPE (exp),
12253 build_int_cst (NULL_TREE,
12254 (TREE_STRING_POINTER (string)
12255 [TREE_INT_CST_LOW (index)])));
12257 return NULL;
12260 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12261 an integer constant or real constant.
12263 TYPE is the type of the result. */
12265 static tree
12266 fold_negate_const (tree arg0, tree type)
12268 tree t = NULL_TREE;
12270 switch (TREE_CODE (arg0))
12272 case INTEGER_CST:
12274 unsigned HOST_WIDE_INT low;
12275 HOST_WIDE_INT high;
12276 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12277 TREE_INT_CST_HIGH (arg0),
12278 &low, &high);
12279 t = build_int_cst_wide (type, low, high);
12280 t = force_fit_type (t, 1,
12281 (overflow | TREE_OVERFLOW (arg0))
12282 && !TYPE_UNSIGNED (type),
12283 TREE_CONSTANT_OVERFLOW (arg0));
12284 break;
12287 case REAL_CST:
12288 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12289 break;
12291 default:
12292 gcc_unreachable ();
12295 return t;
12298 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12299 an integer constant or real constant.
12301 TYPE is the type of the result. */
12303 tree
12304 fold_abs_const (tree arg0, tree type)
12306 tree t = NULL_TREE;
12308 switch (TREE_CODE (arg0))
12310 case INTEGER_CST:
12311 /* If the value is unsigned, then the absolute value is
12312 the same as the ordinary value. */
12313 if (TYPE_UNSIGNED (type))
12314 t = arg0;
12315 /* Similarly, if the value is non-negative. */
12316 else if (INT_CST_LT (integer_minus_one_node, arg0))
12317 t = arg0;
12318 /* If the value is negative, then the absolute value is
12319 its negation. */
12320 else
12322 unsigned HOST_WIDE_INT low;
12323 HOST_WIDE_INT high;
12324 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12325 TREE_INT_CST_HIGH (arg0),
12326 &low, &high);
12327 t = build_int_cst_wide (type, low, high);
12328 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12329 TREE_CONSTANT_OVERFLOW (arg0));
12331 break;
12333 case REAL_CST:
12334 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12335 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12336 else
12337 t = arg0;
12338 break;
12340 default:
12341 gcc_unreachable ();
12344 return t;
12347 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12348 constant. TYPE is the type of the result. */
12350 static tree
12351 fold_not_const (tree arg0, tree type)
12353 tree t = NULL_TREE;
12355 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12357 t = build_int_cst_wide (type,
12358 ~ TREE_INT_CST_LOW (arg0),
12359 ~ TREE_INT_CST_HIGH (arg0));
12360 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12361 TREE_CONSTANT_OVERFLOW (arg0));
12363 return t;
12366 /* Given CODE, a relational operator, the target type, TYPE and two
12367 constant operands OP0 and OP1, return the result of the
12368 relational operation. If the result is not a compile time
12369 constant, then return NULL_TREE. */
12371 static tree
12372 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12374 int result, invert;
12376 /* From here on, the only cases we handle are when the result is
12377 known to be a constant. */
12379 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12381 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12382 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12384 /* Handle the cases where either operand is a NaN. */
12385 if (real_isnan (c0) || real_isnan (c1))
12387 switch (code)
12389 case EQ_EXPR:
12390 case ORDERED_EXPR:
12391 result = 0;
12392 break;
12394 case NE_EXPR:
12395 case UNORDERED_EXPR:
12396 case UNLT_EXPR:
12397 case UNLE_EXPR:
12398 case UNGT_EXPR:
12399 case UNGE_EXPR:
12400 case UNEQ_EXPR:
12401 result = 1;
12402 break;
12404 case LT_EXPR:
12405 case LE_EXPR:
12406 case GT_EXPR:
12407 case GE_EXPR:
12408 case LTGT_EXPR:
12409 if (flag_trapping_math)
12410 return NULL_TREE;
12411 result = 0;
12412 break;
12414 default:
12415 gcc_unreachable ();
12418 return constant_boolean_node (result, type);
12421 return constant_boolean_node (real_compare (code, c0, c1), type);
12424 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12426 To compute GT, swap the arguments and do LT.
12427 To compute GE, do LT and invert the result.
12428 To compute LE, swap the arguments, do LT and invert the result.
12429 To compute NE, do EQ and invert the result.
12431 Therefore, the code below must handle only EQ and LT. */
12433 if (code == LE_EXPR || code == GT_EXPR)
12435 tree tem = op0;
12436 op0 = op1;
12437 op1 = tem;
12438 code = swap_tree_comparison (code);
12441 /* Note that it is safe to invert for real values here because we
12442 have already handled the one case that it matters. */
12444 invert = 0;
12445 if (code == NE_EXPR || code == GE_EXPR)
12447 invert = 1;
12448 code = invert_tree_comparison (code, false);
12451 /* Compute a result for LT or EQ if args permit;
12452 Otherwise return T. */
12453 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12455 if (code == EQ_EXPR)
12456 result = tree_int_cst_equal (op0, op1);
12457 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12458 result = INT_CST_LT_UNSIGNED (op0, op1);
12459 else
12460 result = INT_CST_LT (op0, op1);
12462 else
12463 return NULL_TREE;
12465 if (invert)
12466 result ^= 1;
12467 return constant_boolean_node (result, type);
12470 /* Build an expression for the a clean point containing EXPR with type TYPE.
12471 Don't build a cleanup point expression for EXPR which don't have side
12472 effects. */
12474 tree
12475 fold_build_cleanup_point_expr (tree type, tree expr)
12477 /* If the expression does not have side effects then we don't have to wrap
12478 it with a cleanup point expression. */
12479 if (!TREE_SIDE_EFFECTS (expr))
12480 return expr;
12482 /* If the expression is a return, check to see if the expression inside the
12483 return has no side effects or the right hand side of the modify expression
12484 inside the return. If either don't have side effects set we don't need to
12485 wrap the expression in a cleanup point expression. Note we don't check the
12486 left hand side of the modify because it should always be a return decl. */
12487 if (TREE_CODE (expr) == RETURN_EXPR)
12489 tree op = TREE_OPERAND (expr, 0);
12490 if (!op || !TREE_SIDE_EFFECTS (op))
12491 return expr;
12492 op = TREE_OPERAND (op, 1);
12493 if (!TREE_SIDE_EFFECTS (op))
12494 return expr;
12497 return build1 (CLEANUP_POINT_EXPR, type, expr);
12500 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12501 avoid confusing the gimplify process. */
12503 tree
12504 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12506 /* The size of the object is not relevant when talking about its address. */
12507 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12508 t = TREE_OPERAND (t, 0);
12510 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12511 if (TREE_CODE (t) == INDIRECT_REF
12512 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12514 t = TREE_OPERAND (t, 0);
12515 if (TREE_TYPE (t) != ptrtype)
12516 t = build1 (NOP_EXPR, ptrtype, t);
12518 else
12520 tree base = t;
12522 while (handled_component_p (base))
12523 base = TREE_OPERAND (base, 0);
12524 if (DECL_P (base))
12525 TREE_ADDRESSABLE (base) = 1;
12527 t = build1 (ADDR_EXPR, ptrtype, t);
12530 return t;
12533 tree
12534 build_fold_addr_expr (tree t)
12536 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
12539 /* Given a pointer value OP0 and a type TYPE, return a simplified version
12540 of an indirection through OP0, or NULL_TREE if no simplification is
12541 possible. */
12543 tree
12544 fold_indirect_ref_1 (tree type, tree op0)
12546 tree sub = op0;
12547 tree subtype;
12549 STRIP_NOPS (sub);
12550 subtype = TREE_TYPE (sub);
12551 if (!POINTER_TYPE_P (subtype))
12552 return NULL_TREE;
12554 if (TREE_CODE (sub) == ADDR_EXPR)
12556 tree op = TREE_OPERAND (sub, 0);
12557 tree optype = TREE_TYPE (op);
12558 /* *&p => p; make sure to handle *&"str"[cst] here. */
12559 if (type == optype)
12561 tree fop = fold_read_from_constant_string (op);
12562 if (fop)
12563 return fop;
12564 else
12565 return op;
12567 /* *(foo *)&fooarray => fooarray[0] */
12568 else if (TREE_CODE (optype) == ARRAY_TYPE
12569 && type == TREE_TYPE (optype))
12571 tree type_domain = TYPE_DOMAIN (optype);
12572 tree min_val = size_zero_node;
12573 if (type_domain && TYPE_MIN_VALUE (type_domain))
12574 min_val = TYPE_MIN_VALUE (type_domain);
12575 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
12577 /* *(foo *)&complexfoo => __real__ complexfoo */
12578 else if (TREE_CODE (optype) == COMPLEX_TYPE
12579 && type == TREE_TYPE (optype))
12580 return fold_build1 (REALPART_EXPR, type, op);
12583 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
12584 if (TREE_CODE (sub) == PLUS_EXPR
12585 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
12587 tree op00 = TREE_OPERAND (sub, 0);
12588 tree op01 = TREE_OPERAND (sub, 1);
12589 tree op00type;
12591 STRIP_NOPS (op00);
12592 op00type = TREE_TYPE (op00);
12593 if (TREE_CODE (op00) == ADDR_EXPR
12594 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
12595 && type == TREE_TYPE (TREE_TYPE (op00type)))
12597 tree size = TYPE_SIZE_UNIT (type);
12598 if (tree_int_cst_equal (size, op01))
12599 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
12603 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
12604 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
12605 && type == TREE_TYPE (TREE_TYPE (subtype)))
12607 tree type_domain;
12608 tree min_val = size_zero_node;
12609 sub = build_fold_indirect_ref (sub);
12610 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
12611 if (type_domain && TYPE_MIN_VALUE (type_domain))
12612 min_val = TYPE_MIN_VALUE (type_domain);
12613 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
12616 return NULL_TREE;
12619 /* Builds an expression for an indirection through T, simplifying some
12620 cases. */
12622 tree
12623 build_fold_indirect_ref (tree t)
12625 tree type = TREE_TYPE (TREE_TYPE (t));
12626 tree sub = fold_indirect_ref_1 (type, t);
12628 if (sub)
12629 return sub;
12630 else
12631 return build1 (INDIRECT_REF, type, t);
12634 /* Given an INDIRECT_REF T, return either T or a simplified version. */
12636 tree
12637 fold_indirect_ref (tree t)
12639 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
12641 if (sub)
12642 return sub;
12643 else
12644 return t;
12647 /* Strip non-trapping, non-side-effecting tree nodes from an expression
12648 whose result is ignored. The type of the returned tree need not be
12649 the same as the original expression. */
12651 tree
12652 fold_ignored_result (tree t)
12654 if (!TREE_SIDE_EFFECTS (t))
12655 return integer_zero_node;
12657 for (;;)
12658 switch (TREE_CODE_CLASS (TREE_CODE (t)))
12660 case tcc_unary:
12661 t = TREE_OPERAND (t, 0);
12662 break;
12664 case tcc_binary:
12665 case tcc_comparison:
12666 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12667 t = TREE_OPERAND (t, 0);
12668 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
12669 t = TREE_OPERAND (t, 1);
12670 else
12671 return t;
12672 break;
12674 case tcc_expression:
12675 switch (TREE_CODE (t))
12677 case COMPOUND_EXPR:
12678 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12679 return t;
12680 t = TREE_OPERAND (t, 0);
12681 break;
12683 case COND_EXPR:
12684 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
12685 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
12686 return t;
12687 t = TREE_OPERAND (t, 0);
12688 break;
12690 default:
12691 return t;
12693 break;
12695 default:
12696 return t;
12700 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
12701 This can only be applied to objects of a sizetype. */
12703 tree
12704 round_up (tree value, int divisor)
12706 tree div = NULL_TREE;
12708 gcc_assert (divisor > 0);
12709 if (divisor == 1)
12710 return value;
12712 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12713 have to do anything. Only do this when we are not given a const,
12714 because in that case, this check is more expensive than just
12715 doing it. */
12716 if (TREE_CODE (value) != INTEGER_CST)
12718 div = build_int_cst (TREE_TYPE (value), divisor);
12720 if (multiple_of_p (TREE_TYPE (value), value, div))
12721 return value;
12724 /* If divisor is a power of two, simplify this to bit manipulation. */
12725 if (divisor == (divisor & -divisor))
12727 tree t;
12729 t = build_int_cst (TREE_TYPE (value), divisor - 1);
12730 value = size_binop (PLUS_EXPR, value, t);
12731 t = build_int_cst (TREE_TYPE (value), -divisor);
12732 value = size_binop (BIT_AND_EXPR, value, t);
12734 else
12736 if (!div)
12737 div = build_int_cst (TREE_TYPE (value), divisor);
12738 value = size_binop (CEIL_DIV_EXPR, value, div);
12739 value = size_binop (MULT_EXPR, value, div);
12742 return value;
12745 /* Likewise, but round down. */
12747 tree
12748 round_down (tree value, int divisor)
12750 tree div = NULL_TREE;
12752 gcc_assert (divisor > 0);
12753 if (divisor == 1)
12754 return value;
12756 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12757 have to do anything. Only do this when we are not given a const,
12758 because in that case, this check is more expensive than just
12759 doing it. */
12760 if (TREE_CODE (value) != INTEGER_CST)
12762 div = build_int_cst (TREE_TYPE (value), divisor);
12764 if (multiple_of_p (TREE_TYPE (value), value, div))
12765 return value;
12768 /* If divisor is a power of two, simplify this to bit manipulation. */
12769 if (divisor == (divisor & -divisor))
12771 tree t;
12773 t = build_int_cst (TREE_TYPE (value), -divisor);
12774 value = size_binop (BIT_AND_EXPR, value, t);
12776 else
12778 if (!div)
12779 div = build_int_cst (TREE_TYPE (value), divisor);
12780 value = size_binop (FLOOR_DIV_EXPR, value, div);
12781 value = size_binop (MULT_EXPR, value, div);
12784 return value;
12787 /* Returns the pointer to the base of the object addressed by EXP and
12788 extracts the information about the offset of the access, storing it
12789 to PBITPOS and POFFSET. */
12791 static tree
12792 split_address_to_core_and_offset (tree exp,
12793 HOST_WIDE_INT *pbitpos, tree *poffset)
12795 tree core;
12796 enum machine_mode mode;
12797 int unsignedp, volatilep;
12798 HOST_WIDE_INT bitsize;
12800 if (TREE_CODE (exp) == ADDR_EXPR)
12802 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
12803 poffset, &mode, &unsignedp, &volatilep,
12804 false);
12805 core = build_fold_addr_expr (core);
12807 else
12809 core = exp;
12810 *pbitpos = 0;
12811 *poffset = NULL_TREE;
12814 return core;
12817 /* Returns true if addresses of E1 and E2 differ by a constant, false
12818 otherwise. If they do, E1 - E2 is stored in *DIFF. */
12820 bool
12821 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
12823 tree core1, core2;
12824 HOST_WIDE_INT bitpos1, bitpos2;
12825 tree toffset1, toffset2, tdiff, type;
12827 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
12828 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
12830 if (bitpos1 % BITS_PER_UNIT != 0
12831 || bitpos2 % BITS_PER_UNIT != 0
12832 || !operand_equal_p (core1, core2, 0))
12833 return false;
12835 if (toffset1 && toffset2)
12837 type = TREE_TYPE (toffset1);
12838 if (type != TREE_TYPE (toffset2))
12839 toffset2 = fold_convert (type, toffset2);
12841 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
12842 if (!cst_and_fits_in_hwi (tdiff))
12843 return false;
12845 *diff = int_cst_value (tdiff);
12847 else if (toffset1 || toffset2)
12849 /* If only one of the offsets is non-constant, the difference cannot
12850 be a constant. */
12851 return false;
12853 else
12854 *diff = 0;
12856 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
12857 return true;
12860 /* Simplify the floating point expression EXP when the sign of the
12861 result is not significant. Return NULL_TREE if no simplification
12862 is possible. */
12864 tree
12865 fold_strip_sign_ops (tree exp)
12867 tree arg0, arg1;
12869 switch (TREE_CODE (exp))
12871 case ABS_EXPR:
12872 case NEGATE_EXPR:
12873 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12874 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
12876 case MULT_EXPR:
12877 case RDIV_EXPR:
12878 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
12879 return NULL_TREE;
12880 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12881 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
12882 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
12883 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
12884 arg0 ? arg0 : TREE_OPERAND (exp, 0),
12885 arg1 ? arg1 : TREE_OPERAND (exp, 1));
12886 break;
12888 default:
12889 break;
12891 return NULL_TREE;